diff --git a/.github/README.md b/.github/README.md
new file mode 120000
index 000000000000..e5c578ba74b5
--- /dev/null
+++ b/.github/README.md
@@ -0,0 +1 @@
+../doc/benchcoin.md
\ No newline at end of file
diff --git a/.github/workflows/benchmark.yml b/.github/workflows/benchmark.yml
new file mode 100644
index 000000000000..f75fcafa2b90
--- /dev/null
+++ b/.github/workflows/benchmark.yml
@@ -0,0 +1,169 @@
+name: Benchmark
+on:
+  pull_request:
+    branches:
+      - master
+
+jobs:
+  build-binary:
+    runs-on: [self-hosted, linux, x64]
+    steps:
+      - name: Checkout repo
+        uses: actions/checkout@v4
+        with:
+          fetch-depth: 1
+
+      - name: Build PR binary
+        run: |
+          HEAD_SHA=$(git rev-parse HEAD)
+          echo "HEAD_SHA=${HEAD_SHA}" >> "$GITHUB_ENV"
+          nix develop --command python3 bench.py build \
+            -o ${{ runner.temp }}/binaries \
+            ${HEAD_SHA}:pr
+
+      - name: Upload binaries
+        uses: actions/upload-artifact@v4
+        with:
+          name: bitcoind-binaries
+          path: ${{ runner.temp }}/binaries/
+
+  benchmark:
+    needs: build-binary
+    strategy:
+      matrix:
+        # Matrix entries from configs/pr.toml: dbcache=[450,32000] x instrumentation=[uninstrumented,instrumented]
+        name: [450-uninstrumented, 450-instrumented, 32000-uninstrumented, 32000-instrumented]
+    runs-on: [self-hosted, linux, x64]
+    timeout-minutes: 600 # 10-hour ceiling per matrix entry
+    env:
+      ORIGINAL_DATADIR: /data/pruned-840k # chain snapshot present on the self-hosted runner
+    steps:
+      - name: Checkout repo
+        uses: actions/checkout@v4
+        with:
+          fetch-depth: 1
+
+      - name: Download binaries
+        uses: actions/download-artifact@v4
+        with:
+          name: bitcoind-binaries
+          path: ${{ runner.temp }}/binaries
+
+      - name: Set binary permissions
+        run: |
+          chmod +x ${{ runner.temp }}/binaries/pr/bitcoind
+
+      - name: Run benchmark
+        run: |
+          nix develop --command python3 bench.py run \
+            --benchmark-config bench/configs/pr.toml \
+            --matrix-entry ${{ matrix.name }} \
+            --datadir $ORIGINAL_DATADIR \
+            --tmp-datadir ${{ runner.temp }}/datadir \
+            --output-dir ${{ runner.temp }}/output \
+            pr:${{ runner.temp }}/binaries/pr/bitcoind
+
+      - name: Upload results
+        uses: actions/upload-artifact@v4
+        with:
+          name: result-${{ matrix.name }}
+          path: ${{ runner.temp }}/output/results.json
+
+      - name: Upload flamegraphs
+        uses: actions/upload-artifact@v4
+        with:
+          name: flamegraph-${{ matrix.name }}
+          path: ${{ runner.temp }}/output/*-flamegraph.svg
+          if-no-files-found: ignore # flamegraph output is optional per matrix entry
+
+      - name: Upload debug logs
+        uses: actions/upload-artifact@v4
+        with:
+          name: debug-logs-${{ matrix.name }}
+          path: ${{ runner.temp }}/output/*-debug.log
+          if-no-files-found: ignore
+
+      - name: Write context metadata
+        env:
+          GITHUB_CONTEXT: ${{ toJSON(github) }}
+          RUNNER_CONTEXT: ${{ toJSON(runner) }}
+        run: |
+          mkdir -p ${{ runner.temp }}/contexts
+          echo "$GITHUB_CONTEXT" | nix develop --command jq "del(.token)" > ${{ runner.temp }}/contexts/github.json
+          echo "$RUNNER_CONTEXT" > ${{ runner.temp }}/contexts/runner.json
+
+      - name: Upload context metadata
+        uses: actions/upload-artifact@v4
+        with:
+          name: run-metadata-${{ matrix.name }}
+          path: ${{ runner.temp }}/contexts/
+
+  benchmark-noav:
+    needs: build-binary
+    strategy:
+      matrix:
+        name: [450-uninstrumented, 32000-uninstrumented]
+    runs-on: [self-hosted, linux, x64]
+    timeout-minutes: 600
+    env:
+      ORIGINAL_DATADIR: /data/pruned-840k
+    steps:
+      - name: Checkout repo
+        uses: actions/checkout@v4
+        with:
+          fetch-depth: 1
+
+      - name: Download binaries
+        uses: actions/download-artifact@v4
+        with:
+          name: bitcoind-binaries
+          path: ${{ runner.temp }}/binaries
+
+      - name: Set binary permissions
+        run: |
+          chmod +x ${{ runner.temp }}/binaries/pr/bitcoind
+
+      - name: Run benchmark
+        run: |
+          nix develop --command python3 bench.py run \
+            --benchmark-config bench/configs/pr-noassumevalid.toml \
+            --matrix-entry ${{ matrix.name }} \
+            --datadir $ORIGINAL_DATADIR \
+            --tmp-datadir ${{ runner.temp }}/datadir \
+            --output-dir ${{ runner.temp }}/output \
+            pr:${{ runner.temp }}/binaries/pr/bitcoind
+
+      - name: Upload results
+        uses: actions/upload-artifact@v4
+        with:
+          name: result-noav-${{ matrix.name }}
+          path: ${{ runner.temp }}/output/results.json
+
+      - name: Upload flamegraphs
+        uses: actions/upload-artifact@v4
+        with:
+          name: flamegraph-noav-${{ matrix.name }}
+          path: ${{ runner.temp }}/output/*-flamegraph.svg
+          if-no-files-found: ignore
+
+      - name: Upload debug logs
+        uses: actions/upload-artifact@v4
+        with:
+          name: debug-logs-noav-${{ matrix.name }}
+          path: ${{ runner.temp }}/output/*-debug.log
+          if-no-files-found: ignore
+
+      - name: Write context metadata
+        env:
+          GITHUB_CONTEXT: ${{ toJSON(github) }}
+          RUNNER_CONTEXT: ${{ toJSON(runner) }}
+        run: |
+          mkdir -p ${{ runner.temp }}/contexts
+          echo "$GITHUB_CONTEXT" | nix develop --command jq "del(.token)" > ${{ runner.temp }}/contexts/github.json
+          echo "$RUNNER_CONTEXT" > ${{ runner.temp }}/contexts/runner.json
+
+      - name: Upload context metadata
+        uses: actions/upload-artifact@v4
+        with:
+          name: run-metadata-noav-${{ matrix.name }}
+          path: ${{ runner.temp }}/contexts/
diff --git a/.github/workflows/nightly-benchmark.yml b/.github/workflows/nightly-benchmark.yml
new file mode 100644
index 000000000000..7ee1930463f4
--- /dev/null
+++ b/.github/workflows/nightly-benchmark.yml
@@ -0,0 +1,265 @@
+name: Nightly Benchmark
+on:
+  workflow_run:
+    workflows: ["Nightly Rebase"]
+    types: [completed]
+  workflow_dispatch:
+
+jobs:
+  build:
+    if: ${{ github.event.workflow_run.conclusion == 'success' || github.event_name == 'workflow_dispatch' }}
+    runs-on: [self-hosted, linux, x64]
+    steps:
+      - name: Checkout repo
+        uses: actions/checkout@v4
+        with:
+          fetch-depth: 50 # Need history for merge-base
+
+      - name: Get commit SHAs
+        run: |
+          # Benchcoin commit (for building)
+          echo "BENCHCOIN_SHA=$(git rev-parse HEAD)" >> "$GITHUB_ENV"
+
+          # Bitcoin commit - find merge-base with upstream
+          git remote add upstream https://github.com/bitcoin/bitcoin.git
+          git fetch upstream master
+          BITCOIN_SHA=$(git merge-base HEAD upstream/master)
+          echo "BITCOIN_SHA=$BITCOIN_SHA" >> "$GITHUB_ENV"
+
+          # Get commit date for the Bitcoin commit (for chart X-axis)
+          COMMIT_DATE=$(git log -1 --format=%cd --date=short "$BITCOIN_SHA")
+          echo "COMMIT_DATE=$COMMIT_DATE" >> "$GITHUB_ENV"
+
+          echo "Benchcoin: $(git rev-parse HEAD)"
+          echo "Bitcoin merge-base: $BITCOIN_SHA"
+          echo "Commit date: $COMMIT_DATE"
+
+      - name: Build master binary
+        run: |
+          nix develop --command python3 bench.py build \
+            -o ${{ runner.temp }}/binaries \
+            $BENCHCOIN_SHA:master
+
+      - name: Upload binaries
+        uses: actions/upload-artifact@v4
+        with:
+          name: nightly-binaries
+          path: ${{ runner.temp }}/binaries/
+
+      - name: Upload commit info
+        run: |
+          echo "$BITCOIN_SHA" > ${{ runner.temp }}/commit.txt
+          echo "$COMMIT_DATE" > ${{ runner.temp }}/commit-date.txt
+      - uses: actions/upload-artifact@v4
+        with:
+          name: commit-info
+          path: |
+            ${{ runner.temp }}/commit.txt
+            ${{ runner.temp }}/commit-date.txt
+
+      - name: Capture machine specs
+        run: |
+          nix develop --command python3 -c "
+          from bench.machine import get_machine_specs
+          import json
+          print(json.dumps(get_machine_specs().to_dict()))
+          " > ${{ runner.temp }}/machine-specs.json
+
+      - name: Upload machine specs
+        uses: actions/upload-artifact@v4
+        with:
+          name: machine-specs
+          path: ${{ runner.temp }}/machine-specs.json
+
+  benchmark-450:
+    needs: build
+    runs-on: [self-hosted, linux, x64]
+    timeout-minutes: 600
+    env:
+      ORIGINAL_DATADIR: /data/pruned-840k
+    steps:
+      - name: Checkout repo
+        uses: actions/checkout@v4
+        with:
+          fetch-depth: 1
+
+      - name: Download binaries
+        uses: actions/download-artifact@v4
+        with:
+          name: nightly-binaries
+          path: ${{ runner.temp }}/binaries
+
+      - name: Set binary permissions
+        run: |
+          chmod +x ${{ runner.temp }}/binaries/master/bitcoind
+
+      - name: Run benchmark
+        run: |
+          nix develop --command python3 bench.py run \
+            --benchmark-config bench/configs/nightly.toml \
+            --matrix-entry 450 \
+            --datadir $ORIGINAL_DATADIR \
+            --tmp-datadir ${{ runner.temp }}/datadir \
+            --output-dir ${{ runner.temp }}/output \
+            master:${{ runner.temp }}/binaries/master/bitcoind
+
+      - name: Upload results
+        uses: actions/upload-artifact@v4
+        with:
+          name: result-nightly-450
+          path: ${{ runner.temp }}/output/results.json
+
+      - name: Upload debug logs
+        uses: actions/upload-artifact@v4
+        with:
+          name: debug-logs-nightly-450
+          path: ${{ runner.temp }}/output/*-debug.log
+          if-no-files-found: ignore
+
+  benchmark-32000:
+    needs: benchmark-450 # chained after the 450 run, not parallel
+    runs-on: [self-hosted, linux, x64]
+    timeout-minutes: 600
+    env:
+      ORIGINAL_DATADIR: /data/pruned-840k
+    steps:
+      - name: Checkout repo
+        uses: actions/checkout@v4
+        with:
+          fetch-depth: 1
+
+      - name: Download binaries
+        uses: actions/download-artifact@v4
+        with:
+          name: nightly-binaries
+          path: ${{ runner.temp }}/binaries
+
+      - name: Set binary permissions
+        run: |
+          chmod +x ${{ runner.temp }}/binaries/master/bitcoind
+
+      - name: Run benchmark
+        run: |
+          nix develop --command python3 bench.py run \
+            --benchmark-config bench/configs/nightly.toml \
+            --matrix-entry 32000 \
+            --datadir $ORIGINAL_DATADIR \
+            --tmp-datadir ${{ runner.temp }}/datadir \
+            --output-dir ${{ runner.temp }}/output \
+            master:${{ runner.temp }}/binaries/master/bitcoind
+
+      - name: Upload results
+        uses: actions/upload-artifact@v4
+        with:
+          name: result-nightly-32000
+          path: ${{ runner.temp }}/output/results.json
+
+      - name: Upload debug logs
+        uses: actions/upload-artifact@v4
+        with:
+          name: debug-logs-nightly-32000
+          path: ${{ runner.temp }}/output/*-debug.log
+          if-no-files-found: ignore
+
+  publish:
+    needs: benchmark-32000
+    runs-on: ubuntu-latest
+    permissions:
+      contents: write
+    steps:
+      - name: Checkout gh-pages
+        uses: actions/checkout@v4
+        with:
+          ref: gh-pages
+
+      - name: Checkout benchcoin tools
+        uses: actions/checkout@v4
+        with:
+          ref: master
+          path: benchcoin-tools
+
+      - name: Download commit info
+        uses: actions/download-artifact@v4
+        with:
+          name: commit-info
+          path: ./commit-info
+
+      - name: Download 450 results
+        uses: actions/download-artifact@v4
+        with:
+          name: result-nightly-450
+          path: ./nightly-450-results
+
+      - name: Download 32000 results
+        uses: actions/download-artifact@v4
+        with:
+          name: result-nightly-32000
+          path: ./nightly-32000-results
+
+      - name: Download machine specs
+        uses: actions/download-artifact@v4
+        with:
+          name: machine-specs
+          path: ./machine-specs
+
+      - name: Install Nix
+        uses: cachix/install-nix-action@v31
+
+      - name: Get dates and trigger
+        run: |
+          # Commit date (for chart X-axis)
+          COMMIT_DATE=$(cat ./commit-info/commit-date.txt)
+          echo "COMMIT_DATE=$COMMIT_DATE" >> "$GITHUB_ENV"
+          # Run date (for reference)
+          echo "RUN_DATE=$(date -u +%Y-%m-%d)" >> "$GITHUB_ENV"
+          # Trigger type (scheduled nightly vs manual dispatch)
+          if [ "${{ github.event_name }}" = "workflow_dispatch" ]; then
+            echo "TRIGGER=manual" >> "$GITHUB_ENV"
+          else
+            echo "TRIGGER=scheduled" >> "$GITHUB_ENV"
+          fi
+
+      - name: Append results to history
+        run: |
+          COMMIT=$(cat ./commit-info/commit.txt)
+
+          # Append 450 (default dbcache) result
+          nix develop ./benchcoin-tools --command python3 benchcoin-tools/bench.py nightly \
+            --history-file ./nightly-history.json \
+            append \
+            ./nightly-450-results/results.json \
+            "$COMMIT" \
+            450 \
+            --date "$COMMIT_DATE" \
+            --run-date "$RUN_DATE" \
+            --trigger "$TRIGGER" \
+            --benchmark-config benchcoin-tools/bench/configs/nightly.toml \
+            --machine-specs ./machine-specs/machine-specs.json
+
+          # Append 32000 (large dbcache) result
+          nix develop ./benchcoin-tools --command python3 benchcoin-tools/bench.py nightly \
+            --history-file ./nightly-history.json \
+            append \
+            ./nightly-32000-results/results.json \
+            "$COMMIT" \
+            32000 \
+            --date "$COMMIT_DATE" \
+            --run-date "$RUN_DATE" \
+            --trigger "$TRIGGER" \
+            --benchmark-config benchcoin-tools/bench/configs/nightly.toml \
+            --machine-specs ./machine-specs/machine-specs.json
+
+      - name: Generate chart
+        run: |
+          nix develop ./benchcoin-tools --command python3 benchcoin-tools/bench.py nightly \
+            --history-file ./nightly-history.json \
+            chart \
+            ./index.html
+
+      - name: Commit and push to gh-pages
+        run: |
+          git config --global user.name "github-actions[bot]"
+          git config --global user.email "github-actions[bot]@users.noreply.github.com"
+          git add nightly-history.json index.html
+          git commit -m "Update nightly benchmark results for $COMMIT_DATE" || echo "No changes to commit"
+          git push origin gh-pages
diff --git a/.github/workflows/publish-results.yml b/.github/workflows/publish-results.yml
new file mode 100644
index 000000000000..27a619527df1
--- /dev/null
+++ b/.github/workflows/publish-results.yml
@@ -0,0 +1,195 @@
+name: Publish Results
+on:
+  workflow_run:
+    workflows: ["Benchmark"]
+    types: [completed]
+jobs:
+  build:
+    runs-on: ubuntu-latest
+    if: ${{ github.event.workflow_run.conclusion == 'success' }}
+    permissions:
+      actions: read
+      contents: write
+      checks: read
+    env:
+      # Matrix entries from configs/pr.toml + pr-noassumevalid.toml
+      NETWORKS: "450-uninstrumented,450-instrumented,32000-uninstrumented,32000-instrumented,noav-450-uninstrumented,noav-32000-uninstrumented"
+    outputs:
+      comparison: ${{ steps.generate.outputs.comparison }}
+      pr-number: ${{ steps.metadata.outputs.pr-number }}
+      result-url: ${{ steps.generate.outputs.result-url }}
+      pages-commit: ${{ steps.push-pages.outputs.pages-commit }}
+    steps:
+      - uses: actions/checkout@v4
+        with:
+          ref: gh-pages
+
+      - name: Checkout benchcoin tools
+        uses: actions/checkout@v4
+        with:
+          ref: master
+          path: benchcoin-tools
+
+      - name: Download artifacts
+        env:
+          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        run: |
+          gh run download ${{ github.event.workflow_run.id }} --repo ${{ github.repository }}
+
+      - name: Extract artifacts
+        run: |
+          for network in ${NETWORKS//,/ }; do
+            # Create network-specific directories with results
+            if [ -d "result-${network}" ]; then
+              mkdir -p "${network}-results"
+              mv "result-${network}/results.json" "${network}-results/"
+            fi
+
+            # Copy flamegraphs into network results directory
+            if [ -d "flamegraph-${network}" ]; then
+              cp -r "flamegraph-${network}"/* "${network}-results/" 2>/dev/null || true
+            fi
+
+            # Copy debug logs into network results directory (needed for plot generation)
+            if [ -d "debug-logs-${network}" ]; then
+              cp -r "debug-logs-${network}"/* "${network}-results/" 2>/dev/null || true
+            fi
+
+            # Keep metadata separate for extraction
+            if [ -d "run-metadata-${network}" ]; then
+              mkdir -p "${network}-metadata"
+              mv "run-metadata-${network}"/* "${network}-metadata/"
+            fi
+          done
+
+      - name: Extract metadata
+        id: metadata
+        run: |
+          # Find PR number, run ID, and commit from any available metadata
+          for network in ${NETWORKS//,/ }; do
+            if [ -f "${network}-metadata/github.json" ]; then
+              PR_NUMBER=$(jq -r '.event.pull_request.number // "main"' "${network}-metadata/github.json")
+              RUN_ID=$(jq -r '.run_id' "${network}-metadata/github.json")
+              HEAD_SHA=$(jq -r '.event.pull_request.head.sha // .sha' "${network}-metadata/github.json")
+              echo "pr-number=${PR_NUMBER}" >> $GITHUB_OUTPUT
+              echo "run-id=${RUN_ID}" >> $GITHUB_OUTPUT
+              echo "head-sha=${HEAD_SHA}" >> $GITHUB_OUTPUT
+              echo "Found metadata: PR=${PR_NUMBER}, Run=${RUN_ID}, Commit=${HEAD_SHA}"
+              break
+            fi
+          done
+
+      - name: Install Nix
+        uses: cachix/install-nix-action@v31
+
+      - name: Generate report
+        id: generate
+        env:
+          PR_NUMBER: ${{ steps.metadata.outputs.pr-number }}
+          RUN_ID: ${{ steps.metadata.outputs.run-id }}
+          HEAD_SHA: ${{ steps.metadata.outputs.head-sha }}
+        run: |
+          # Build network arguments
+          NETWORK_ARGS=""
+          for network in ${NETWORKS//,/ }; do
+            if [ -d "./${network}-results" ]; then
+              NETWORK_ARGS="${NETWORK_ARGS} --network ${network}:./${network}-results"
+            fi
+          done
+
+          # Generate report with nightly comparison (use per-machine history file)
+          nix develop ./benchcoin-tools --command python3 benchcoin-tools/bench.py report \
+            ${NETWORK_ARGS} \
+            --pr-number "${PR_NUMBER}" \
+            --run-id "${RUN_ID}" \
+            --commit "${HEAD_SHA}" \
+            --nightly-history "./nightly-history.json" \
+            --update-index \
+            "./results/pr-${PR_NUMBER}/${RUN_ID}"
+
+          # Build comparison summary for PR comment
+          if [ -f "./nightly-history.json" ]; then
+            COMPARISON=$(jq -r '
+              if .nightly_comparison then
+                .nightly_comparison | to_entries | map(
+                  "\(.key) MB: \(.value.pr_mean / 60 | floor) min"
+                  + if .value.nightly_mean then
+                      " (nightly median of \(.value.nightly_count): \(.value.nightly_mean / 60 | floor) min, \(.value.nightly_date_range)) → "
+                      + if .value.speedup_percent > 0 then "+\(.value.speedup_percent)% faster"
+                        elif .value.speedup_percent < 0 then "\(.value.speedup_percent)% slower"
+                        else "same"
+                        end
+                    else " (no nightly baseline)"
+                    end
+                ) | join("\n- ")
+              else "No comparison data available"
+              end
+            ' "./results/pr-${PR_NUMBER}/${RUN_ID}/results.json")
+          else
+            COMPARISON="No nightly history available for comparison"
+          fi
+          echo "comparison<<EOF" >> $GITHUB_OUTPUT # heredoc syntax for a multiline step output
+          echo "${COMPARISON}" >> $GITHUB_OUTPUT
+          echo "EOF" >> $GITHUB_OUTPUT
+
+          RESULT_URL="https://${{ github.repository_owner }}.github.io/${{ github.event.repository.name }}/results/pr-${PR_NUMBER}/${RUN_ID}/index.html"
+          echo "result-url=${RESULT_URL}" >> $GITHUB_OUTPUT
+
+      - name: Upload Pages artifact
+        uses: actions/upload-pages-artifact@v3
+        with:
+          path: results
+
+      - name: Commit and push to gh-pages
+        id: push-pages
+        run: |
+          git config --global user.name "github-actions[bot]"
+          git config --global user.email "github-actions[bot]@users.noreply.github.com"
+          # Note: Only add results/ directory, not root index.html
+          # The root index.html is managed by nightly-benchmark.yml
+          git add results/
+          git commit -m "Update benchmark results from run ${{ github.event.workflow_run.id }}"
+          git push origin gh-pages
+          echo "pages-commit=$(git rev-parse HEAD)" >> $GITHUB_OUTPUT
+
+  wait-for-pages:
+    needs: build
+    runs-on: ubuntu-latest
+    steps:
+      - name: Wait for GitHub Pages deployment
+        env:
+          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        run: |
+          COMMIT="${{ needs.build.outputs.pages-commit }}"
+          for i in $(seq 1 60); do # poll for up to ~5 minutes (60 x 5s)
+            RUN_ID=$(gh run list -R "${{ github.repository }}" -w "pages-build-deployment" --commit "$COMMIT" -L 1 --json databaseId -q '.[0].databaseId')
+            if [ -n "$RUN_ID" ]; then
+              echo "Found pages-build-deployment run ${RUN_ID} for commit ${COMMIT}"
+              gh run watch "$RUN_ID" -R "${{ github.repository }}"
+              exit 0
+            fi
+            sleep 5
+          done
+          echo "::warning::Could not find pages-build-deployment run for commit ${COMMIT}"
+
+  comment-pr:
+    needs: [build, wait-for-pages]
+    runs-on: ubuntu-latest
+    permissions:
+      pull-requests: write
+      actions: read
+    steps:
+      - name: Comment on PR
+        if: ${{ needs.build.outputs.pr-number != 'main' }} # 'main' is the fallback when no PR number was found
+        env:
+          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        run: |
+          gh pr comment ${{ needs.build.outputs.pr-number }} \
+            --repo ${{ github.repository }} \
+            --body "## Benchmark Results
+
+          **Comparison to nightly master (median of last 7 runs):**
+          - ${{ needs.build.outputs.comparison }}
+
+          [View detailed results](${{ needs.build.outputs.result-url }})
+          [View nightly trend chart](https://${{ github.repository_owner }}.github.io/${{ github.event.repository.name }}/)"
diff --git a/.github/workflows/rebase.yml b/.github/workflows/rebase.yml
new file mode 100644
index 000000000000..30c6f045fbc4
--- /dev/null
+++ b/.github/workflows/rebase.yml
@@ -0,0 +1,35 @@
+name: Nightly Rebase
+
+on:
+  schedule:
+    - cron: '0 1 * * *' # 01:00 GMT daily
+  workflow_dispatch: # manual trigger
+
+permissions:
+  contents: write
+
+jobs:
+  rebase:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v4
+        with:
+          ref: master
+          fetch-depth: 0 # Full history needed for rebase
+          token: ${{ secrets.REBASE_PAT }}
+
+      - name: Configure git
+        run: |
+          git config user.name "github-actions[bot]"
+          git config user.email "github-actions[bot]@users.noreply.github.com"
+
+      - name: Add upstream and rebase
+        run: |
+          git remote add upstream https://github.com/bitcoin/bitcoin.git
+          git fetch upstream master
+          git rebase upstream/master
+
+      - name: Push changes
+        run: |
+          git push --force origin master # rebase rewrites history, so a force-push is required
diff --git a/bench.py b/bench.py
new file mode 100755
index 000000000000..d696b115893a
--- /dev/null
+++ b/bench.py
@@ -0,0 +1,611 @@
+#!/usr/bin/env python3
+"""Benchcoin - Bitcoin Core benchmarking toolkit.
+
+A CLI for building, benchmarking, analyzing, and reporting on Bitcoin Core
+performance. PR results are compared against nightly baseline data.
+
+Usage:
+    bench.py build COMMIT            Build bitcoind at a commit
+    bench.py run NAME:BINARY         Benchmark a binary
+    bench.py analyze COMMIT LOGFILE  Generate plots from debug.log
+    bench.py report OUTPUT           Generate HTML report with nightly comparison
+    bench.py nightly append ...      Append result to nightly history
+    bench.py nightly chart ...       Generate nightly chart HTML
+
+Examples:
+    # Build at HEAD
+    bench.py build HEAD:pr
+
+    # Benchmark built binary
+    bench.py run pr:./binaries/pr/bitcoind --datadir /data
+
+    # Generate HTML report with nightly comparison
+    bench.py report --network 450-uninstrumented:./results --nightly-history ./nightly-history.json ./output
+
+    # Append nightly result and regenerate chart
+    bench.py nightly append results.json abc123 450 --benchmark-config bench/configs/nightly.toml
+    bench.py nightly chart ./index.html
+"""
+
+from __future__ import annotations
+
+import argparse
+import logging
+import sys
+from pathlib import Path
+
+from bench.capabilities import detect_capabilities
+from bench.config import build_config
+
+logging.basicConfig(
+    level=logging.INFO,  # each subcommand raises this to DEBUG when -v is given
+    format="%(levelname)s: %(message)s",
+)
+logger = logging.getLogger(__name__)
+
+
+def cmd_build(args: argparse.Namespace) -> int:
+    """Build bitcoind at a commit."""
+    from bench.build import BuildPhase
+
+    capabilities = detect_capabilities()
+    config = build_config(
+        cli_args={
+            "binaries_dir": args.output_dir,
+            "skip_existing": args.skip_existing,
+            "dry_run": args.dry_run,
+            "verbose": args.verbose,
+        },
+        config_file=Path(args.config) if args.config else None,
+        profile=args.profile,
+    )
+
+    if args.verbose:
+        logging.getLogger().setLevel(logging.DEBUG)
+
+    phase = BuildPhase(config, capabilities)
+
+    try:
+        result = phase.run(
+            args.commit,
+            output_dir=Path(args.output_dir) if args.output_dir else None,
+        )
+        logger.info(f"Built binary: {result.binary.name} at {result.binary.path}")
+        return 0
+    except Exception as e:
+        logger.error(f"Build failed: {e}")
+        return 1
+
+
+def cmd_run(args: argparse.Namespace) -> int:
+    """Run benchmark on a binary."""
+    from bench.benchmark import BenchmarkPhase, parse_binary_spec
+    from bench.benchmark_config import BenchmarkConfig
+
+    capabilities = detect_capabilities()
+
+    # Load benchmark config
+    benchmark_config = BenchmarkConfig.from_toml(Path(args.benchmark_config))
+
+    # Validate benchmark config
+    errors = benchmark_config.validate()
+    if errors:
+        for error in errors:
+            logger.error(f"Config error: {error}")
+        return 1
+
+    # Get matrix entry
+    matrix_entry = benchmark_config.get_matrix_entry(args.matrix_entry)
+    if not matrix_entry:
+        available = benchmark_config.get_matrix_names()
+        logger.error(
+            f"Matrix entry '{args.matrix_entry}' not found. "
+            f"Available: {', '.join(available)}"
+        )
+        return 1
+    logger.info(f"Using matrix entry: {matrix_entry}")
+
+    # In full IBD mode, ignore datadir (sync from genesis)
+    datadir = None if benchmark_config.full_ibd else args.datadir
+
+    # Build config with CLI args and benchmark config values
+    cli_args: dict = {
+        "datadir": datadir,
+        "tmp_datadir": args.tmp_datadir,
+        "output_dir": args.output_dir,
+        "no_cache_drop": args.no_cache_drop,
+        "dry_run": args.dry_run,
+        "verbose": args.verbose,
+        "runs": benchmark_config.runs,
+    }
+
+    # Apply matrix entry values
+    if "dbcache" in matrix_entry:
+        cli_args["dbcache"] = matrix_entry["dbcache"]
+    if "instrumentation" in matrix_entry:
+        cli_args["instrumented"] = matrix_entry["instrumentation"]  # "instrumented" | "uninstrumented"
+
+    config = build_config(
+        cli_args=cli_args,
+        config_file=Path(args.config) if args.config else None,
+        profile=args.profile,
+    )
+
+    if args.verbose:
+        logging.getLogger().setLevel(logging.DEBUG)
+
+    errors = config.validate()
+    if errors:
+        for error in errors:
+            logger.error(error)
+        return 1
+
+    # Parse binary spec
+    try:
+        binary = parse_binary_spec(args.binary)
+    except ValueError as e:
+        logger.error(str(e))
+        return 1
+
+    # Validate binary exists
+    name, path = binary  # (name, path) tuple from parse_binary_spec
+    if not path.exists():
+        logger.error(f"Binary not found: {path} ({name})")
+        return 1
+
+    phase = BenchmarkPhase(config, capabilities, benchmark_config)
+    output_dir = Path(config.output_dir)
+
+    try:
+        result = phase.run(
+            binary=binary,
+            datadir=Path(config.datadir) if config.datadir else None,
+            output_dir=output_dir,
+        )
+        logger.info(f"Results saved to: {result.results_file}")
+
+        # For instrumented runs, also generate plots
+        if config.instrumented == "instrumented" and result.debug_log:
+            from bench.analyze import AnalyzePhase
+
+            analyze_phase = AnalyzePhase()
+            try:
+                analyze_phase.run(
+                    commit=result.name,
+                    log_file=result.debug_log,
+                    output_dir=output_dir / "plots",
+                )
+            except Exception as e:
+                logger.warning(f"Analysis failed: {e}")  # plots are best-effort; benchmark result still counts
+
+        return 0
+    except Exception as e:
+        logger.error(f"Benchmark failed: {e}")
+        if args.verbose:
+            import traceback
+
+            traceback.print_exc()
+        return 1
+
+
+def cmd_analyze(args: argparse.Namespace) -> int:
+    """Generate plots from debug.log."""
+    from bench.analyze import AnalyzePhase
+
+    if args.verbose:
+        logging.getLogger().setLevel(logging.DEBUG)
+
+    log_file = Path(args.log_file)
+    output_dir = Path(args.output_dir)
+
+    if not log_file.exists():
+        logger.error(f"Log file not found: {log_file}")
+        return 1
+
+    phase = AnalyzePhase()
+
+    try:
+        result = phase.run(
+            commit=args.commit,
+            log_file=log_file,
+            output_dir=output_dir,
+        )
+        logger.info(f"Generated {len(result.plots)} plots in {result.output_dir}")
+        return 0
+    except Exception as e:
+        logger.error(f"Analysis failed: {e}")
+        if args.verbose:
+            import traceback
+
+            traceback.print_exc()
+        return 1
+
+
+def cmd_report(args: argparse.Namespace) -> int:
+    """Generate HTML report from benchmark results."""
+    from bench.report import ReportPhase
+
+    if args.verbose:
+        logging.getLogger().setLevel(logging.DEBUG)
+
+    output_dir = Path(args.output_dir)
+    nightly_history_file = Path(args.nightly_history) if args.nightly_history else None
+    phase = ReportPhase(nightly_history_file=nightly_history_file)
+
+    try:
+        # CI multi-network mode
+        if args.networks:
+            network_dirs = {}
+            for spec in args.networks:
+                if ":" not in spec:
+                    logger.error(f"Invalid network spec '{spec}': must be NETWORK:PATH")
+                    return 1
+                network, path = spec.split(":", 1)
+                network_dirs[network] = Path(path)
+
+            # Validate directories exist
+            for network, path in network_dirs.items():
+                if not path.exists():
+                    logger.error(f"Network directory not found: {path} ({network})")
+                    return 1
+
+            result = phase.run_multi_network(
+                network_dirs=network_dirs,
+                output_dir=output_dir,
+                title=args.title or "Benchmark Results",
+                pr_number=args.pr_number,
+                run_id=args.run_id,
+                commit=args.commit,
+            )
+
+            # Update results index if we have a results directory
+            # Note: This writes to results/index.html, not the main index.html
+            # The main index.html is generated by the nightly benchmark chart
+            if args.update_index:
+                results_base = output_dir.parent.parent  # Go up from pr-N/run-id
+                if results_base.exists():
+                    phase.update_index(results_base, results_base / "index.html")
+        else:
+            # Standard single-directory mode
+            input_dir = Path(args.input_dir)
+
+            if not input_dir.exists():
+                logger.error(f"Input directory not found: {input_dir}")
+                return 1
+
+            result = phase.run(
+                input_dir=input_dir,
+                output_dir=output_dir,
+                title=args.title or "Benchmark Results",
+            )
+
+        # Print nightly comparison (speedups vs nightly)
+        if result.speedups:
+            logger.info("Comparison to nightly:")
+            for config, speedup in result.speedups.items():
+                sign = "+" if speedup > 0 else ""
+                logger.info(f"  {config}: {sign}{speedup}%")
+
+        return 0
+    except Exception as e:
+        logger.error(f"Report generation failed: {e}")
+        if args.verbose:
+            import traceback
+
+            traceback.print_exc()
+        return 1
+
+
+def cmd_nightly(args: argparse.Namespace) -> int:
+    """Manage nightly benchmark history and charts."""
+    from bench.nightly import NightlyPhase
+
+    if args.verbose:
+        logging.getLogger().setLevel(logging.DEBUG)
+
+    if not args.nightly_command:
+        logger.error("No nightly subcommand specified. Use 'append' or 'chart'.")
+        return 1
+
+    history_file = Path(args.history_file)
+    phase = NightlyPhase(history_file)
+
+    try:
+        if args.nightly_command == "append":
+            benchmark_config_file = (
+                Path(args.benchmark_config) if args.benchmark_config else None
+            )
+            machine_specs_file = (
+                Path(args.machine_specs) if args.machine_specs else None
+            )
+            phase.append(
+                results_file=Path(args.results_file),
+                commit=args.commit,
+                dbcache=args.dbcache,
+                date_str=args.date,
+                benchmark_config_file=benchmark_config_file,
+                instrumentation=args.instrumentation,
+                machine_specs_file=machine_specs_file,
+                run_date=args.run_date or "",  # "" when --run-date omitted; per CLI help it defaults to today
+                trigger=args.trigger,
+            )
+            logger.info(f"Appended result to {history_file}")
+        elif args.nightly_command == "chart":
+            phase.chart(output_file=Path(args.output_file))
+            logger.info(f"Generated chart at {args.output_file}")
+        return 0
+    except Exception as e:
+        logger.error(f"Nightly operation failed: {e}")
+        if args.verbose:
+            import traceback
+
+            traceback.print_exc()
+        return 1
+
+
+def main() -> int:
+    """Main entry point."""
+    parser = argparse.ArgumentParser(
+        description="Benchcoin - Bitcoin Core benchmarking toolkit",
+        formatter_class=argparse.RawDescriptionHelpFormatter,
+        epilog=__doc__,
+    )
+
+    parser.add_argument(
+        "--config",
+        metavar="PATH",
+        help="Config file (default: bench.toml)",
+    )
+    parser.add_argument(
+        "--profile",
+        choices=["quick", "full", "ci"],
+        default="full",
+        help="Configuration profile (default: full)",
+    )
+    parser.add_argument(
+        "-v",
+        "--verbose",
+        action="store_true",
+        help="Verbose output",
+    )
+    parser.add_argument(
+        "--dry-run",
+        action="store_true",
+        help="Show what would be done without executing",
+    )
+
+    subparsers = parser.add_subparsers(dest="command", help="Commands")
+
+    # Build command
+    build_parser = subparsers.add_parser(
+        "build",
+        help="Build bitcoind at a commit",
+        description="Build bitcoind binary from a git commit. "
+        "Optionally provide a name suffix: COMMIT:NAME",
+    )
+    build_parser.add_argument(
+        "commit",
+        metavar="COMMIT[:NAME]",
+        help="Commit to build. Format: COMMIT or COMMIT:NAME (e.g., HEAD:pr, abc123:test)",
+    )
+    build_parser.add_argument(
+        "-o",
+        "--output-dir",
+        metavar="PATH",
+        help="Where to store binaries (default: ./binaries)",
+    )
+    build_parser.add_argument(
+        "--skip-existing",
+        action="store_true",
+        help="Skip build if binary already exists",
+    )
+    build_parser.set_defaults(func=cmd_build)
+
+    # Run command
+    run_parser = subparsers.add_parser(
+        "run",
+        help="Run benchmark on a binary",
+        description="Benchmark a bitcoind binary using hyperfine.",
+    )
+    run_parser.add_argument(
+        "binary",
+        metavar="NAME:PATH",
+        help="Binary to benchmark. Format: NAME:PATH (e.g., pr:./binaries/pr/bitcoind)",
+    )
+    run_parser.add_argument(
+        "--datadir",
+        metavar="PATH",
+        help="Source datadir with blockchain snapshot (omit for fresh sync)",
+    )
+    run_parser.add_argument(
+        "--tmp-datadir",
+        metavar="PATH",
+        help="Temp datadir for benchmark runs",
+    )
+    run_parser.add_argument(
+        "-o",
+        "--output-dir",
+        metavar="PATH",
+        help="Output directory for results (default: ./bench-output)",
+    )
+    run_parser.add_argument(
+        "--no-cache-drop",
+        action="store_true",
+        help="Skip cache dropping between runs",
+    )
+    run_parser.add_argument(
+        "--benchmark-config",
+        required=True,
+        metavar="PATH",
+        help="Benchmark config TOML file (e.g., bench/configs/pr.toml)",
+    )
+    run_parser.add_argument(
+        "--matrix-entry",
+        required=True,
+        metavar="NAME",
+        help="Matrix entry to run (e.g., '450-uninstrumented', '32000-instrumented')",
+    )
+    run_parser.set_defaults(func=cmd_run)
+
+    # Analyze command
+    analyze_parser = subparsers.add_parser(
+        "analyze", help="Generate plots from debug.log"
+    )
+    analyze_parser.add_argument("commit", help="Commit hash (for naming)")
+    analyze_parser.add_argument("log_file", help="Path to debug.log")
+    analyze_parser.add_argument(
+        "--output-dir",
+        default="./plots",
+        metavar="PATH",
+        help="Output directory for plots",
+    )
+    analyze_parser.set_defaults(func=cmd_analyze)
+
+    # Report command
+    report_parser = subparsers.add_parser(
+        "report",
+        help="Generate HTML report",
+        description="Generate HTML report from benchmark results. "
+        "Use --network for multi-network CI reports.",
+    )
+    report_parser.add_argument(
+        "input_dir",
+        nargs="?",
+        help="Directory with results.json (for single-network mode)",
+    )
+    report_parser.add_argument("output_dir", help="Output directory for report")
+    report_parser.add_argument(
+        "--title",
+        help="Report title",
+    )
+    # CI multi-network options
+    report_parser.add_argument(
+        "--network",
+        dest="networks",
+        action="append",
+        metavar="NAME:PATH",
+        help="Network results directory (repeatable, e.g., --network mainnet:./mainnet-results)",
+    )
+    report_parser.add_argument(
+        "--pr-number",
+        metavar="N",
+        help="PR number (for CI reports)",
+    )
+    report_parser.add_argument(
+        "--run-id",
+        metavar="ID",
+        help="Run ID (for CI reports)",
+    )
+    report_parser.add_argument(
+        "--update-index",
+        action="store_true",
+        help="Update main index.html (for CI reports)",
+    )
+    report_parser.add_argument(
+        "--nightly-history",
+        metavar="PATH",
+        help="Path to nightly-history.json for comparison against nightly baseline",
+    )
+    report_parser.add_argument(
+        "--commit",
+        metavar="SHA",
+        help="PR commit hash (for chart display)",
+    )
+    report_parser.set_defaults(func=cmd_report)
+
+    # Nightly command
+    nightly_parser = subparsers.add_parser(
+        "nightly",
+        help="Manage nightly benchmark history and charts",
+        description="Commands for managing nightly benchmark results history "
+        "and generating the historical trend chart.",
+    )
+    nightly_parser.add_argument(
+        "--history-file",
+        default="nightly-history.json",
+        metavar="PATH",
+        help="Path to nightly history JSON file (default: nightly-history.json)",
+    )
+    nightly_subparsers = nightly_parser.add_subparsers(
+        dest="nightly_command", help="Nightly commands"
+    )
+
+    # nightly append
+    nightly_append = nightly_subparsers.add_parser(
+        "append",
+        help="Append a result to the nightly history",
+        description="Parse a hyperfine results.json file and append the result "
+        "to the nightly history JSON file. Machine specs are automatically captured.",
+    )
+    nightly_append.add_argument(
+        "results_file",
+        help="Path to hyperfine results.json file",
+    )
+    nightly_append.add_argument(
+        "commit",
+        help="Git commit hash",
+    )
+    nightly_append.add_argument(
+        "dbcache",
+        type=int,
+        help="DB cache size in MB (450 or 32000)",
+    )
+    nightly_append.add_argument(
+        "--date",
+        metavar="YYYY-MM-DD",
+        help="Date for this result (default: today)",
+    )
+    nightly_append.add_argument(
+        "--benchmark-config",
+        metavar="PATH",
+        help="Benchmark config TOML file to store with results",
+    )
+    nightly_append.add_argument(
+        "--instrumentation",
+        default="uninstrumented",
+        choices=["uninstrumented", "instrumented"],
+        help="Instrumentation mode (default: uninstrumented)",
+    )
+    nightly_append.add_argument(
+        "--machine-specs",
+        metavar="PATH",
+        help="Path to pre-captured machine specs JSON (default: detect current machine)",
+    )
+    nightly_append.add_argument(
+        "--run-date",
+        metavar="YYYY-MM-DD",
+        help="Date when benchmark was executed (default: today). Stored for reference.",
+    )
+    nightly_append.add_argument(
+        "--trigger",
+        default="scheduled",
+        choices=["scheduled", "manual"],
+        help="How the benchmark was triggered (default: scheduled). 
" + "Scheduled runs dedup by commit; manual runs are always kept.", + ) + + # nightly chart + nightly_chart = nightly_subparsers.add_parser( + "chart", + help="Generate the nightly trend chart HTML", + description="Generate an HTML page with an interactive Plotly chart " + "showing nightly benchmark results over time.", + ) + nightly_chart.add_argument( + "output_file", + help="Path to write the chart HTML (typically index.html)", + ) + + nightly_parser.set_defaults(func=cmd_nightly) + + args = parser.parse_args() + + if not args.command: + parser.print_help() + return 1 + + return args.func(args) + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/bench/README.md b/bench/README.md new file mode 100644 index 000000000000..e41bc7256f4b --- /dev/null +++ b/bench/README.md @@ -0,0 +1,205 @@ +# Benchcoin + +A CLI for benchmarking Bitcoin Core IBD (Initial Block Download). + +## Quick Start + +```bash +# Quick smoke test on signet (requires nix) +nix develop --command python3 bench.py build HEAD:test +nix develop --command python3 bench.py run \ + --benchmark-config bench/configs/test-signet.toml \ + --matrix-entry 450 \ + --datadir /path/to/signet-datadir \ + --output-dir ./output \ + test:./binaries/test/bitcoind + +# Or use just +just test-uninstrumented HEAD /path/to/signet-datadir +``` + +## Requirements + +- **Nix** with flakes enabled (provides hyperfine, flamegraph, etc.) 
+- A blockchain datadir snapshot to benchmark against + +Optional (auto-detected, gracefully degrades without): +- `/run/wrappers/bin/drop-caches` (NixOS) - clears page cache between runs + +## Commands + +``` +bench.py [GLOBAL_OPTIONS] COMMAND [OPTIONS] ARGS + +Global Options: + --profile {quick,full,ci} Configuration profile + --config PATH Custom config file + -v, --verbose Verbose output + --dry-run Show what would run + +Commands: + build Build bitcoind at a commit + run Run benchmark (requires pre-built binary + TOML config) + analyze Generate plots from debug.log + report Generate HTML report + nightly Manage nightly history + generate chart +``` + +### build + +Build a bitcoind binary at a commit: + +```bash +python3 bench.py build HEAD:pr +python3 bench.py build -o /tmp/bins abc123:test +python3 bench.py build --skip-existing HEAD:pr +``` + +### run + +Run a benchmark using a TOML config and matrix entry: + +```bash +python3 bench.py run \ + --benchmark-config bench/configs/pr.toml \ + --matrix-entry 450-uninstrumented \ + --datadir /data/pruned-840k \ + --output-dir ./output \ + pr:./binaries/pr/bitcoind +``` + +Options: +- `--benchmark-config PATH` - TOML config file (required) +- `--matrix-entry NAME` - Matrix entry to run (required) +- `--datadir PATH` - Blockchain datadir snapshot to copy for each run +- `--tmp-datadir PATH` - Working directory for benchmark runs +- `-o, --output-dir PATH` - Output directory for results +- `--no-cache-drop` - Skip clearing page cache between runs + +### analyze + +Generate plots from a debug.log file: + +```bash +python3 bench.py analyze abc123 /path/to/debug.log --output-dir ./plots +``` + +Generates PNG plots for block processing rate, cache usage, transaction counts, LevelDB compaction, and CoinDB write batches. 
+ +### report + +Generate an HTML report from benchmark results: + +```bash +# Single directory +python3 bench.py report ./bench-output ./report + +# Multi-network (CI mode) +python3 bench.py report \ + --network 450-uninstrumented:./results/450 \ + --network 32000-uninstrumented:./results/32000 \ + --nightly-history ./nightly-history.json \ + --pr-number 123 --run-id abc \ + ./output +``` + +### nightly + +Manage nightly benchmark history: + +```bash +# Append a result +python3 bench.py nightly --history-file history.json append \ + results.json COMMIT 450 \ + --benchmark-config bench/configs/nightly.toml \ + --machine-specs machine-specs.json + +# Generate chart +python3 bench.py nightly --history-file history.json chart index.html +``` + +## Benchmark Configs + +Benchmarks are driven by TOML config files in `bench/configs/`: + +| File | Chain | Matrix Entries | Use Case | +|------|-------|----------------|----------| +| `pr.toml` | mainnet | 450/32000 x uninstrumented/instrumented | PR comparison | +| `nightly.toml` | mainnet | 450, 32000 | Nightly baseline | +| `test-signet.toml` | signet | 450 | Quick local smoke test | + +Configs use `start_height = 840000` (resuming from a pruned snapshot) with `runs = 2` (except signet which starts from 0 with `runs = 1`). + +### Matrix Expansion + +The `[bitcoind.matrix]` section defines parameter axes. Their cartesian product generates named entries: + +```toml +[bitcoind.matrix] +dbcache = [450, 32000] +instrumentation = ["uninstrumented", "instrumented"] +# Produces: 450-uninstrumented, 450-instrumented, 32000-uninstrumented, 32000-instrumented +``` + +Select one with `--matrix-entry`. 
+ +## Justfile Recipes + +```bash +just test-instrumented HEAD /path/to/datadir # Signet smoke test with flamegraphs +just test-uninstrumented HEAD /path/to/datadir # Signet smoke test without profiling +just instrumented HEAD /path/to/datadir # Full instrumented benchmark +just build HEAD:pr # Build only +just run /path/to/datadir pr:./binaries/pr/bitcoind # Run with pre-built binary +just analyze COMMIT debug.log ./plots +just report ./input ./output --nightly-history ./nightly-history.json +``` + +## Architecture + +``` +bench.py CLI entry point (argparse) +bench/ +├── config.py Layered configuration (TOML + env + CLI) +├── benchmark_config.py TOML config loader + matrix expansion +├── capabilities.py System capability detection +├── build.py Build phase (nix build) +├── benchmark.py Benchmark phase (hyperfine) +├── analyze.py Plot generation (matplotlib) +├── report.py HTML report generation +├── nightly.py Nightly history + chart generation +└── utils.py Git operations, datadir management +``` + +### Hyperfine Integration + +The benchmark phase generates shell scripts for hyperfine hooks: + +- `setup` - Clean tmp datadir (once before all runs) +- `prepare` - Copy snapshot, drop caches, clean logs (before each run) +- `cleanup` - Clean tmp datadir (after all runs) +- `conclude` - Collect flamegraph/logs (instrumented only) + +### Instrumented Mode + +When `instrumentation = "instrumented"` in the matrix: + +1. Wraps bitcoind in `flamegraph` for CPU profiling +2. Enables debug logging: `coindb`, `leveldb`, `bench`, `validation` +3. Forces `runs=1` (profiling overhead makes multiple runs pointless) +4. 
Generates flamegraph SVGs and performance plots

## CI Integration

GitHub Actions workflows call bench.py directly:

```yaml
- run: |
    nix develop --command python3 bench.py run \
      --benchmark-config bench/configs/pr.toml \
      --matrix-entry ${{ matrix.name }} \
      --datadir $ORIGINAL_DATADIR \
      --tmp-datadir ${{ runner.temp }}/datadir \
      --output-dir ${{ runner.temp }}/output \
      pr:${{ runner.temp }}/binaries/pr/bitcoind
```
diff --git a/bench/__init__.py b/bench/__init__.py
new file mode 100644
index 000000000000..cb50424b155c
--- /dev/null
+++ b/bench/__init__.py
@@ -0,0 +1,3 @@
"""Benchcoin - Bitcoin Core benchmarking toolkit."""

__version__ = "0.1.0"
diff --git a/bench/analyze.py b/bench/analyze.py
new file mode 100644
index 000000000000..c4af916a8d68
--- /dev/null
+++ b/bench/analyze.py
@@ -0,0 +1,538 @@
"""Analyze phase - parse debug.log and generate performance plots.

Refactored from bench-ci/parse_and_plot.py for better structure and reusability.
"""

from __future__ import annotations

import datetime
import logging
import re
from collections import OrderedDict
from dataclasses import dataclass
from pathlib import Path

# matplotlib is optional - gracefully handle if not installed
try:
    import matplotlib.pyplot as plt

    HAS_MATPLOTLIB = True
except ImportError:
    HAS_MATPLOTLIB = False

logger = logging.getLogger(__name__)

# Bitcoin fork heights for plot annotations.
# OrderedDict keeps insertion order for deterministic marker placement.
FORK_HEIGHTS = OrderedDict(
    [
        ("BIP34", 227931),  # Block v2, coinbase includes height
        ("BIP66", 363725),  # Strict DER signatures
        ("BIP65", 388381),  # OP_CHECKLOCKTIMEVERIFY
        ("CSV", 419328),  # BIP68, 112, 113 - OP_CHECKSEQUENCEVERIFY
        ("Segwit", 481824),  # BIP141, 143, 144, 145 - Segregated Witness
        ("Taproot", 709632),  # BIP341, 342 - Schnorr signatures & Taproot
        ("Halving 1", 210000),  # First halving
        ("Halving 2", 420000),  # Second halving
        ("Halving 3", 630000),  # Third halving
        ("Halving 4", 840000),  # Fourth halving
    ]
)
# Per-fork plot styling: consensus forks in blue/green/red, halvings in purple.
FORK_COLORS = {
    "BIP34": "blue",
    "BIP66": "blue",
    "BIP65": "blue",
    "CSV": "blue",
    "Segwit": "green",
    "Taproot": "red",
    "Halving 1": "purple",
    "Halving 2": "purple",
    "Halving 3": "purple",
    "Halving 4": "purple",
}

# Dashed lines for consensus forks, dotted for halvings.
FORK_STYLES = {
    "BIP34": "--",
    "BIP66": "--",
    "BIP65": "--",
    "CSV": "--",
    "Segwit": "--",
    "Taproot": "--",
    "Halving 1": ":",
    "Halving 2": ":",
    "Halving 3": ":",
    "Halving 4": ":",
}


@dataclass
class UpdateTipEntry:
    """Parsed UpdateTip log entry."""

    timestamp: datetime.datetime
    height: int
    tx_count: int
    cache_size_mb: float
    cache_coins_count: int


@dataclass
class LevelDBCompactEntry:
    """Parsed LevelDB compaction log entry."""

    timestamp: datetime.datetime


@dataclass
class LevelDBGenTableEntry:
    """Parsed LevelDB generated table log entry."""

    timestamp: datetime.datetime
    keys_count: int
    bytes_count: int


@dataclass
class ValidationTxAddEntry:
    """Parsed validation transaction added log entry."""

    timestamp: datetime.datetime


@dataclass
class CoinDBWriteBatchEntry:
    """Parsed coindb write batch log entry."""

    timestamp: datetime.datetime
    is_partial: bool
    size_mb: float


@dataclass
class CoinDBCommitEntry:
    """Parsed coindb commit log entry."""

    timestamp: datetime.datetime
    txout_count: int


@dataclass
class ParsedLog:
    """All parsed data from a debug.log file."""

    update_tip: list[UpdateTipEntry]
    leveldb_compact: list[LevelDBCompactEntry]
    leveldb_gen_table: list[LevelDBGenTableEntry]
    validation_txadd: list[ValidationTxAddEntry]
    coindb_write_batch: list[CoinDBWriteBatchEntry]
    coindb_commit: list[CoinDBCommitEntry]


@dataclass
class AnalyzeResult:
    """Result of the analyze phase."""

    commit: str
    output_dir: Path
    plots: list[Path]


class LogParser:
    """Parse bitcoind debug.log files."""

    # Regex patterns. Each captures the ISO-8601 timestamp as group 1.
    UPDATETIP_RE = re.compile(
        r"^([\d\-:TZ]+) UpdateTip: new best.+height=(\d+).+tx=(\d+).+cache=([\d.]+)MiB\((\d+)txo\)"
    )
    LEVELDB_COMPACT_RE = re.compile(r"^([\d\-:TZ]+) \[leveldb] Compacting.*files")
    LEVELDB_GEN_TABLE_RE = re.compile(
        r"^([\d\-:TZ]+) \[leveldb] Generated table.*: (\d+) keys, (\d+) bytes"
    )
    VALIDATION_TXADD_RE = re.compile(
        r"^([\d\-:TZ]+) \[validation] TransactionAddedToMempool: txid=.+wtxid=.+"
    )
    COINDB_WRITE_BATCH_RE = re.compile(
        r"^([\d\-:TZ]+) \[coindb] Writing (partial|final) batch of ([\d.]+) MiB"
    )
    COINDB_COMMIT_RE = re.compile(
        r"^([\d\-:TZ]+) \[coindb] Committed (\d+) changed transaction outputs"
    )

    @staticmethod
    def parse_timestamp(iso_str: str) -> datetime.datetime:
        """Parse ISO 8601 timestamp from log."""
        return datetime.datetime.strptime(iso_str, "%Y-%m-%dT%H:%M:%SZ")

    def parse_file(self, log_file: Path) -> ParsedLog:
        """Parse a debug.log file and extract all relevant data.

        Each line is matched against the class regexes in priority order
        (walrus `:=` keeps the match object); the first hit wins.
        """
        update_tip: list[UpdateTipEntry] = []
        leveldb_compact: list[LevelDBCompactEntry] = []
        leveldb_gen_table: list[LevelDBGenTableEntry] = []
        validation_txadd: list[ValidationTxAddEntry] = []
        coindb_write_batch: list[CoinDBWriteBatchEntry] = []
        coindb_commit: list[CoinDBCommitEntry] = []

        with open(log_file, "r", encoding="utf-8") as f:
            for line in f:
                if match := self.UPDATETIP_RE.match(line):
                    iso_str, height, tx, cache_mb, coins = match.groups()
                    update_tip.append(
                        UpdateTipEntry(
                            timestamp=self.parse_timestamp(iso_str),
                            height=int(height),
                            tx_count=int(tx),
                            cache_size_mb=float(cache_mb),
                            cache_coins_count=int(coins),
                        )
                    )
                elif match := self.LEVELDB_COMPACT_RE.match(line):
                    leveldb_compact.append(
                        LevelDBCompactEntry(
                            timestamp=self.parse_timestamp(match.group(1))
                        )
                    )
                elif match := self.LEVELDB_GEN_TABLE_RE.match(line):
                    iso_str, keys, bytes_count = match.groups()
                    leveldb_gen_table.append(
                        LevelDBGenTableEntry(
                            timestamp=self.parse_timestamp(iso_str),
                            keys_count=int(keys),
                            bytes_count=int(bytes_count),
                        )
                    )
                elif match := self.VALIDATION_TXADD_RE.match(line):
                    validation_txadd.append(
                        ValidationTxAddEntry(
                            timestamp=self.parse_timestamp(match.group(1))
                        )
                    )
                elif match := self.COINDB_WRITE_BATCH_RE.match(line):
                    iso_str, batch_type, size_mb = match.groups()
                    coindb_write_batch.append(
                        CoinDBWriteBatchEntry(
                            timestamp=self.parse_timestamp(iso_str),
                            is_partial=(batch_type == "partial"),
                            size_mb=float(size_mb),
                        )
                    )
                elif match := self.COINDB_COMMIT_RE.match(line):
                    iso_str, txout_count = match.groups()
                    coindb_commit.append(
                        CoinDBCommitEntry(
                            timestamp=self.parse_timestamp(iso_str),
                            txout_count=int(txout_count),
                        )
                    )

        return ParsedLog(
            update_tip=update_tip,
            leveldb_compact=leveldb_compact,
            leveldb_gen_table=leveldb_gen_table,
            validation_txadd=validation_txadd,
            coindb_write_batch=coindb_write_batch,
            coindb_commit=coindb_commit,
        )


class PlotGenerator:
    """Generate performance plots from parsed log data."""

    def __init__(self, commit: str, output_dir: Path):
        self.commit = commit
        self.output_dir = output_dir
        self.generated_plots: list[Path] = []

        if not HAS_MATPLOTLIB:
            raise RuntimeError(
                "matplotlib is required for plot generation. "
                "Install with: pip install matplotlib"
            )

    def generate_all(self, data: ParsedLog) -> list[Path]:
        """Generate all plots from parsed data.

        UpdateTip entries are mandatory (they define the time base); all
        other plot families are emitted only when their data is present.
        """
        if not data.update_tip:
            logger.warning("No UpdateTip entries found, skipping plot generation")
            return []

        # Verify entries are sorted by time (warn once, do not abort).
        for i in range(len(data.update_tip) - 1):
            if data.update_tip[i].timestamp > data.update_tip[i + 1].timestamp:
                logger.warning("UpdateTip entries are not sorted by time")
                break

        # Extract base time for elapsed calculations
        base_time = data.update_tip[0].timestamp

        # Extract data series
        times = [e.timestamp for e in data.update_tip]
        heights = [e.height for e in data.update_tip]
        tx_counts = [e.tx_count for e in data.update_tip]
        cache_sizes = [e.cache_size_mb for e in data.update_tip]
        cache_counts = [e.cache_coins_count for e in data.update_tip]
        elapsed_minutes = [(t - base_time).total_seconds() / 60 for t in times]

        # Generate core plots
        self._plot(
            elapsed_minutes,
            heights,
            "Elapsed minutes",
            "Block Height",
            "Block Height vs Time",
            f"{self.commit}-height_vs_time.png",
        )

        self._plot(
            heights,
            cache_sizes,
            "Block Height",
            "Cache Size (MiB)",
            "Cache Size vs Block Height",
            f"{self.commit}-cache_vs_height.png",
            is_height_based=True,
        )

        self._plot(
            elapsed_minutes,
            cache_sizes,
            "Elapsed minutes",
            "Cache Size (MiB)",
            "Cache Size vs Time",
            f"{self.commit}-cache_vs_time.png",
        )

        self._plot(
            heights,
            tx_counts,
            "Block Height",
            "Transaction Count",
            "Transactions vs Block Height",
            f"{self.commit}-tx_vs_height.png",
            is_height_based=True,
        )

        self._plot(
            heights,
            cache_counts,
            "Block Height",
            "Coins Cache Size",
            "Coins Cache Size vs Height",
            f"{self.commit}-coins_cache_vs_height.png",
            is_height_based=True,
        )

        # LevelDB plots
        if data.leveldb_compact:
            compact_minutes = [
                (e.timestamp - base_time).total_seconds() / 60
                for e in data.leveldb_compact
            ]
            self._plot(
                compact_minutes,
                [1] * len(compact_minutes),  # event markers: constant y
                "Elapsed minutes",
                "LevelDB Compaction",
                "LevelDB Compaction Events vs Time",
                f"{self.commit}-leveldb_compact_vs_time.png",
            )

        if data.leveldb_gen_table:
            gen_minutes = [
                (e.timestamp - base_time).total_seconds() / 60
                for e in data.leveldb_gen_table
            ]
            gen_keys = [e.keys_count for e in data.leveldb_gen_table]
            gen_bytes = [e.bytes_count for e in data.leveldb_gen_table]

            self._plot(
                gen_minutes,
                gen_keys,
                "Elapsed minutes",
                "Number of keys",
                "LevelDB Keys Generated vs Time",
                f"{self.commit}-leveldb_gen_keys_vs_time.png",
            )

            self._plot(
                gen_minutes,
                gen_bytes,
                "Elapsed minutes",
                "Number of bytes",
                "LevelDB Bytes Generated vs Time",
                f"{self.commit}-leveldb_gen_bytes_vs_time.png",
            )

        # Validation plots
        if data.validation_txadd:
            txadd_minutes = [
                (e.timestamp - base_time).total_seconds() / 60
                for e in data.validation_txadd
            ]
            self._plot(
                txadd_minutes,
                [1] * len(txadd_minutes),  # event markers: constant y
                "Elapsed minutes",
                "Transaction Additions",
                "Transaction Additions to Mempool vs Time",
                f"{self.commit}-validation_txadd_vs_time.png",
            )

        # CoinDB plots
        if data.coindb_write_batch:
            batch_minutes = [
                (e.timestamp - base_time).total_seconds() / 60
                for e in data.coindb_write_batch
            ]
            batch_sizes = [e.size_mb for e in data.coindb_write_batch]
            self._plot(
                batch_minutes,
                batch_sizes,
                "Elapsed minutes",
                "Batch Size MiB",
                "Coin Database Partial/Final Write Batch Size vs Time",
                f"{self.commit}-coindb_write_batch_size_vs_time.png",
            )

        if data.coindb_commit:
            commit_minutes = [
                (e.timestamp - base_time).total_seconds() / 60
                for e in data.coindb_commit
            ]
            commit_txouts = [e.txout_count for e in data.coindb_commit]
            self._plot(
                commit_minutes,
                commit_txouts,
                "Elapsed minutes",
                "Transaction Output Count",
                "Coin Database Transaction Output Committed vs Time",
                f"{self.commit}-coindb_commit_txout_vs_time.png",
            )

        return self.generated_plots

    def _plot(
        self,
        x: list,
        y: list,
        x_label: str,
        y_label: str,
        title: str,
        filename: str,
        is_height_based: bool = False,
    ) -> None:
        """Generate a single plot.

        Saves a scatter plot to ``output_dir / filename`` and records the
        path in ``self.generated_plots``. When ``is_height_based`` is set,
        vertical fork/halving markers are overlaid.
        """
        if not x or not y:
            logger.debug(f"Skipping plot '{title}' - no data")
            return

        plt.figure(figsize=(30, 10))
        plt.scatter(x, y, alpha=0.6, s=20)
        plt.title(title, fontsize=20)
        plt.xlabel(x_label, fontsize=16)
        plt.ylabel(y_label, fontsize=16)
        plt.grid(True)

        min_x, max_x = min(x), max(x)
        if min_x < max_x:
            plt.xlim(min_x, max_x)

        # Add fork markers for height-based plots
        if is_height_based:
            self._add_fork_markers(min_x, max_x, max(y))

        plt.xticks(rotation=90, fontsize=12)
        plt.yticks(fontsize=12)
        plt.tight_layout()

        output_path = self.output_dir / filename
        plt.savefig(output_path)
        plt.close()  # release the figure to avoid leaking memory across many plots

        self.generated_plots.append(output_path)
        logger.info(f"Saved plot: {output_path}")

    def _add_fork_markers(self, min_x: float, max_x: float, max_y: float) -> None:
        """Add vertical lines for Bitcoin forks.

        Labels are staggered downward so markers at the same height do not
        overlap; the cursor wraps back to the top once it nears the bottom.
        """
        text_positions = {}
        position_increment = max_y * 0.05
        current_position = max_y * 0.9

        for fork_name, height in FORK_HEIGHTS.items():
            if min_x <= height <= max_x:
                plt.axvline(
                    x=height,
                    color=FORK_COLORS[fork_name],
                    linestyle=FORK_STYLES[fork_name],
                )

                if height in text_positions:
                    # Second label at the same height: stack it lower.
                    text_positions[height] -= position_increment
                else:
                    text_positions[height] = current_position
                    current_position -= position_increment
                    if current_position < max_y * 0.1:
                        current_position = max_y * 0.9

                plt.text(
                    height,
                    text_positions[height],
                    f"{fork_name} ({height})",
                    rotation=90,
                    verticalalignment="top",
                    color=FORK_COLORS[fork_name],
                )


class AnalyzePhase:
    """Analyze benchmark results and generate plots."""

    def run(
        self,
        commit: str,
        log_file: Path,
        output_dir: Path,
    ) -> AnalyzeResult:
        """Analyze a debug.log and generate plots.

        Args:
            commit: Commit hash (for naming)
            log_file: Path to debug.log
            output_dir: Where to save plots

        Returns:
            AnalyzeResult with paths to generated plots

        Raises:
            RuntimeError: if matplotlib is not installed.
            FileNotFoundError: if ``log_file`` does not exist.
        """
        if not HAS_MATPLOTLIB:
            raise RuntimeError(
                "matplotlib is required for plot generation. "
                "Install with: pip install matplotlib"
            )

        if not log_file.exists():
            raise FileNotFoundError(f"Log file not found: {log_file}")

        output_dir.mkdir(parents=True, exist_ok=True)

        logger.info(f"Parsing log file: {log_file}")
        parser = LogParser()
        data = parser.parse_file(log_file)

        # Log parsed data summary
        logger.info(f"  UpdateTip entries: {len(data.update_tip)}")
        logger.info(f"  LevelDB compact entries: {len(data.leveldb_compact)}")
        logger.info(f"  LevelDB gen table entries: {len(data.leveldb_gen_table)}")
        logger.info(f"  Validation txadd entries: {len(data.validation_txadd)}")
        logger.info(f"  CoinDB write batch entries: {len(data.coindb_write_batch)}")
        logger.info(f"  CoinDB commit entries: {len(data.coindb_commit)}")

        # Short hash (12 chars) is used for plot file naming.
        logger.info(f"Generating plots for {commit[:12]}")
        logger.info(f"  Output directory: {output_dir}")
        generator = PlotGenerator(commit[:12], output_dir)
        plots = generator.generate_all(data)

        logger.info(f"Generated {len(plots)} plots")

        return AnalyzeResult(
            commit=commit,
            output_dir=output_dir,
            plots=plots,
        )
diff --git a/bench/benchmark.py b/bench/benchmark.py
new file mode 100644
index 000000000000..ad89c72a4634
--- /dev/null
+++ b/bench/benchmark.py
@@ -0,0 +1,359 @@
"""Benchmark phase - run hyperfine benchmark on a bitcoind binary."""

from __future__ import annotations

import logging
import os
import shutil
import subprocess
import tempfile
from dataclasses import dataclass
from pathlib import Path
from typing import TYPE_CHECKING

from .patchelf import ensure_binary_runnable

if TYPE_CHECKING:
    from .benchmark_config import BenchmarkConfig
    from .capabilities import Capabilities
    from .config import Config
logger = logging.getLogger(__name__)


def _find_mount_point(path: Path) -> Path:
    """Walk up from path to find its mount point.

    Terminates at the filesystem root, which is always a mount point.
    """
    path = path.resolve()
    while not path.is_mount():
        path = path.parent
    return path


@dataclass
class BenchmarkResult:
    """Result of the benchmark phase."""

    results_file: Path
    instrumented: str  # "uninstrumented" or "instrumented"
    name: str
    flamegraph: Path | None = None
    debug_log: Path | None = None


def parse_binary_spec(spec: str) -> tuple[str, Path]:
    """Parse a binary spec like 'name:/path/to/binary'.

    Returns (name, path).

    Raises:
        ValueError: if the spec has no ':' separator or an empty name.
    """
    if ":" not in spec:
        raise ValueError(f"Invalid binary spec '{spec}': must be NAME:PATH")
    # Split on the first ':' only, so Windows-style or URL-like paths survive.
    name, path_str = spec.split(":", 1)
    if not name:
        raise ValueError(f"Invalid binary spec '{spec}': name cannot be empty")
    return name, Path(path_str)


class BenchmarkPhase:
    """Run hyperfine benchmark on a bitcoind binary."""

    def __init__(
        self,
        config: Config,
        capabilities: Capabilities,
        benchmark_config: BenchmarkConfig | None = None,
    ):
        self.config = config
        self.capabilities = capabilities
        self.benchmark_config = benchmark_config
        # Temp hook scripts created for hyperfine; removed in run()'s finally.
        self._temp_scripts: list[Path] = []

    @property
    def is_instrumented(self) -> bool:
        """Whether this benchmark run is instrumented (flamegraphs, debug logs)."""
        return self.config.is_instrumented

    def run(
        self,
        binary: tuple[str, Path],
        datadir: Path | None,
        output_dir: Path,
    ) -> BenchmarkResult:
        """Run benchmark on given binary.

        Args:
            binary: Tuple of (name, binary_path)
            datadir: Source datadir with blockchain snapshot (None for fresh sync)
            output_dir: Where to store results

        Returns:
            BenchmarkResult with paths to outputs

        Raises:
            FileNotFoundError: if the binary does not exist.
            RuntimeError: if the binary cannot be made runnable or
                benchmark prerequisites are not met.
        """
        name, binary_path = binary

        # Validate binary exists
        if not binary_path.exists():
            raise FileNotFoundError(f"Binary not found: {binary_path} ({name})")

        # Ensure binary can run on this system (patches guix binaries on NixOS)
        if not ensure_binary_runnable(binary_path):
            raise RuntimeError(
                f"Binary {name} at {binary_path} cannot be made runnable"
            )

        # Check prerequisites
        errors = self.capabilities.check_for_run(self.config.instrumented)
        if errors:
            raise RuntimeError("Benchmark prerequisites not met:\n" + "\n".join(errors))

        # Log warnings about missing optional capabilities
        for warning in self.capabilities.get_warnings():
            logger.warning(warning)

        # Setup directories
        output_dir.mkdir(parents=True, exist_ok=True)
        assert self.config.tmp_datadir is not None
        tmp_datadir = Path(self.config.tmp_datadir)
        tmp_datadir.mkdir(parents=True, exist_ok=True)

        results_file = output_dir / "results.json"

        logger.info("Starting benchmark")
        logger.info(f"  Output dir: {output_dir}")
        logger.info(f"  Temp datadir: {tmp_datadir}")
        if datadir:
            logger.info(f"  Source datadir: {datadir}")
        else:
            logger.info("  Mode: Fresh sync (no source datadir)")
        logger.info(f"  Binary: {name} at {binary_path}")
        logger.info(f"  Instrumented: {self.config.instrumented}")
        logger.info(f"  Runs: {self.config.runs}")
        logger.info(f"  dbcache: {self.config.dbcache}")
        if self.benchmark_config:
            logger.info(f"  Config: {self.benchmark_config.source_file}")

        try:
            # Create hook scripts for hyperfine
            setup_script = self._create_setup_script(tmp_datadir)
            prepare_script = self._create_prepare_script(tmp_datadir, datadir)
            cleanup_script = self._create_cleanup_script(tmp_datadir)

            # Build hyperfine command
            cmd = self._build_hyperfine_cmd(
                name=name,
                binary_path=binary_path,
                tmp_datadir=tmp_datadir,
                results_file=results_file,
                setup_script=setup_script,
                prepare_script=prepare_script,
                cleanup_script=cleanup_script,
                output_dir=output_dir,
            )

            # Log the command being benchmarked
            bitcoind_cmd = self._build_bitcoind_cmd(binary_path, tmp_datadir)
            logger.info(f"Command to benchmark: {bitcoind_cmd}")

            if self.config.dry_run:
                logger.info(f"[DRY RUN] Would run: {' '.join(cmd)}")
                return BenchmarkResult(
                    results_file=results_file,
                    instrumented=self.config.instrumented,
                    name=name,
                )

            # Log the full hyperfine command
            logger.info("Running hyperfine...")
            logger.debug(f"  Full command: {' '.join(cmd)}")
            subprocess.run(cmd, check=True)

            # Collect result
            result = BenchmarkResult(
                results_file=results_file,
                instrumented=self.config.instrumented,
                name=name,
            )

            # Collect debug log (all runs)
            debug_log_file = output_dir / f"{name}-debug.log"
            if debug_log_file.exists():
                result.debug_log = debug_log_file
                logger.info(f"Collected debug log: {debug_log_file}")

            # For instrumented runs, also collect flamegraph
            if self.is_instrumented:
                flamegraph_file = output_dir / f"{name}-flamegraph.svg"
                if flamegraph_file.exists():
                    result.flamegraph = flamegraph_file
                    logger.info(f"Collected flamegraph: {flamegraph_file}")

            # Clean up tmp_datadir
            # NOTE(review): this removal runs only on the success path; a failed
            # hyperfine run (CalledProcessError) leaves tmp_datadir behind —
            # confirm that is intended (e.g. for post-mortem inspection).
            if tmp_datadir.exists():
                logger.debug(f"Cleaning up tmp_datadir: {tmp_datadir}")
                shutil.rmtree(tmp_datadir)

            return result

        finally:
            # Clean up temp scripts
            for script in self._temp_scripts:
                if script.exists():
                    script.unlink()
            self._temp_scripts.clear()

    def _create_temp_script(self, commands: list[str], name: str) -> Path:
        """Create a temporary shell script.

        The script runs under `set -euxo pipefail` so any failing hook
        command aborts the benchmark run loudly.
        """
        content = "#!/usr/bin/env bash\nset -euxo pipefail\n"
        content += "\n".join(commands) + "\n"

        fd, path = tempfile.mkstemp(suffix=".sh", prefix=f"bench_{name}_")
        os.write(fd, content.encode())
        os.close(fd)
        os.chmod(path, 0o755)  # executable for hyperfine's shell

        script_path = Path(path)
        self._temp_scripts.append(script_path)
        logger.debug(f"Created {name} script: {script_path}")
        for cmd in commands:
            logger.debug(f"  {cmd}")
        return script_path

    def _create_setup_script(self, tmp_datadir: Path) -> Path:
        """Create setup script (runs once before all timing runs)."""
        commands = [
            f'mkdir -p "{tmp_datadir}"',
            f'rm -rf "{tmp_datadir}"/*',
        ]

        # TRIM SSD once before benchmarking for consistent write performance
        if self.capabilities.can_fstrim:
            mount = _find_mount_point(tmp_datadir)
            commands.append(f'{self.capabilities.fstrim_path} "{mount}"')
        return self._create_temp_script(commands, "setup")

    def _create_prepare_script(
        self, tmp_datadir: Path, original_datadir: Path | None
    ) -> Path:
        """Create prepare script (runs before each timing run)."""
        commands = [
            f'rm -rf "{tmp_datadir}"/*',
        ]

        # Copy datadir if provided (skip for fresh sync)
        if original_datadir:
            commands.append(f'cp -r "{original_datadir}"/* "{tmp_datadir}"')

        # Drop caches if available
        if self.capabilities.can_drop_caches and not self.config.no_cache_drop:
            commands.append(self.capabilities.drop_caches_path)

        # Clean debug logs (best-effort: `|| true` keeps set -e from aborting)
        commands.append(
            f'find "{tmp_datadir}" -name debug.log -delete 2>/dev/null || true'
        )

        return self._create_temp_script(commands, "prepare")

    def _create_cleanup_script(self, tmp_datadir: Path) -> Path:
        """Create cleanup script (runs after all timing runs)."""
        commands = [
            f'rm -rf "{tmp_datadir}"/*',
        ]
        return self._create_temp_script(commands, "cleanup")

    def _build_bitcoind_cmd(
        self,
        binary: Path,
        tmp_datadir: Path,
    ) -> str:
        """Build the bitcoind command string for hyperfine.

        For instrumented runs the command is wrapped in `flamegraph`
        (perf-based CPU profiling). Extra bitcoind args come from the
        benchmark TOML config.
        """
        if not self.benchmark_config:
            raise ValueError("benchmark_config is required")

        parts = []

        # Add flamegraph wrapper for instrumented mode
        if self.is_instrumented:
            parts.append("flamegraph")
            parts.append("--palette bitcoin")
            parts.append("--title 'bitcoind IBD'")
            parts.append("-c 'record -F 101 --call-graph fp'")
            parts.append("--")

        # Bitcoind command
        parts.append(str(binary))
        parts.append(f"-datadir={tmp_datadir}")

        # Add dbcache from matrix entry
        parts.append(f"-dbcache={self.config.dbcache}")

        # Add all bitcoind args from benchmark config
        # NOTE(review): reaches into a private method of BenchmarkConfig
        # (_format_bitcoind_arg) — consider promoting it to a public API.
        for key, value in self.benchmark_config.bitcoind_args.items():
            formatted = self.benchmark_config._format_bitcoind_arg(key, value)
            if formatted:
                parts.append(formatted)

        # Debug flags for instrumented mode
        if self.is_instrumented and self.benchmark_config.instrumented_debug:
            for flag in self.benchmark_config.instrumented_debug:
                parts.append(f"-debug={flag}")

        return " ".join(parts)

    def _build_hyperfine_cmd(
        self,
        name: str,
        binary_path: Path,
        tmp_datadir: Path,
        results_file: Path,
        setup_script: Path,
        prepare_script: Path,
        cleanup_script: Path,
        output_dir: Path,
    ) -> list[str]:
        """Build the hyperfine command."""
        cmd = [
            "hyperfine",
            "--shell=bash",
            f"--setup={setup_script}",
            f"--prepare={prepare_script}",
            f"--cleanup={cleanup_script}",
            f"--runs={self.config.runs}",
            f"--export-json={results_file}",
            "--show-output",
            f"--command-name={name}",
        ]

        # Build the actual command to benchmark
        bitcoind_cmd = self._build_bitcoind_cmd(binary_path, tmp_datadir)

        # Append conclude logic (debug.log for all, flamegraph for instrumented)
        # NOTE: chained with '&&', so the conclude step is part of the timed
        # command string hyperfine executes.
        conclude = self._create_conclude_commands(name, tmp_datadir, output_dir)
        bitcoind_cmd += f" && {conclude}"

        cmd.append(bitcoind_cmd)

        return cmd

    def _create_conclude_commands(
        self,
        name: str,
        tmp_datadir: Path,
        output_dir: Path,
    ) -> str:
        """Create inline conclude commands for the binary."""
        commands = []

        # Move flamegraph if exists (instrumented only)
        if self.is_instrumented:
            commands.append(
                f'if [ -e flamegraph.svg ]; then mv flamegraph.svg "{output_dir}/{name}-flamegraph.svg"; fi'
            )
# Copy debug log if exists (all runs) + commands.append( + f'debug_log=$(find "{tmp_datadir}" -name debug.log -print -quit); ' + f'if [ -n "$debug_log" ]; then cp "$debug_log" "{output_dir}/{name}-debug.log"; fi' + ) + + return " && ".join(commands) diff --git a/bench/benchmark_config.py b/bench/benchmark_config.py new file mode 100644 index 000000000000..c975c146f736 --- /dev/null +++ b/bench/benchmark_config.py @@ -0,0 +1,260 @@ +"""Benchmark configuration from TOML files. + +Provides a portable, reproducible benchmark config that can be shared +to run identical benchmarks on different machines. +""" + +from __future__ import annotations + +import itertools +import logging +import tomllib +from dataclasses import dataclass, field +from pathlib import Path +from typing import Any + +logger = logging.getLogger(__name__) + + +@dataclass +class BenchmarkConfig: + """Benchmark configuration loaded from TOML. + + This represents a portable benchmark specification that can be shared + to reproduce benchmarks on different machines. + """ + + # Benchmark metadata + full_ibd: bool + start_height: int + runs: int + + # Parameter matrix - each key maps to list of values + # These create multiple benchmark configurations + matrix: dict[str, list[Any]] = field(default_factory=dict) + + # All bitcoind flags (optional - empty/missing values excluded from command) + bitcoind_args: dict[str, Any] = field(default_factory=dict) + + # Instrumented mode debug flags + instrumented_debug: list[str] = field(default_factory=list) + + # Source file path (for reference) + source_file: Path | None = None + + @classmethod + def from_toml(cls, path: Path) -> BenchmarkConfig: + """Load configuration from a TOML file. + + Expected format: + [benchmark] + start_height = 840000 + runs = 2 + + [bitcoind] + stopatheight = 855000 + chain = "main" + connect = "..." 
+ prune = 1000000 + daemon = false + printtoconsole = false + + [bitcoind.matrix] + dbcache = [450, 32000] + instrumentation = ["uninstrumented", "instrumented"] + + [bitcoind.instrumented] + debug = ["coindb", "leveldb", "bench", "validation"] + """ + with open(path, "rb") as f: + data = tomllib.load(f) + + benchmark = data.get("benchmark", {}) + bitcoind = data.get("bitcoind", {}).copy() + + # Extract matrix from bitcoind section + matrix: dict[str, list[Any]] = bitcoind.pop("matrix", {}) + + # Extract instrumented debug flags (separate from regular bitcoind args) + instrumented = bitcoind.pop("instrumented", {}) + instrumented_debug = instrumented.get("debug", []) + + # Full IBD mode: skip datadir copy, sync from genesis + full_ibd = benchmark.get("full_ibd", False) + start_height = 0 if full_ibd else benchmark.get("start_height", 0) + + config = cls( + full_ibd=full_ibd, + start_height=start_height, + runs=benchmark.get("runs", 3), + matrix=matrix, + bitcoind_args=bitcoind, + instrumented_debug=instrumented_debug, + source_file=path, + ) + + logger.info(f"Loaded benchmark config from {path}") + if config.full_ibd: + logger.info(" Mode: Full IBD (fresh sync from genesis)") + logger.info(f" Start height: {config.start_height}, Runs: {config.runs}") + if config.matrix: + logger.info(f" Matrix parameters: {list(config.matrix.keys())}") + if config.bitcoind_args: + logger.info(f" Bitcoind flags: {list(config.bitcoind_args.keys())}") + + return config + + @staticmethod + def _value_to_name(value: Any) -> str: + """Convert a matrix value to a name string.""" + if isinstance(value, bool): + return str(value).lower() + return str(value) + + def expand_matrix(self) -> list[dict[str, Any]]: + """Expand parameter matrix into list of configurations. 
+ + Returns list of dicts, each containing: + - name: combined name from values like "450-uninstrumented" + - All parameter values from the matrix + + Example: + matrix = { + 'dbcache': [450, 32000], + 'instrumentation': ['uninstrumented', 'instrumented'] + } + + Returns: + [ + {'name': '450-uninstrumented', 'dbcache': 450, 'instrumentation': 'uninstrumented'}, + {'name': '450-instrumented', 'dbcache': 450, 'instrumentation': 'instrumented'}, + {'name': '32000-uninstrumented', 'dbcache': 32000, 'instrumentation': 'uninstrumented'}, + {'name': '32000-instrumented', 'dbcache': 32000, 'instrumentation': 'instrumented'}, + ] + """ + if not self.matrix: + return [{"name": "default"}] + + # Get all parameter names and their values + param_names = list(self.matrix.keys()) + param_values = [self.matrix[name] for name in param_names] + + # Generate all combinations + results = [] + for combination in itertools.product(*param_values): + entry: dict[str, Any] = {} + + # Build combined name from values + name_parts = [self._value_to_name(v) for v in combination] + entry["name"] = "-".join(name_parts) + + # Add each parameter value + for param_name, value in zip(param_names, combination): + entry[param_name] = value + + results.append(entry) + + return results + + def get_matrix_entry(self, name: str) -> dict[str, Any] | None: + """Get a specific matrix entry by its combined name. 
+ + Args: + name: Combined name like "default-uninstrumented" + + Returns: + Dict with parameter values, or None if not found + """ + for entry in self.expand_matrix(): + if entry["name"] == name: + return entry + return None + + def get_matrix_names(self) -> list[str]: + """Get list of all matrix entry names.""" + return [entry["name"] for entry in self.expand_matrix()] + + def _format_bitcoind_arg(self, key: str, value: Any) -> str | None: + """Format a single bitcoind argument, returning None if it should be skipped.""" + # Skip empty strings and None + if value is None or value == "": + return None + + # Format based on type + if isinstance(value, bool): + return f"-{key}={1 if value else 0}" + else: + return f"-{key}={value}" + + def generate_command_template(self) -> str: + """Generate bitcoind command template with placeholders. + + Placeholders use {param} format for matrix parameters. + Empty/missing bitcoind args are excluded. + + Returns command like: + bitcoind -datadir={datadir} -dbcache={dbcache} -stopatheight=855000 ... + """ + parts = ["bitcoind"] + + # Placeholder for datadir (always user-provided) + parts.append("-datadir={datadir}") + + # Matrix parameters as placeholders + for param_name in self.matrix.keys(): + if ( + param_name != "instrumentation" + ): # instrumentation is a mode, not a bitcoind param + parts.append(f"-{param_name}={{{param_name}}}") + + # Bitcoind args from config (skip empty/missing) + for key, value in self.bitcoind_args.items(): + formatted = self._format_bitcoind_arg(key, value) + if formatted: + parts.append(formatted) + + return " ".join(parts) + + def get_bitcoind_arg(self, key: str, default: Any = None) -> Any: + """Get a bitcoind arg value, with optional default.""" + return self.bitcoind_args.get(key, default) + + def to_dict(self) -> dict[str, Any]: + """Convert to dictionary for JSON serialization. + + This captures the config for logging with results. 
+ """ + result: dict[str, Any] = { + "full_ibd": self.full_ibd, + "start_height": self.start_height, + "runs": self.runs, + "command_template": self.generate_command_template(), + } + + # Include non-empty bitcoind args + bitcoind = {k: v for k, v in self.bitcoind_args.items() if v not in (None, "")} + if bitcoind: + result["bitcoind"] = bitcoind + + # Include matrix definition + if self.matrix: + result["matrix"] = self.matrix + + return result + + def validate(self) -> list[str]: + """Validate configuration, return list of errors.""" + errors = [] + + if self.start_height < 0: + errors.append("start_height must be non-negative") + + if self.runs < 1: + errors.append("runs must be positive") + + # Validate matrix entries are non-empty lists + for param_name, values in self.matrix.items(): + if not values: + errors.append(f"matrix.{param_name} must have at least one value") + + return errors diff --git a/bench/build.py b/bench/build.py new file mode 100644 index 000000000000..7e51e2140030 --- /dev/null +++ b/bench/build.py @@ -0,0 +1,165 @@ +"""Build phase - compile bitcoind at a specified commit.""" + +from __future__ import annotations + +import logging +import shutil +import subprocess +from dataclasses import dataclass +from pathlib import Path +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from .capabilities import Capabilities + from .config import Config + +from .utils import GitState, git_checkout, git_rev_parse + +logger = logging.getLogger(__name__) + + +@dataclass +class BuiltBinary: + """A built binary.""" + + name: str + path: Path + commit: str + + +@dataclass +class BuildResult: + """Result of the build phase.""" + + binary: BuiltBinary + + +def parse_commit_spec(spec: str) -> tuple[str, str | None]: + """Parse a commit spec like 'abc123:name' or 'abc123'. + + Returns (commit, name) where name may be None. 
class BuildPhase:
    """Build bitcoind binary at a specified commit.

    Checks out the requested commit in the local repo, builds it with
    `nix build`, copies the resulting binary into the output directory,
    and restores the original git state afterwards.
    """

    def __init__(
        self,
        config: Config,
        capabilities: Capabilities,
        repo_path: Path | None = None,
    ):
        self.config = config
        self.capabilities = capabilities
        # Default to the current working directory, which is assumed to be
        # the bitcoin repository checkout.
        self.repo_path = repo_path or Path.cwd()

    def run(
        self,
        commit_spec: str,
        output_dir: Path | None = None,
    ) -> BuildResult:
        """Build bitcoind at given commit.

        Args:
            commit_spec: Commit spec like 'abc123:name' or 'abc123'
            output_dir: Where to store binary (default: ./binaries)

        Returns:
            BuildResult with the built binary

        Raises:
            RuntimeError: if build prerequisites are missing or the build fails.
        """
        # Check prerequisites (e.g. nix available) before touching git state.
        errors = self.capabilities.check_for_build()
        if errors:
            raise RuntimeError("Build prerequisites not met:\n" + "\n".join(errors))

        output_dir = output_dir or Path(self.config.binaries_dir)

        # Parse commit spec and resolve to full hash
        commit, name = parse_commit_spec(commit_spec)
        commit_hash = git_rev_parse(commit, self.repo_path)

        # Default name to short hash if not provided
        if name is None:
            name = commit_hash[:12]

        logger.info(f"Building binary: {name} ({commit_hash[:12]})")
        logger.info(f"  Repo: {self.repo_path}")
        logger.info(f"  Output: {output_dir}")

        # Setup output path: binaries live at <output_dir>/<name>/bitcoind
        binary_dir = output_dir / name
        binary_dir.mkdir(parents=True, exist_ok=True)
        binary_path = binary_dir / "bitcoind"

        # Check if we can skip existing build.
        # NOTE(review): skip_existing keys on the output path only, not the
        # commit hash - an existing binary built from a different commit
        # would be reused; confirm this is intended.
        if self.config.skip_existing and binary_path.exists():
            logger.info(f"  Skipping {name} - binary exists")
            return BuildResult(
                binary=BuiltBinary(name=name, path=binary_path, commit=commit_hash)
            )

        # Save git state for restoration after the checkout/build
        git_state = GitState(self.repo_path)
        git_state.save()

        try:
            self._build_commit(name, commit_hash, binary_path)
        finally:
            # Always restore git state, even when the build fails
            git_state.restore()

        return BuildResult(
            binary=BuiltBinary(name=name, path=binary_path, commit=commit_hash)
        )

    def _build_commit(self, name: str, commit: str, output_path: Path) -> None:
        """Build bitcoind for a commit and copy the binary to output_path."""
        logger.info(f"Building {name} ({commit[:12]})")

        # Dry-run mode logs the intent without checking out or building.
        if self.config.dry_run:
            logger.info(f"  [DRY RUN] Would build {commit[:12]} -> {output_path}")
            return

        # Checkout the commit
        logger.info(f"  Checking out {commit[:12]}...")
        git_checkout(commit, self.repo_path)

        # Build with nix (-L streams build logs to the console)
        cmd = ["nix", "build", "-L"]

        logger.info(f"  Running: {' '.join(cmd)}")
        logger.info(f"  Working directory: {self.repo_path}")
        result = subprocess.run(
            cmd,
            cwd=self.repo_path,
        )

        if result.returncode != 0:
            raise RuntimeError(f"Build failed for {name} ({commit[:12]})")

        # Copy binary to output location; nix leaves it under ./result/bin
        nix_binary = self.repo_path / "result" / "bin" / "bitcoind"
        if not nix_binary.exists():
            raise RuntimeError(f"Built binary not found at {nix_binary}")

        logger.info(f"  Copying {nix_binary} -> {output_path}")

        # Remove existing binary if present (may be read-only from nix)
        if output_path.exists():
            output_path.chmod(0o755)
            output_path.unlink()

        shutil.copy2(nix_binary, output_path)
        output_path.chmod(0o755)  # Ensure it's executable and writable
        logger.info(f"  Built {name} binary: {output_path}")

        # Clean up nix result symlink so the repo stays pristine
        result_link = self.repo_path / "result"
        if result_link.is_symlink():
            logger.debug(f"  Removing nix result symlink: {result_link}")
            result_link.unlink()
+""" + +from __future__ import annotations + +import os +import shutil +from dataclasses import dataclass +from pathlib import Path + + +# Known paths for drop-caches on NixOS +DROP_CACHES_PATHS = [ + "/run/wrappers/bin/drop-caches", + "/usr/local/bin/drop-caches", +] + +# Known paths for fstrim wrapper +FSTRIM_PATHS = [ + "/run/wrappers/bin/fstrim", + "/usr/local/bin/fstrim", +] + + +@dataclass +class Capabilities: + """Detected system capabilities.""" + + # Cache management + can_drop_caches: bool + drop_caches_path: str | None + + # Disk TRIM + can_fstrim: bool + fstrim_path: str | None + + # Required tools + has_hyperfine: bool + has_flamegraph: bool + has_perf: bool + has_nix: bool + + # System info + cpu_count: int + is_nixos: bool + is_ci: bool + + def check_for_run(self, instrumented: str | bool = "uninstrumented") -> list[str]: + """Check if we have required capabilities for a benchmark run. + + Args: + instrumented: Either "instrumented"/"uninstrumented" string or legacy bool + + Returns list of errors (empty if all good). + """ + errors = [] + + if not self.has_hyperfine: + errors.append("hyperfine not found in PATH (required for benchmarking)") + + # Handle both string and bool for backwards compatibility + is_instrumented = ( + instrumented == "instrumented" + if isinstance(instrumented, str) + else instrumented + ) + if is_instrumented: + if not self.has_flamegraph: + errors.append( + "flamegraph not found in PATH (required for --instrumented)" + ) + if not self.has_perf: + errors.append("perf not found in PATH (required for --instrumented)") + + return errors + + def check_for_build(self) -> list[str]: + """Check if we have required capabilities for building. + + Returns list of errors (empty if all good). 
+ """ + errors = [] + + if not self.has_nix: + errors.append("nix not found in PATH (required for building)") + + return errors + + def get_warnings(self) -> list[str]: + """Get warnings about missing optional capabilities.""" + warnings = [] + + if not self.can_drop_caches: + warnings.append( + "drop-caches not available - cache won't be cleared between runs" + ) + + if not self.can_fstrim: + warnings.append( + "fstrim not available - SSD TRIM won't run before benchmarks" + ) + + return warnings + + +def _check_executable(name: str) -> bool: + """Check if an executable is available in PATH.""" + return shutil.which(name) is not None + + +def _find_drop_caches() -> str | None: + """Find drop-caches executable.""" + for path in DROP_CACHES_PATHS: + if Path(path).exists() and os.access(path, os.X_OK): + return path + return None + + +def _find_fstrim() -> str | None: + """Find fstrim executable.""" + for path in FSTRIM_PATHS: + if Path(path).exists() and os.access(path, os.X_OK): + return path + return None + + +def _is_nixos() -> bool: + """Check if we're running on NixOS.""" + return Path("/etc/NIXOS").exists() + + +def detect_capabilities() -> Capabilities: + """Auto-detect system capabilities.""" + drop_caches_path = _find_drop_caches() + fstrim_path = _find_fstrim() + + return Capabilities( + can_drop_caches=drop_caches_path is not None, + drop_caches_path=drop_caches_path, + can_fstrim=fstrim_path is not None, + fstrim_path=fstrim_path, + has_hyperfine=_check_executable("hyperfine"), + has_flamegraph=_check_executable("flamegraph"), + has_perf=_check_executable("perf"), + has_nix=_check_executable("nix"), + cpu_count=os.cpu_count() or 1, + is_nixos=_is_nixos(), + is_ci=os.environ.get("CI", "").lower() in ("true", "1", "yes"), + ) diff --git a/bench/config.py b/bench/config.py new file mode 100644 index 000000000000..35af8f5c116c --- /dev/null +++ b/bench/config.py @@ -0,0 +1,235 @@ +"""Configuration management for benchcoin. 
+ +Layered configuration (lowest to highest priority): +1. Built-in defaults +2. bench.toml config file +3. Environment variables (BENCH_*) +4. CLI arguments +""" + +from __future__ import annotations + +import os +import tomllib +from dataclasses import dataclass +from pathlib import Path +from typing import Any + + +# Built-in defaults +DEFAULTS = { + "chain": "main", + "dbcache": 450, + "stop_height": 855000, + "runs": 3, + "connect": "", # Empty = use public P2P network + "binaries_dir": "./binaries", + "output_dir": "./bench-output", +} + +# Profile overrides +PROFILES = { + "quick": { + "stop_height": 1500, + "runs": 1, + }, + "full": { + "stop_height": 855000, + "runs": 3, + }, + "ci": { + "stop_height": 855000, + "runs": 3, + "connect": "148.251.128.115:33333", + }, +} + +# Environment variable mapping +ENV_MAPPING = { + "BENCH_DATADIR": "datadir", + "BENCH_TMP_DATADIR": "tmp_datadir", + "BENCH_BINARIES_DIR": "binaries_dir", + "BENCH_OUTPUT_DIR": "output_dir", + "BENCH_STOP_HEIGHT": "stop_height", + "BENCH_DBCACHE": "dbcache", + "BENCH_CONNECT": "connect", + "BENCH_RUNS": "runs", + "BENCH_CHAIN": "chain", +} + + +@dataclass +class Config: + """Benchmark configuration.""" + + # Core benchmark settings + chain: str = "main" + dbcache: int = 450 + stop_height: int = 855000 + runs: int = 3 + connect: str = "" # Empty = use public P2P network + + # Paths + datadir: str | None = None + tmp_datadir: str | None = None + binaries_dir: str = "./binaries" + output_dir: str = "./bench-output" + + # Behavior flags + instrumented: str = "uninstrumented" # "uninstrumented" or "instrumented" + skip_existing: bool = False + no_cache_drop: bool = False + verbose: bool = False + dry_run: bool = False + + # Profile used (for reference) + profile: str = "full" + + @property + def is_instrumented(self) -> bool: + """Whether instrumented mode is enabled (flamegraphs, debug logs).""" + return self.instrumented == "instrumented" + + def __post_init__(self) -> None: + # If 
tmp_datadir not set, derive from output_dir + if self.tmp_datadir is None: + self.tmp_datadir = str(Path(self.output_dir) / "tmp-datadir") + + # Instrumented mode forces runs=1 + if self.is_instrumented and self.runs != 1: + self.runs = 1 + + def validate(self) -> list[str]: + """Validate configuration, return list of errors.""" + errors = [] + + # datadir is optional (None = fresh sync) + if self.datadir is not None and not Path(self.datadir).exists(): + errors.append(f"datadir does not exist: {self.datadir}") + + if self.stop_height < 1: + errors.append("stop_height must be positive") + + if self.dbcache < 1: + errors.append("dbcache must be positive") + + if self.runs < 1: + errors.append("runs must be positive") + + if self.chain not in ("main", "testnet", "signet", "regtest"): + errors.append(f"invalid chain: {self.chain}") + + return errors + + +def load_toml(path: Path) -> tuple[dict[str, Any], dict[str, dict[str, Any]]]: + """Load configuration from TOML file. + + Returns: + Tuple of (base_config, profiles_dict) + """ + if not path.exists(): + return {}, {} + + with open(path, "rb") as f: + data = tomllib.load(f) + + # Flatten structure: merge [defaults] and [paths] into top level + result = {} + if "defaults" in data: + result.update(data["defaults"]) + if "paths" in data: + result.update(data["paths"]) + + # Extract profiles + profiles = data.get("profiles", {}) + + return result, profiles + + +def load_env() -> dict[str, Any]: + """Load configuration from environment variables.""" + result = {} + + for env_var, config_key in ENV_MAPPING.items(): + value = os.environ.get(env_var) + if value is not None: + # Convert numeric values + if config_key in ("stop_height", "dbcache", "runs"): + try: + value = int(value) + except ValueError: + pass # Keep as string, will fail validation + result[config_key] = value + + return result + + +def apply_profile( + config: dict[str, Any], + profile_name: str, + toml_profiles: dict[str, dict[str, Any]] | None = None, +) -> 
def apply_profile(
    config: dict[str, Any],
    profile_name: str,
    toml_profiles: dict[str, dict[str, Any]] | None = None,
) -> dict[str, Any]:
    """Overlay a named profile onto ``config`` and record its name.

    Args:
        config: Base configuration dict (not mutated).
        profile_name: Name of profile to apply.
        toml_profiles: Profiles loaded from the TOML file; these override
            the built-in profile of the same name.
    """
    merged = {**config, "profile": profile_name}

    # Built-in profile first, then the TOML profile so file values win.
    if profile_name in PROFILES:
        merged.update(PROFILES[profile_name])

    if toml_profiles and profile_name in toml_profiles:
        merged.update(toml_profiles[profile_name])

    return merged


def build_config(
    cli_args: dict[str, Any] | None = None,
    config_file: Path | None = None,
    profile: str = "full",
) -> Config:
    """Assemble a Config from every configuration source.

    Priority (lowest to highest):
    built-in defaults -> bench.toml base -> built-in profile ->
    TOML profile -> environment variables -> CLI arguments.
    """
    merged = DEFAULTS.copy()

    # bench.toml in the working directory is the implicit default file.
    file_config, toml_profiles = load_toml(config_file or Path("bench.toml"))
    merged.update(file_config)

    # Profile overrides (built-in first, then TOML - handled inside).
    merged = apply_profile(merged, profile, toml_profiles)

    # Environment variable overrides (BENCH_*).
    merged.update(load_env())

    # CLI arguments override everything; None means "not given".
    if cli_args:
        merged.update({k: v for k, v in cli_args.items() if v is not None})

    # Drop keys Config does not declare before constructing it.
    known = set(Config.__dataclass_fields__)
    return Config(**{k: v for k, v in merged.items() if k in known})
this config = reproduce the benchmark +# +# Usage: +# bench.py run --benchmark-config bench/configs/nightly.toml --matrix-entry 450 \ +# --datadir /data/pruned-840k --output-dir ./output \ +# master:/path/to/bitcoind + +[benchmark] +start_height = 840000 +runs = 2 + +[bitcoind] +stopatheight = 900000 +chain = "main" +connect = "148.251.128.115:33333" # accepts whitelisted ip addrs only +prune = 1000000 +daemon = false +printtoconsole = false + +# Parameter matrix - each value runs separately +# Use --matrix-entry to select one (e.g., --matrix-entry 450) +[bitcoind.matrix] +dbcache = [450, 32000] diff --git a/bench/configs/pr-noassumevalid.toml b/bench/configs/pr-noassumevalid.toml new file mode 100644 index 000000000000..aac45ca4954e --- /dev/null +++ b/bench/configs/pr-noassumevalid.toml @@ -0,0 +1,26 @@ +# PR benchmark configuration with assumevalid=0 (full script verification) +# Measures validation/script verification performance ("what it's like at tip") +# +# Usage: +# bench.py run --benchmark-config bench/configs/pr-noassumevalid.toml --matrix-entry 450-uninstrumented \ +# --datadir /data/pruned-840k --output-dir ./output \ +# pr:/path/to/pr/bitcoind + +[benchmark] +start_height = 840000 +runs = 2 + +[bitcoind] +stopatheight = 900000 +chain = "main" +connect = "148.251.128.115:33333" +prune = 1000000 +daemon = false +printtoconsole = false +assumevalid = "0" + +# Parameter matrix - creates multiple benchmark configurations +# Matrix expands to: 450-uninstrumented, 32000-uninstrumented +[bitcoind.matrix] +dbcache = [450, 32000] +instrumentation = ["uninstrumented"] diff --git a/bench/configs/pr.toml b/bench/configs/pr.toml new file mode 100644 index 000000000000..3513deba34ec --- /dev/null +++ b/bench/configs/pr.toml @@ -0,0 +1,29 @@ +# PR benchmark configuration +# Clone benchcoin + use this config = reproduce the benchmark +# +# Usage: +# bench.py run --benchmark-config bench/configs/pr.toml --matrix-entry 450-uninstrumented \ +# --datadir /data/pruned-840k 
--output-dir ./output \ +# pr:/path/to/pr/bitcoind + +[benchmark] +start_height = 840000 +runs = 2 + +[bitcoind] +stopatheight = 900000 +chain = "main" +connect = "148.251.128.115:33333" +prune = 1000000 +daemon = false +printtoconsole = false + +# Parameter matrix - creates multiple benchmark configurations +# Matrix expands to: 450-uninstrumented, 450-instrumented, 32000-uninstrumented, 32000-instrumented +[bitcoind.matrix] +dbcache = [450, 32000] +instrumentation = ["uninstrumented", "instrumented"] + +# Debug flags enabled when instrumentation = "instrumented" +[bitcoind.instrumented] +debug = ["coindb", "leveldb", "bench", "validation"] diff --git a/bench/configs/test-signet.toml b/bench/configs/test-signet.toml new file mode 100644 index 000000000000..8e66414c64f3 --- /dev/null +++ b/bench/configs/test-signet.toml @@ -0,0 +1,21 @@ +# Test benchmark configuration for signet +# Quick local testing without dedicated sync peer +# +# Usage: +# bench.py run --benchmark-config bench/configs/test-signet.toml --matrix-entry 450 \ +# --datadir /path/to/signet-datadir --output-dir ./output \ +# test:./binaries/test/bitcoind + +[benchmark] +start_height = 0 +runs = 1 + +[bitcoind] +stopatheight = 10000 +chain = "signet" +prune = 1000000 +daemon = false +printtoconsole = false + +[bitcoind.matrix] +dbcache = [450] diff --git a/bench/machine.py b/bench/machine.py new file mode 100644 index 000000000000..641e0ba2e0c1 --- /dev/null +++ b/bench/machine.py @@ -0,0 +1,208 @@ +"""Machine specification detection for benchmark context.""" + +from __future__ import annotations + +import logging +import subprocess +from dataclasses import dataclass +from typing import Any + +logger = logging.getLogger(__name__) + + +@dataclass +class MachineSpecs: + """Machine hardware specifications.""" + + cpu_model: str + architecture: str + cpu_cores: int + disk_type: str + os_kernel: str + total_ram_gb: float + + def to_dict(self) -> dict[str, Any]: + """Convert to dictionary for JSON 
serialization.""" + return { + "cpu_model": self.cpu_model, + "architecture": self.architecture, + "cpu_cores": self.cpu_cores, + "disk_type": self.disk_type, + "os_kernel": self.os_kernel, + "total_ram_gb": self.total_ram_gb, + } + + @classmethod + def from_dict(cls, data: dict[str, Any]) -> MachineSpecs: + """Create from dictionary.""" + return cls( + cpu_model=data.get("cpu_model", "Unknown"), + architecture=data.get("architecture", "Unknown"), + cpu_cores=data.get("cpu_cores", 0), + disk_type=data.get("disk_type", "Unknown"), + os_kernel=data.get("os_kernel", "Unknown"), + total_ram_gb=data.get("total_ram_gb", 0.0), + ) + + def get_machine_id(self) -> str: + """Get short machine identifier from architecture. + + Returns: + Short ID like "amd64" or "arm64" + """ + return get_machine_id(self.architecture) + + +def _run_command(cmd: list[str]) -> str: + """Run a command and return stdout, or empty string on failure.""" + try: + result = subprocess.run(cmd, capture_output=True, text=True, timeout=5) + return result.stdout.strip() + except (subprocess.TimeoutExpired, FileNotFoundError, OSError) as e: + logger.debug(f"Command {cmd} failed: {e}") + return "" + + +def _get_cpu_info() -> tuple[str, str, int]: + """Get CPU model, architecture, and core count from lscpu.""" + output = _run_command(["lscpu"]) + + cpu_model = "Unknown" + architecture = "Unknown" + cpu_cores = 0 + + for line in output.split("\n"): + if ":" not in line: + continue + key, value = line.split(":", 1) + key = key.strip() + value = value.strip() + + if key == "Model name": + cpu_model = value + elif key == "Architecture": + architecture = value + elif key == "CPU(s)": + try: + cpu_cores = int(value) + except ValueError: + pass + + # Fallback for architecture + if architecture == "Unknown": + architecture = _run_command(["uname", "-m"]) or "Unknown" + + return cpu_model, architecture, cpu_cores + + +def _get_os_kernel() -> str: + """Get the OS kernel version from uname -r.""" + kernel = 
_run_command(["uname", "-r"]) + return kernel if kernel else "Unknown" + + +def _get_total_ram_gb() -> float: + """Get total RAM in GB from /proc/meminfo.""" + try: + with open("/proc/meminfo") as f: + for line in f: + if line.startswith("MemTotal:"): + # Format: "MemTotal: 16384000 kB" + parts = line.split() + if len(parts) >= 2: + kb = int(parts[1]) + return round(kb / (1024 * 1024), 1) # Convert kB to GB + except (OSError, ValueError) as e: + logger.debug(f"Failed to read /proc/meminfo: {e}") + return 0.0 + + +def _get_disk_type() -> str: + """Get the fastest disk type on the system. + + Priority: NVMe > SATA SSD > HDD + Uses lsblk to check ROTA (rotational) flag: 0 = SSD/NVMe, 1 = HDD + """ + output = _run_command(["lsblk", "-d", "-o", "NAME,ROTA,MODEL", "-n"]) + + has_nvme = False + has_ssd = False + has_hdd = False + + for line in output.split("\n"): + if not line.strip(): + continue + + parts = line.split() + if len(parts) < 2: + continue + + name = parts[0] + try: + rota = int(parts[1]) + except (ValueError, IndexError): + continue + + if name.startswith("nvme"): + has_nvme = True + elif rota == 0: + has_ssd = True + elif rota == 1: + has_hdd = True + + if has_nvme: + return "NVMe SSD" + elif has_ssd: + return "SATA SSD" + elif has_hdd: + return "HDD" + else: + return "Unknown" + + +def get_machine_specs() -> MachineSpecs: + """Detect and return current machine specifications.""" + cpu_model, architecture, cpu_cores = _get_cpu_info() + disk_type = _get_disk_type() + os_kernel = _get_os_kernel() + total_ram_gb = _get_total_ram_gb() + + specs = MachineSpecs( + cpu_model=cpu_model, + architecture=architecture, + cpu_cores=cpu_cores, + disk_type=disk_type, + os_kernel=os_kernel, + total_ram_gb=total_ram_gb, + ) + + logger.info( + f"Detected machine: {cpu_model} ({architecture}, {cpu_cores} cores, " + f"{total_ram_gb}GB RAM, {disk_type}, {os_kernel})" + ) + return specs + + +# Architecture to short ID mapping +ARCH_TO_ID = { + "x86_64": "amd64", + "amd64": 
"amd64", + "aarch64": "arm64", + "arm64": "arm64", +} + + +def get_machine_id(architecture: str | None = None) -> str: + """Get short machine identifier from architecture. + + Args: + architecture: Architecture string (e.g., "x86_64", "aarch64"). + If None, auto-detect from system. + + Returns: + Short ID like "amd64" or "arm64" + """ + if architecture is None: + architecture = _run_command(["uname", "-m"]) or "unknown" + + return ARCH_TO_ID.get(architecture.lower(), architecture.lower()) diff --git a/bench/nightly.py b/bench/nightly.py new file mode 100644 index 000000000000..d8615a00fb4f --- /dev/null +++ b/bench/nightly.py @@ -0,0 +1,517 @@ +"""Nightly benchmark history management and chart generation.""" + +from __future__ import annotations + +import hashlib +import json +import logging +import re +from dataclasses import dataclass +from datetime import date +from pathlib import Path +from typing import Any + +logger = logging.getLogger(__name__) + +NUM_COLORS = 10 + + +def series_color_index(key: str) -> int: + """Compute deterministic color index from series key using MD5.""" + hash_bytes = hashlib.md5(key.encode()).digest() + return int.from_bytes(hash_bytes[:4], "little") % NUM_COLORS + + +def _normalize_cpu_model(cpu_model: str) -> str: + """Normalize CPU model to short identifier. 
+
+    Examples:
+        "AMD Ryzen 7 7700 8-Core Processor" -> "ryzen777008core"
+        "AMD EPYC 7763 64-Core Processor" -> "epyc776364core"
+        "Apple M2 Pro" -> "applem2pro"
+        "Intel(R) Core(TM) i7-12700K" -> "corei712700k"
+    """
+    model = cpu_model.lower()
+    for word in ["processor", "(r)", "(tm)", "amd", "intel"]:
+        model = model.replace(word, "")
+    parts = [p.strip() for p in model.split() if p.strip()]
+    return "".join(parts).replace("-", "").replace(" ", "")[:25]
+
+
+def _bucket_ram(ram_gb: float) -> int:
+    """Bucket RAM to nearest standard size for grouping."""
+    if ram_gb <= 0:
+        return 0
+    if ram_gb <= 32:
+        return round(ram_gb / 8) * 8
+    return round(ram_gb / 32) * 32
+
+
+def _normalize_kernel(kernel: str) -> str:
+    """Extract major.minor from kernel version."""
+    parts = kernel.split(".")
+    if len(parts) >= 2:
+        return f"{parts[0]}.{parts[1].split('-')[0]}"
+    return kernel.split("-")[0]
+
+
+def _normalize_disk_type(disk_type: str) -> str:
+    """Normalize disk type to short form."""
+    disk_lower = disk_type.lower()
+    if "nvme" in disk_lower:
+        return "nvme"
+    if "ssd" in disk_lower:
+        return "ssd"
+    if "hdd" in disk_lower:
+        return "hdd"
+    return disk_lower[:10]
+
+
+def _extract_cpu_short_name(cpu_model: str) -> str:
+    """Extract a readable short CPU name for labels."""
+    patterns = [
+        r"Ryzen \d+ \d+",
+        r"EPYC \d+",
+        r"M\d+ (?:Pro|Max|Ultra)?",
+        r"i[3579]-\d+\w*",
+        r"Xeon \w+-\d+",
+    ]
+
+    for pattern in patterns:
+        match = re.search(pattern, cpu_model, re.IGNORECASE)
+        if match:
+            return match.group(0)
+
+    words = cpu_model.replace("(R)", "").replace("(TM)", "").split()
+    meaningful = [
+        w
+        for w in words
+        if w.lower() not in ["processor", "core", "amd", "intel", "apple"]
+    ]
+    return " ".join(meaningful[:2]) if meaningful else cpu_model[:20]
+
+
+def series_key(result: "NightlyResult") -> str:
+    """Generate unique series key from machine specs and config.
+
+    Format: {cpu_short}|{ram}GB|{disk}|{kernel}|db{dbcache}|prune{prune}|{start}-{stop}
+    Example: ryzen777008core|64GB|nvme|6.6|db450|prune1000000|840000-900000
+    """
+    machine = result.machine or {}
+    config = result.config or {}
+    bitcoind = config.get("bitcoind", {})
+
+    cpu = _normalize_cpu_model(machine.get("cpu_model", "unknown"))
+    ram = _bucket_ram(machine.get("total_ram_gb", 0))
+    disk = _normalize_disk_type(machine.get("disk_type", "unknown"))
+    kernel = _normalize_kernel(machine.get("os_kernel", "unknown"))
+
+    dbcache = bitcoind.get("dbcache", result.dbcache)
+    prune = bitcoind.get("prune", 0)
+    start = config.get("start_height", 0)
+    stop = bitcoind.get("stopatheight", 0)
+
+    return f"{cpu}|{ram}GB|{disk}|{kernel}|db{dbcache}|prune{prune}|{start}-{stop}"
+
+
+def series_label(result: "NightlyResult") -> str:
+    """Generate human-readable series label for chart legend."""
+    machine = result.machine or {}
+    config = result.config or {}
+    bitcoind = config.get("bitcoind", {})
+
+    arch = machine.get("architecture", "unknown")
+    cpu_model = machine.get("cpu_model", "Unknown")
+    cpu_short = _extract_cpu_short_name(cpu_model)
+    ram = machine.get("total_ram_gb", 0)
+    ram_str = f"{int(ram)}GB RAM" if ram else "?GB RAM"
+
+    start = config.get("start_height", 0)
+    stop = bitcoind.get("stopatheight", 0)
+    block_range = f"{start}-{stop}" if start and stop else "?-?"
+ + dbcache = result.dbcache + prune = bitcoind.get("prune", 0) + + prune_str = f", prune {prune}" if prune else "" + return ( + f"{arch}, {cpu_short}, {ram_str}, {block_range}, dbcache {dbcache}{prune_str}" + ) + + +@dataclass +class NightlyResult: + """A single nightly benchmark result with embedded config and machine info.""" + + date: str # Commit date (YYYY-MM-DD) - displayed on chart X-axis + commit: str + mean: float + stddev: float + runs: int + config: dict[ + str, Any + ] # Full benchmark config (dbcache inside config.bitcoind.dbcache) + machine: dict[str, Any] # Full machine specs + run_date: str = "" # When benchmark was executed (reference only) + trigger: str = ( + "scheduled" # "scheduled" (nightly cron) or "manual" (workflow_dispatch) + ) + + @property + def dbcache(self) -> int: + """Extract dbcache from nested config.""" + return self.config.get("bitcoind", {}).get("dbcache", 0) + + @property + def machine_id(self) -> str: + """Get short machine ID from architecture.""" + from bench.machine import ARCH_TO_ID + + arch = self.machine.get("architecture", "unknown") + return ARCH_TO_ID.get(arch.lower(), arch.lower()) + + @property + def instrumentation(self) -> str: + """Get instrumentation mode from config.""" + return self.config.get("instrumentation", "uninstrumented") + + def to_dict(self) -> dict[str, Any]: + """Convert to dictionary for JSON serialization.""" + result = { + "date": self.date, + "commit": self.commit, + "mean": self.mean, + "stddev": self.stddev, + "runs": self.runs, + "config": self.config, + "machine": self.machine, + } + if self.run_date: + result["run_date"] = self.run_date + if self.trigger != "scheduled": + result["trigger"] = self.trigger + return result + + @classmethod + def from_dict(cls, data: dict[str, Any]) -> NightlyResult: + """Create from dictionary. + + Handles both new format (embedded config/machine) and legacy format. 
+ """ + # Check if this is the new format (has config as dict) + if isinstance(data.get("config"), dict): + return cls( + date=data["date"], + commit=data["commit"], + mean=data["mean"], + stddev=data["stddev"], + runs=data["runs"], + config=data["config"], + machine=data.get("machine", {}), + run_date=data.get("run_date", ""), + trigger=data.get("trigger", "scheduled"), + ) + + # Legacy format - convert to new format + dbcache = data.get("dbcache", 0) + return cls( + date=data["date"], + commit=data["commit"], + mean=data["mean"], + stddev=data["stddev"], + runs=data["runs"], + config={ + "bitcoind": {"dbcache": dbcache}, + "instrumentation": "uninstrumented", + }, + machine={}, + run_date=data.get("run_date", ""), + ) + + +class NightlyHistory: + """Manages the nightly benchmark history stored in JSON. + + Each result is self-contained with its own config and machine info. + """ + + def __init__(self, history_file: Path): + self.history_file = history_file + self.results: list[NightlyResult] = [] + self._load() + + def _load(self) -> None: + """Load history from JSON file.""" + if self.history_file.exists(): + with open(self.history_file) as f: + data = json.load(f) + self.results = [NightlyResult.from_dict(r) for r in data.get("results", [])] + logger.info(f"Loaded {len(self.results)} results from {self.history_file}") + else: + self.results = [] + logger.info(f"No existing history at {self.history_file}") + + def save(self) -> None: + """Save history to JSON file.""" + self.history_file.parent.mkdir(parents=True, exist_ok=True) + data: dict = {"results": [r.to_dict() for r in self.results]} + with open(self.history_file, "w") as f: + json.dump(data, f, indent=2) + logger.info(f"Saved {len(self.results)} results to {self.history_file}") + + def append(self, result: NightlyResult) -> None: + """Append a new result to history. + + Scheduled runs dedup by (date, commit, dbcache) to handle retries. + Manual runs are always appended as additional data points. 
+ """ + if result.trigger == "scheduled": + for existing in self.results: + if ( + existing.trigger == "scheduled" + and existing.date == result.date + and existing.commit == result.commit + and existing.dbcache == result.dbcache + ): + logger.warning( + f"Replacing scheduled result for {result.date} {result.commit[:8]} dbcache={result.dbcache}" + ) + self.results.remove(existing) + break + + self.results.append(result) + # Sort by date, then dbcache + self.results.sort(key=lambda r: (r.date, r.dbcache)) + logger.info( + f"Appended result: {result.date} {result.commit[:8]} dbcache={result.dbcache} {result.mean:.1f}s" + ) + + def get_latest(self, dbcache: int | str) -> NightlyResult | None: + """Get the most recent result for a given dbcache config. + + Args: + dbcache: DB cache size in MB (int) or config name like '450', '32000' + + Returns: + Most recent NightlyResult for that dbcache, or None if not found + """ + # Handle string config names like '450', '32000' + if isinstance(dbcache, str): + try: + dbcache = int(dbcache) + except ValueError: + return None + + matching = [r for r in self.results if r.dbcache == dbcache] + if not matching: + return None + # Results are sorted by date, so last one is most recent + return matching[-1] + + def get_recent_median( + self, dbcache: int | str, n: int = 7 + ) -> tuple[float, list[NightlyResult]] | None: + """Get the median mean of the most recent N scheduled results for a dbcache config. 
+ + Args: + dbcache: DB cache size in MB (int) or config name like '450', '32000' + n: Number of recent results to average + + Returns: + Tuple of (median_mean, list_of_results_used), or None if no results found + """ + if isinstance(dbcache, str): + try: + dbcache = int(dbcache) + except ValueError: + return None + + matching = [ + r + for r in self.results + if r.dbcache == dbcache and r.trigger == "scheduled" + ] + if not matching: + return None + + recent = matching[-n:] + sorted_means = sorted(r.mean for r in recent) + mid = len(sorted_means) // 2 + if len(sorted_means) % 2 == 0: + median = (sorted_means[mid - 1] + sorted_means[mid]) / 2 + else: + median = sorted_means[mid] + return median, recent + + def get_chart_data(self) -> list[dict]: + """Get results in format suitable for chart embedding. + + Returns data with series_key and series_label for dynamic grouping. + Also includes legacy 'config' field for backward compatibility. + """ + chart_data = [] + for r in self.results: + key = series_key(r) + chart_data.append( + { + "date": r.date, + "commit": r.commit, + "mean": r.mean, + "stddev": r.stddev, + "config": str(r.dbcache), # Legacy compatibility + "series_key": key, + "series_label": series_label(r), + "color_index": series_color_index(key), + "trigger": r.trigger, + "run_date": r.run_date, + } + ) + return chart_data + + def append_from_results_json( + self, + results_file: Path, + commit: str, + benchmark_config: dict[str, Any], + machine_specs: dict[str, Any], + date_str: str | None = None, + run_date: str = "", + trigger: str = "scheduled", + ) -> None: + """Append result from a hyperfine results.json file. 
+ + Args: + results_file: Path to hyperfine results.json + commit: Git commit hash + benchmark_config: Full benchmark config dict (includes bitcoind.dbcache) + machine_specs: Machine specs dict + date_str: Commit date string (YYYY-MM-DD), defaults to today + run_date: When the benchmark was executed (YYYY-MM-DD), for reference + """ + if not results_file.exists(): + raise FileNotFoundError(f"Results file not found: {results_file}") + + with open(results_file) as f: + data = json.load(f) + + # Hyperfine output has a "results" array with one entry per command + # For nightly, we only have one command (master) + results = data.get("results", []) + if not results: + raise ValueError(f"No results found in {results_file}") + + # Use the first (and should be only) result + result_data = results[0] + mean = result_data.get("mean", 0) + stddev = result_data.get("stddev", 0) + runs = len(result_data.get("times", [])) + + if date_str is None: + date_str = date.today().isoformat() + + result = NightlyResult( + date=date_str, + commit=commit, + mean=mean, + stddev=stddev if stddev else 0, + runs=runs, + config=benchmark_config, + machine=machine_specs, + run_date=run_date, + trigger=trigger, + ) + self.append(result) + + +def generate_nightly_chart(history: NightlyHistory, output_file: Path) -> None: + """Generate the nightly chart HTML page. 
+ + Args: + history: NightlyHistory instance with loaded results + output_file: Path to write index.html + """ + from bench.render import render_template + + chart_data = history.get_chart_data() + html = render_template("nightly-chart.html", chart_data=chart_data) + + output_file.parent.mkdir(parents=True, exist_ok=True) + output_file.write_text(html) + logger.info(f"Generated nightly chart: {output_file}") + + +class NightlyPhase: + """CLI interface for nightly benchmark operations.""" + + def __init__(self, history_file: Path): + self.history_file = history_file + + def append( + self, + results_file: Path, + commit: str, + dbcache: int, + date_str: str | None = None, + benchmark_config_file: Path | None = None, + instrumentation: str = "uninstrumented", + machine_specs_file: Path | None = None, + run_date: str = "", + trigger: str = "scheduled", + ) -> None: + """Append a result from hyperfine results.json to history. + + Args: + results_file: Path to hyperfine results.json + commit: Git commit hash + dbcache: DB cache size in MB + date_str: Commit date string (YYYY-MM-DD), defaults to today + benchmark_config_file: Path to benchmark config TOML + instrumentation: Instrumentation mode ('uninstrumented' or 'instrumented') + machine_specs_file: Path to pre-captured machine specs JSON (optional) + run_date: When the benchmark was executed (YYYY-MM-DD), for reference + """ + from bench.benchmark_config import BenchmarkConfig + + history = NightlyHistory(self.history_file) + + # Get machine specs from file if provided, otherwise detect current machine + if machine_specs_file: + machine_specs = json.loads(machine_specs_file.read_text()) + logger.info(f"Using pre-captured machine specs from {machine_specs_file}") + else: + from bench.machine import get_machine_specs + + machine_specs = get_machine_specs().to_dict() + + # Build benchmark config dict + if benchmark_config_file: + benchmark_config = BenchmarkConfig.from_toml(benchmark_config_file) + config_dict = 
benchmark_config.to_dict() + else: + config_dict = {} + + # Ensure dbcache is in the config + if "bitcoind" not in config_dict: + config_dict["bitcoind"] = {} + config_dict["bitcoind"]["dbcache"] = dbcache + config_dict["instrumentation"] = instrumentation + + history.append_from_results_json( + results_file=results_file, + commit=commit, + benchmark_config=config_dict, + machine_specs=machine_specs, + date_str=date_str, + run_date=run_date, + trigger=trigger, + ) + history.save() + + def chart(self, output_file: Path) -> None: + """Generate the nightly chart HTML. + + Args: + output_file: Path to write index.html + """ + history = NightlyHistory(self.history_file) + generate_nightly_chart(history, output_file) diff --git a/bench/patchelf.py b/bench/patchelf.py new file mode 100644 index 000000000000..6da1e00867cf --- /dev/null +++ b/bench/patchelf.py @@ -0,0 +1,135 @@ +"""Patchelf utilities for fixing guix-built binaries on NixOS.""" + +from __future__ import annotations + +import logging +import os +import subprocess +from pathlib import Path + +logger = logging.getLogger(__name__) + + +def get_nix_interpreter() -> str | None: + """Get the path to the nix store's dynamic linker. + + Returns None if not on NixOS or can't find it. 
+ """ + # Check if we're on NixOS + if not Path("/etc/NIXOS").exists(): + return None + + # Find the interpreter from the current glibc + # We can get this by checking what the current shell uses + try: + result = subprocess.run( + ["patchelf", "--print-interpreter", "/bin/sh"], + capture_output=True, + text=True, + ) + if result.returncode == 0: + interp = result.stdout.strip() + if interp and Path(interp).exists(): + return interp + except FileNotFoundError: + pass + + return None + + +def get_binary_interpreter(binary: Path) -> str | None: + """Get the interpreter (dynamic linker) of a binary.""" + try: + result = subprocess.run( + ["patchelf", "--print-interpreter", str(binary)], + capture_output=True, + text=True, + ) + if result.returncode == 0: + return result.stdout.strip() + except FileNotFoundError: + logger.debug("patchelf not found") + return None + + +def needs_patching(binary: Path) -> bool: + """Check if a binary needs to be patched for NixOS. + + Returns True if: + - We're on NixOS + - The binary has a non-nix interpreter (e.g., /lib64/ld-linux-x86-64.so.2) + """ + nix_interp = get_nix_interpreter() + if not nix_interp: + # Not on NixOS, no patching needed + return False + + binary_interp = get_binary_interpreter(binary) + if not binary_interp: + # Can't determine interpreter, assume no patching needed + return False + + # Check if the binary's interpreter is already in the nix store + if binary_interp.startswith("/nix/store/"): + return False + + # Binary uses a non-nix interpreter (e.g., /lib64/...) + return True + + +def patch_binary(binary: Path) -> bool: + """Patch a binary to use the nix store's dynamic linker. + + Returns True if patching was successful or not needed. 
+ """ + if not needs_patching(binary): + logger.debug(f"Binary {binary} does not need patching") + return True + + nix_interp = get_nix_interpreter() + if not nix_interp: + logger.warning("Cannot patch binary: unable to find nix interpreter") + return False + + original_interp = get_binary_interpreter(binary) + logger.info(f"Patching binary: {binary}") + logger.info(f" Original interpreter: {original_interp}") + logger.info(f" New interpreter: {nix_interp}") + + # Make sure binary is writable + try: + os.chmod(binary, 0o755) + except OSError as e: + logger.warning(f"Could not make binary writable: {e}") + + try: + result = subprocess.run( + ["patchelf", "--set-interpreter", nix_interp, str(binary)], + capture_output=True, + text=True, + ) + if result.returncode != 0: + logger.error(f"patchelf failed: {result.stderr}") + return False + logger.info(" Patching successful") + return True + except FileNotFoundError: + logger.error("patchelf not found - install it or use nix develop") + return False + + +def ensure_binary_runnable(binary: Path) -> bool: + """Ensure a binary can run on this system. + + Patches the binary if necessary (on NixOS with non-nix binaries). + Returns True if the binary should be runnable. 
+ """ + if not binary.exists(): + logger.error(f"Binary not found: {binary}") + return False + + # Check if patching is needed and do it + if needs_patching(binary): + return patch_binary(binary) + + return True diff --git a/bench/render.py b/bench/render.py new file mode 100644 index 000000000000..9f4893644211 --- /dev/null +++ b/bench/render.py @@ -0,0 +1,24 @@ +"""Jinja2 template rendering utilities.""" + +from __future__ import annotations + +from pathlib import Path +from typing import Any + +from jinja2 import Environment, FileSystemLoader, select_autoescape + + +def get_template_env() -> Environment: + """Get the Jinja2 environment for rendering templates.""" + template_dir = Path(__file__).parent / "templates" + return Environment( + loader=FileSystemLoader(template_dir), + autoescape=select_autoescape(["html", "xml"]), + ) + + +def render_template(template_name: str, **context: Any) -> str: + """Render a template with the given context.""" + env = get_template_env() + template = env.get_template(template_name) + return template.render(**context) diff --git a/bench/report.py b/bench/report.py new file mode 100644 index 000000000000..da517e801057 --- /dev/null +++ b/bench/report.py @@ -0,0 +1,699 @@ +"""Report phase - generate HTML reports from benchmark results. + +Ported from the JavaScript logic in .github/workflows/publish-results.yml. 
+""" + +from __future__ import annotations + +import json +import logging +import shutil +from dataclasses import dataclass, field +from datetime import date +from pathlib import Path +from typing import Any + +from bench.analyze import HAS_MATPLOTLIB, LogParser, PlotGenerator +from bench.nightly import ( + NightlyHistory, + series_color_index, + series_key, + series_label, +) +from bench.render import render_template + +logger = logging.getLogger(__name__) + + +def format_config_display( + dbcache: int, + machine_id: str | None = None, + instrumentation: str | None = None, + noassumevalid: bool = False, +) -> str: + """Format config for display. + + Args: + dbcache: DB cache size in MB + machine_id: Machine ID (e.g., "amd64", "arm64") + instrumentation: Instrumentation mode (e.g., "instrumented", "uninstrumented") + noassumevalid: Whether assumevalid=0 was used + + Returns: + Display string like "dbcache=450MB (amd64, instrumented)" + + Examples: + >>> format_config_display(450) + 'dbcache=450MB' + >>> format_config_display(32000, "amd64") + 'dbcache=32GB (amd64)' + >>> format_config_display(450, noassumevalid=True) + 'dbcache=450MB (assumevalid=0)' + """ + # Format dbcache with unit + if dbcache >= 1000: + cache_str = f"{dbcache // 1000}GB" + else: + cache_str = f"{dbcache}MB" + + # Build optional parts (exclude "uninstrumented" as it's the default) + parts = [] + if machine_id: + parts.append(machine_id) + if instrumentation and instrumentation != "uninstrumented": + parts.append(instrumentation) + if noassumevalid: + parts.append("assumevalid=0") + + if parts: + return f"dbcache={cache_str} ({', '.join(parts)})" + return f"dbcache={cache_str}" + + +def parse_network_name(network: str) -> tuple[int, str, bool]: + """Parse a network/config name to extract dbcache, instrumentation, and noassumevalid. 
+ + Args: + network: Network name like "450-uninstrumented", "noav-32000-uninstrumented", "450" + + Returns: + Tuple of (dbcache_int, instrumentation_str, noassumevalid_bool) + + Examples: + >>> parse_network_name("450-uninstrumented") + (450, 'uninstrumented', False) + >>> parse_network_name("noav-32000-uninstrumented") + (32000, 'uninstrumented', True) + >>> parse_network_name("450") + (450, 'uninstrumented', False) + """ + noassumevalid = network.startswith("noav-") + if noassumevalid: + network = network[len("noav-"):] + + parts = network.split("-") + try: + dbcache = int(parts[0]) + except ValueError: + dbcache = 0 + + instrumentation = parts[1] if len(parts) > 1 else "uninstrumented" + return dbcache, instrumentation, noassumevalid + + +@dataclass +class BenchmarkRun: + """Parsed benchmark run data.""" + + network: str + command: str + mean: float + stddev: float | None + user: float + system: float + parameters: dict[str, Any] = field(default_factory=dict) + + +@dataclass +class ReportResult: + """Result of report generation.""" + + output_dir: Path + index_file: Path + speedups: dict[str, float] + + +class ReportGenerator: + """Generate HTML reports from benchmark results.""" + + def __init__( + self, + repo_url: str = "https://github.com/bitcoin-dev-tools/benchcoin", + nightly_history: NightlyHistory | None = None, + ): + self.repo_url = repo_url + self.nightly_history = nightly_history + + def generate_multi_network( + self, + network_dirs: dict[str, Path], + output_dir: Path, + title: str = "Benchmark Results", + pr_number: str | None = None, + run_id: str | None = None, + commit: str | None = None, + ) -> ReportResult: + """Generate HTML report from multiple network benchmark results. 
+ + Args: + network_dirs: Dict mapping network name to directory containing results.json + output_dir: Where to write the HTML report + title: Title for the report + pr_number: PR number (for CI reports) + run_id: Run ID (for CI reports) + commit: Commit hash for PR (used in chart) + + Returns: + ReportResult with paths and speedup data + """ + output_dir.mkdir(parents=True, exist_ok=True) + + # Combine results from all networks + all_runs: list[BenchmarkRun] = [] + for network, input_dir in network_dirs.items(): + results_file = input_dir / "results.json" + if not results_file.exists(): + logger.warning( + f"results.json not found in {input_dir} for network {network}" + ) + continue + + with open(results_file) as f: + data = json.load(f) + + # Parse and add network to each run + for result in data.get("results", []): + all_runs.append( + BenchmarkRun( + network=network, + command=result.get("command", ""), + mean=result.get("mean", 0), + stddev=result.get("stddev"), + user=result.get("user", 0), + system=result.get("system", 0), + parameters=result.get("parameters", {}), + ) + ) + + # Copy artifacts from this network + self._copy_network_artifacts(network, input_dir, output_dir) + + if not all_runs: + raise ValueError("No benchmark results found in any network directory") + + # Calculate nightly comparison (for uninstrumented configs only) + nightly_comparison = self._calculate_nightly_comparison(all_runs, commit) + + # Build title with PR/run info if provided + full_title = title + if pr_number and run_id: + full_title = f"PR #{pr_number} - Run {run_id}" + + # Generate HTML + html = self._generate_html( + all_runs, + nightly_comparison, + full_title, + output_dir, + output_dir, + commit, + run_id, + ) + + # Write report + index_file = output_dir / "index.html" + index_file.write_text(html) + logger.info(f"Generated report: {index_file}") + + # Write combined results.json with nightly comparison + combined_results: dict[str, Any] = { + "results": [ + { + 
"network": run.network, + "command": run.command, + "mean": run.mean, + "stddev": run.stddev, + "user": run.user, + "system": run.system, + } + for run in all_runs + ], + } + if nightly_comparison: + combined_results["nightly_comparison"] = nightly_comparison + + results_file = output_dir / "results.json" + results_file.write_text(json.dumps(combined_results, indent=2)) + + # Return speedups derived from nightly comparison for backwards compatibility + speedups = { + config: data["speedup_percent"] + for config, data in nightly_comparison.items() + if data.get("speedup_percent") is not None + } + + return ReportResult( + output_dir=output_dir, + index_file=index_file, + speedups=speedups, + ) + + def generate( + self, + input_dir: Path, + output_dir: Path, + title: str = "Benchmark Results", + ) -> ReportResult: + """Generate HTML report from benchmark artifacts (single binary mode). + + Args: + input_dir: Directory containing results.json and artifacts + output_dir: Where to write the HTML report + title: Title for the report + + Returns: + ReportResult with paths + """ + output_dir.mkdir(parents=True, exist_ok=True) + + # Load results.json + results_file = input_dir / "results.json" + if not results_file.exists(): + raise FileNotFoundError(f"results.json not found in {input_dir}") + + with open(results_file) as f: + data = json.load(f) + + # Parse results + runs = self._parse_results(data) + + # Copy artifacts first so plots are available for template rendering + self._copy_artifacts(input_dir, output_dir) + + # Generate HTML (no nightly comparison in single-directory mode) + html = self._generate_html(runs, {}, title, input_dir, output_dir) + + # Write report + index_file = output_dir / "index.html" + index_file.write_text(html) + logger.info(f"Generated report: {index_file}") + + return ReportResult( + output_dir=output_dir, + index_file=index_file, + speedups={}, + ) + + def generate_index( + self, + results_dir: Path, + output_file: Path, + ) -> None: + 
"""Generate main index.html listing all available results. + + Args: + results_dir: Directory containing pr-* subdirectories + output_file: Where to write index.html + """ + results = [] + + if results_dir.exists(): + for pr_dir in sorted( + results_dir.iterdir(), + key=lambda d: (0, int(d.name.replace("pr-", ""))) + if d.name.startswith("pr-") and d.name.replace("pr-", "").isdigit() + else (1, d.name), + ): + if pr_dir.is_dir() and pr_dir.name.startswith("pr-"): + pr_num = pr_dir.name.replace("pr-", "") + pr_runs = [] + for run_dir in sorted(pr_dir.iterdir()): + if run_dir.is_dir(): + pr_runs.append(run_dir.name) + if pr_runs: + results.append((pr_num, pr_runs)) + + html = render_template("results-index.html", results=results) + output_file.write_text(html) + logger.info(f"Generated index: {output_file}") + + def _parse_results(self, data: dict) -> list[BenchmarkRun]: + """Parse results from hyperfine JSON output.""" + runs = [] + + # Handle both direct hyperfine output and combined results format + results = data.get("results", []) + + for result in results: + runs.append( + BenchmarkRun( + network=result.get("network", "default"), + command=result.get("command", ""), + mean=result.get("mean", 0), + stddev=result.get("stddev"), + user=result.get("user", 0), + system=result.get("system", 0), + parameters=result.get("parameters", {}), + ) + ) + + return runs + + def _calculate_nightly_comparison( + self, runs: list[BenchmarkRun], commit: str | None = None + ) -> dict[str, dict[str, Any]]: + """Calculate comparison against nightly baseline. + + Compares PR results against the median of the most recent 7 nightly results + for each config. Only considers uninstrumented configs. 
+ + Args: + runs: List of benchmark runs + commit: PR commit hash + + Returns: + Dict mapping config to comparison data: + { + "450": { + "pr_mean": 14500.0, + "pr_stddev": 100.0, + "nightly_mean": 14800.0, + "nightly_count": 7, + "nightly_date_range": "2026-01-01 to 2026-01-07", + "speedup_percent": 2.0 + } + } + """ + comparison: dict[str, dict[str, Any]] = {} + + if not self.nightly_history: + logger.warning("No nightly history available for comparison") + return comparison + + # Group runs by network/config, only uninstrumented + for run in runs: + network = run.network + + # Skip instrumented configs + if network.endswith("-true") or network.endswith("-instrumented"): + continue + + # Extract base config name (e.g., "450-false" -> "450", "450-uninstrumented" -> "450") + config = network.replace("-false", "").replace("-uninstrumented", "") + + # Get PR result mean + pr_mean = run.mean + pr_stddev = run.stddev + + # Get median of recent nightly results for this config + result = self.nightly_history.get_recent_median(config, n=7) + + if result: + nightly_median, recent_results = result + speedup = None + if nightly_median > 0: + speedup = round( + ((nightly_median - pr_mean) / nightly_median) * 100, 1 + ) + + # Use the latest result for series key/label and chart positioning + latest = recent_results[-1] + + comparison[config] = { + "pr_mean": pr_mean, + "pr_stddev": pr_stddev, + "pr_commit": commit, + "nightly_mean": nightly_median, + "nightly_count": len(recent_results), + "nightly_date_range": f"{recent_results[0].date} to {recent_results[-1].date}", + "speedup_percent": speedup, + "series_key": series_key(latest), + "series_label": series_label(latest), + } + else: + # No nightly data, just record PR result + comparison[config] = { + "pr_mean": pr_mean, + "pr_stddev": pr_stddev, + "pr_commit": commit, + "nightly_mean": None, + "nightly_count": 0, + "nightly_date_range": None, + "speedup_percent": None, + } + + return comparison + + def 
_copy_network_artifacts( + self, network: str, input_dir: Path, output_dir: Path + ) -> None: + """Copy artifacts from a network directory with network prefix.""" + # Copy flamegraphs with network prefix + for svg in input_dir.glob("*-flamegraph.svg"): + dest = output_dir / f"{network}-{svg.name}" + shutil.copy2(svg, dest) + logger.debug(f"Copied {svg.name} as {dest.name}") + + # Generate plots from debug logs (logs themselves are available as CI artifacts) + if HAS_MATPLOTLIB: + for log in input_dir.glob("*-debug.log"): + name = log.name.removesuffix("-debug.log") + prefix = f"{network}-{name}" + plots_dir = output_dir / "plots" + plots_dir.mkdir(parents=True, exist_ok=True) + try: + data = LogParser().parse_file(log) + plots = PlotGenerator(prefix, plots_dir).generate_all(data) + logger.info(f"Generated {len(plots)} plots for {prefix}") + except Exception: + logger.warning( + f"Failed to generate plots for {prefix}", exc_info=True + ) + + def _generate_html( + self, + runs: list[BenchmarkRun], + nightly_comparison: dict[str, dict[str, Any]], + title: str, + input_dir: Path, + output_dir: Path, + commit: str | None = None, + run_id: str | None = None, + ) -> str: + """Generate the HTML report.""" + sorted_runs = sorted(runs, key=lambda r: r.network) + + runs_data = [] + for run in sorted_runs: + dbcache, instrumentation, noassumevalid = parse_network_name(run.network) + config_display = format_config_display( + dbcache, instrumentation=instrumentation, noassumevalid=noassumevalid + ) + runs_data.append( + { + "config_display": config_display, + "mean": run.mean, + "stddev": run.stddev, + "user": run.user, + "system": run.system, + } + ) + + nightly_data, pr_chart_data = self._prepare_nightly_data( + nightly_comparison, commit + ) + + graphs = self._prepare_graphs_data(runs, input_dir, output_dir) + + nightly_chart_data = None + if pr_chart_data and self.nightly_history: + nightly_chart_data = self.nightly_history.get_chart_data() + + ci_run_url = 
f"{self.repo_url}/actions/runs/{run_id}" if run_id else None + + return render_template( + "pr-report.html", + title=title, + runs=runs_data, + nightly_comparison=nightly_data, + pr_chart_data=pr_chart_data, + nightly_chart_data=nightly_chart_data, + graphs=graphs, + repo_url=self.repo_url, + ci_run_url=ci_run_url, + ) + + def _prepare_nightly_data( + self, + nightly_comparison: dict[str, dict[str, Any]], + commit: str | None = None, + ) -> tuple[dict[str, dict[str, Any]], list[dict]]: + """Prepare nightly comparison data for template rendering. + + Returns: + Tuple of (nightly_comparison_with_display, pr_chart_data) + """ + if not nightly_comparison: + return {}, [] + + result = {} + pr_chart_data = [] + + for config, data in sorted(nightly_comparison.items()): + noassumevalid = config.startswith("noav-") + raw = config[len("noav-"):] if noassumevalid else config + try: + dbcache = int(raw) + except ValueError: + dbcache = 0 + + result[config] = { + **data, + "config_display": format_config_display( + dbcache, noassumevalid=noassumevalid + ), + } + + if data.get("nightly_mean"): + key = data.get("series_key", f"unknown|db{config}|0-0") + pr_chart_data.append( + { + "config": config, + "mean": data["pr_mean"], + "stddev": data.get("pr_stddev") or 0, + "commit": commit or "unknown", + "date": date.today().isoformat(), + "series_key": key, + "series_label": data.get("series_label", f"{config} dbcache"), + "color_index": series_color_index(key), + } + ) + + return result, pr_chart_data + + def _prepare_graphs_data( + self, + runs: list[BenchmarkRun], + input_dir: Path, + output_dir: Path, + ) -> list[dict]: + """Prepare flamegraphs and debug logs data for template rendering.""" + graphs = [] + + for run in runs: + name = run.command + network = run.network + + flamegraph_name = None + network_prefixed = f"{network}-{name}-flamegraph.svg" + non_prefixed = f"{name}-flamegraph.svg" + + if (output_dir / network_prefixed).exists(): + flamegraph_name = network_prefixed + 
elif (input_dir / non_prefixed).exists(): + flamegraph_name = non_prefixed + + plots = [] + plots_dir = output_dir / "plots" + if plots_dir.exists(): + for prefix in [f"{network}-{name}", name]: + plot_files = sorted(plots_dir.glob(f"{prefix}-*.png")) + if plot_files: + plots = [f"plots/{p.name}" for p in plot_files] + break + + if not flamegraph_name and not plots: + continue + + display_label = f"{network} - {name}" if network != "default" else name + + graphs.append( + { + "label": display_label, + "flamegraph": flamegraph_name, + "plots": plots, + } + ) + + return graphs + + def _copy_artifacts(self, input_dir: Path, output_dir: Path) -> None: + """Copy flamegraphs and generate plots from debug logs.""" + same_dir = input_dir.resolve() == output_dir.resolve() + + if not same_dir: + for svg in input_dir.glob("*-flamegraph.svg"): + dest = output_dir / svg.name + shutil.copy2(svg, dest) + logger.debug(f"Copied {svg.name}") + + if HAS_MATPLOTLIB: + for log in input_dir.glob("*-debug.log"): + name = log.name.removesuffix("-debug.log") + plots_dir = output_dir / "plots" + plots_dir.mkdir(parents=True, exist_ok=True) + try: + data = LogParser().parse_file(log) + plots = PlotGenerator(name, plots_dir).generate_all(data) + logger.info(f"Generated {len(plots)} plots for {name}") + except Exception: + logger.warning( + f"Failed to generate plots for {name}", exc_info=True + ) + + +class ReportPhase: + """Generate reports from benchmark results.""" + + def __init__( + self, + repo_url: str = "https://github.com/bitcoin-dev-tools/benchcoin", + nightly_history_file: Path | None = None, + ): + nightly_history: NightlyHistory | None = None + if nightly_history_file and nightly_history_file.exists(): + nightly_history = NightlyHistory(nightly_history_file) + self.generator = ReportGenerator(repo_url, nightly_history) + + def run( + self, + input_dir: Path, + output_dir: Path, + title: str = "Benchmark Results", + ) -> ReportResult: + """Generate report from benchmark artifacts. 
+ + Args: + input_dir: Directory containing results.json and artifacts + output_dir: Where to write the HTML report + title: Title for the report + + Returns: + ReportResult with paths and speedup data + """ + return self.generator.generate(input_dir, output_dir, title) + + def run_multi_network( + self, + network_dirs: dict[str, Path], + output_dir: Path, + title: str = "Benchmark Results", + pr_number: str | None = None, + run_id: str | None = None, + commit: str | None = None, + ) -> ReportResult: + """Generate report from multiple network benchmark results. + + Args: + network_dirs: Dict mapping network name to directory containing results.json + output_dir: Where to write the HTML report + title: Title for the report + pr_number: PR number (for CI reports) + run_id: Run ID (for CI reports) + commit: Commit hash for PR + + Returns: + ReportResult with paths and speedup data + """ + return self.generator.generate_multi_network( + network_dirs, output_dir, title, pr_number, run_id, commit + ) + + def update_index(self, results_dir: Path, output_file: Path) -> None: + """Update the main index.html listing all results. 
+ + Args: + results_dir: Directory containing pr-* subdirectories + output_file: Where to write index.html + """ + self.generator.generate_index(results_dir, output_file) diff --git a/bench/templates/base.html b/bench/templates/base.html new file mode 100644 index 000000000000..d5f5f1ad5c57 --- /dev/null +++ b/bench/templates/base.html @@ -0,0 +1,14 @@ + + + + + + {% block title %}Benchcoin{% endblock %} + + {% block head %}{% endblock %} + + + {% block content %}{% endblock %} + {% block scripts %}{% endblock %} + + diff --git a/bench/templates/nightly-chart.html b/bench/templates/nightly-chart.html new file mode 100644 index 000000000000..0ebf835bd61a --- /dev/null +++ b/bench/templates/nightly-chart.html @@ -0,0 +1,207 @@ +{% extends 'base.html' %} + +{% block title %}Bitcoin Core Nightly IBD Benchmark{% endblock %} + +{% block head %} + + +{% endblock %} + +{% block body_class %}p-4 md:p-8{% endblock %} + +{% block content %} +{% include 'partials/theme-toggle.html' %} +
+

Bitcoin Core Nightly IBD Benchmark

+

+ IBD from a single networked peer +

+
+
+
+

+ View PR benchmark results +

+
+{% endblock %} + +{% block scripts %} + +{% endblock %} diff --git a/bench/templates/partials/pr-chart.html b/bench/templates/partials/pr-chart.html new file mode 100644 index 000000000000..b13ebffcf8c5 --- /dev/null +++ b/bench/templates/partials/pr-chart.html @@ -0,0 +1,134 @@ +
+ + diff --git a/bench/templates/partials/theme-toggle.html b/bench/templates/partials/theme-toggle.html new file mode 100644 index 000000000000..e0082e1a3958 --- /dev/null +++ b/bench/templates/partials/theme-toggle.html @@ -0,0 +1,25 @@ + + + diff --git a/bench/templates/pr-report.html b/bench/templates/pr-report.html new file mode 100644 index 000000000000..3e968325406b --- /dev/null +++ b/bench/templates/pr-report.html @@ -0,0 +1,119 @@ +{% extends 'base.html' %} + +{% block title %}Benchmark Results{% endblock %} + +{% block content %} +
+

Benchmark Results

+
+

{{ title }}

+ +

Run Data

+
+ + + + + + + + + + + + {% for run in runs %} + + + + + + + + {% endfor %} + +
ConfigMean (s)Std DevUser (s)System (s)
{{ run.config_display }}{{ "%.3f"|format(run.mean) }}{{ "%.3f"|format(run.stddev) if run.stddev else "N/A" }}{{ "%.3f"|format(run.user) }}{{ "%.3f"|format(run.system) }}
+
+ + {% if nightly_comparison %} +

Comparison to Nightly Master

+
+ + + + + + + + + + + {% for config, data in nightly_comparison|dictsort %} + + + + + + + {% endfor %} + +
ConfigPR TimeNightly Median (Date Range)Change
{{ data.config_display }}{{ "%.1f"|format(data.pr_mean / 60) }} min + {% if data.nightly_mean %} + {{ "%.1f"|format(data.nightly_mean / 60) }} min + (median of {{ data.nightly_count }}, {{ data.nightly_date_range }}) + {% else %} + No baseline + {% endif %} + + {% if data.speedup_percent is not none %} + {% if data.speedup_percent > 0 %} + +{{ data.speedup_percent }}% + {% elif data.speedup_percent < 0 %} + {{ data.speedup_percent }}% + {% else %} + {{ data.speedup_percent }}% + {% endif %} + {% else %} + N/A + {% endif %} +
+
+ + {% if pr_chart_data %} +

Performance Trend

+
+ {% include 'partials/pr-chart.html' %} +
+ {% endif %} + + {% else %} +
+

No nightly baseline data available for comparison.

+
+ {% endif %} + + {% if graphs %} +

Flamegraphs and Charts

+ {% if ci_run_url %} +

+ Debug logs available as CI artifacts + (expires after 90 days) +

+ {% endif %} + {% for graph in graphs %} +
+

{{ graph.label }}

+ {% if graph.flamegraph %} + + {% endif %} + {% if graph.plots %} + {% for plot in graph.plots %} + {{ graph.label }} + {% endfor %} + {% endif %} +
+ {% endfor %} + {% endif %} + +
+
+{% endblock %} diff --git a/bench/templates/results-index.html b/bench/templates/results-index.html new file mode 100644 index 000000000000..5a39cbee3672 --- /dev/null +++ b/bench/templates/results-index.html @@ -0,0 +1,23 @@ +{% extends 'base.html' %} + +{% block title %}Bitcoin Benchmark Results{% endblock %} + +{% block content %} +
+

Bitcoin Benchmark Results

+
+

Available Results

+
    + {% for pr_num, pr_runs in results %} +
  • PR #{{ pr_num }} + +
  • + {% endfor %} +
+
+
+{% endblock %} diff --git a/bench/utils.py b/bench/utils.py new file mode 100644 index 000000000000..11d1c0cb18b2 --- /dev/null +++ b/bench/utils.py @@ -0,0 +1,103 @@ +"""Utility functions for git operations.""" + +from __future__ import annotations + +import logging +import subprocess +from pathlib import Path + +logger = logging.getLogger(__name__) + + +class GitState: + """Saved git state for restoration after operations.""" + + def __init__(self, repo_path: Path | None = None): + self.repo_path = repo_path or Path.cwd() + self.original_branch: str | None = None + self.original_commit: str | None = None + self.was_detached: bool = False + + def save(self) -> None: + """Save current git state.""" + # Check if we're on a branch or detached HEAD + result = subprocess.run( + ["git", "symbolic-ref", "--short", "HEAD"], + capture_output=True, + text=True, + cwd=self.repo_path, + ) + + if result.returncode == 0: + self.original_branch = result.stdout.strip() + self.was_detached = False + else: + # Detached HEAD - save commit hash + result = subprocess.run( + ["git", "rev-parse", "HEAD"], + capture_output=True, + text=True, + check=True, + cwd=self.repo_path, + ) + self.original_commit = result.stdout.strip() + self.was_detached = True + + logger.debug( + f"Saved git state: branch={self.original_branch}, " + f"commit={self.original_commit}, detached={self.was_detached}" + ) + + def restore(self) -> None: + """Restore saved git state.""" + if self.original_branch: + logger.debug(f"Restoring branch: {self.original_branch}") + subprocess.run( + ["git", "checkout", self.original_branch], + check=True, + cwd=self.repo_path, + ) + elif self.original_commit: + logger.debug(f"Restoring detached HEAD: {self.original_commit}") + subprocess.run( + ["git", "checkout", self.original_commit], + check=True, + cwd=self.repo_path, + ) + + +class GitError(Exception): + """Git operation failed.""" + + +def git_checkout(commit: str, repo_path: Path | None = None) -> None: + """Checkout a 
specific commit.""" + repo_path = repo_path or Path.cwd() + logger.info(f"Checking out {commit[:12]}") + + result = subprocess.run( + ["git", "checkout", commit], + cwd=repo_path, + capture_output=True, + text=True, + ) + + if result.returncode != 0: + raise GitError(f"Failed to checkout {commit}: {result.stderr}") + + +def git_rev_parse(ref: str, repo_path: Path | None = None) -> str: + """Resolve a git reference to a full commit hash.""" + repo_path = repo_path or Path.cwd() + + result = subprocess.run( + ["git", "rev-parse", ref], + cwd=repo_path, + capture_output=True, + text=True, + ) + + if result.returncode != 0: + raise GitError(f"Failed to resolve {ref}: {result.stderr}") + + return result.stdout.strip() diff --git a/doc/benchcoin.md b/doc/benchcoin.md new file mode 100644 index 000000000000..d16ebaac8e89 --- /dev/null +++ b/doc/benchcoin.md @@ -0,0 +1,181 @@ +# benchcoin + +A Bitcoin Core benchmarking fork with automated IBD performance tracking. + +## Overview + +This repository is a fork of Bitcoin Core that performs automated IBD (Initial Block Download) benchmarking. It measures and compares the performance impact of changes to Bitcoin Core's codebase using reproducible, long-running benchmarks. 
+
+**Live Results:** [bitcoin-dev-tools.github.io/benchcoin](https://bitcoin-dev-tools.github.io/benchcoin)
+
+## Features
+
+- **Nightly Benchmarks** - Daily performance tracking of master branch
+- **PR Benchmarks** - Automated comparison of base vs head on pull requests
+- **Multiple Configurations:**
+  - Default cache (450 MB dbcache)
+  - Large cache (32 GB dbcache)
+  - Instrumented mode (flamegraphs + debug logging)
+- **Performance Visualizations:**
+  - Interactive Plotly charts for nightly trends
+  - CPU flamegraphs with Bitcoin-specific coloring
+  - Time series plots (block height, cache size, tx count, LevelDB metrics)
+- **Reproducible Configs** - TOML config files capture all benchmark parameters
+
+## Example Flamegraph
+
+![Example Flamegraph](../doc/flamegraph.svg)
+
+## How to Use
+
+### Benchmark a PR
+
+1. Open a Pull Request against **this repo** (not bitcoin/bitcoin)
+2. Wait for the benchmark workflow to complete
+3. Results are posted as a PR comment with link to detailed report
+
+### Benchmark an Existing bitcoin/bitcoin PR
+
+```bash
+# Requires 'just' (https://github.com/casey/just)
+just pick-pr  # Cherry-pick commits from bitcoin/bitcoin PR
+git push origin HEAD
+# Open PR against this repo
+```
+
+## Benchmark Configurations
+
+All benchmark parameters are defined in config files at `bench/configs/`:
+
+| Config | Matrix Entries | Runs | Use Case |
+|--------|---------------|------|----------|
+| `nightly.toml` | 450, 32000 | 2 | Nightly tracking |
+| `pr.toml` | 450-uninstrumented, 450-instrumented, 32000-uninstrumented, 32000-instrumented | 3 | PR comparison |
+
+Matrix entries are generated from `[bitcoind.matrix]` values (e.g., `dbcache = [450, 32000]`).
+Both configs benchmark blocks 840,000 → 855,000 from a dedicated sync peer.
+
+## Benchmark Outputs
+
+### Nightly (Homepage)
+
+- Interactive chart showing sync time trends over time
+- Machine specs and config stored with results
+- Accessible at the repository's GitHub Pages root
+
+### PR Reports
+
+- Timing comparison (mean, stddev, speedup %)
+- CPU flamegraphs (instrumented mode)
+- Performance plots (instrumented mode)
+- Accessible at `/results/pr-N/run-id/`
+
+## Local Development
+
+### Prerequisites
+
+- [Nix](https://nixos.org/download/) with flakes enabled
+- Blockchain datadir snapshot (pruned at block 840,000)
+
+### Quick Start
+
+```bash
+# Enter nix environment
+nix develop
+
+# Build binaries
+python3 bench.py build HEAD~1:base HEAD:head
+
+# Run benchmark with config
+python3 bench.py run \
+  --benchmark-config bench/configs/pr.toml \
+  --matrix-entry 450-uninstrumented \
+  --datadir /path/to/pruned-840k \
+  base:./binaries/base/bitcoind \
+  head:./binaries/head/bitcoind
+```
+
+### Just Recipes
+
+```bash
+just test-uninstrumented HEAD~1 HEAD /path/to/datadir  # Quick smoke test
+just test-instrumented HEAD~1 HEAD /path/to/datadir    # With flamegraphs
+just build HEAD~1:base HEAD:head                       # Build only
+just pick-pr 12345                                     # Cherry-pick PR
+```
+
+## Technical Details
+
+### Tools
+
+- [Hyperfine](https://github.com/sharkdp/hyperfine) - Benchmark timing
+- [Flamegraph](https://github.com/willcl-ark/flamegraph) - CPU profiling (Bitcoin fork with custom palette)
+- [Plotly.js](https://plotly.com/javascript/) - Interactive charts
+- [matplotlib](https://matplotlib.org/) - Performance plots
+
+### CI Infrastructure
+
+**Runner:** Self-hosted on Hetzner AX52 (16 cores, NixOS)
+- 1 core for system
+- 1 core for perf/flamegraph
+- 14 cores for bitcoind
+
+**Sync Peer:** Dedicated Hetzner VPS serving blocks over network to exercise full IBD codepaths.
+ +Configuration repos: +- Runner: [nix-github-runner](https://github.com/bitcoin-dev-tools/nix-github-runner) +- Seed: [nix-seed-node](https://github.com/bitcoin-dev-tools/nix-seed-node) + +### Workflow Overview + +``` +Daily 5:00 AM GMT + │ + ▼ + rebase.yml (rebase on upstream) + │ + ▼ + nightly-benchmark.yml + │ + ├─► Build master + ├─► Benchmark (default + large configs) + ├─► Append to nightly-history.json + └─► Generate homepage chart + +PR opened + │ + ▼ + benchmark.yml + │ + ├─► Build base + head + └─► Benchmark (4 matrix configs) + │ + ▼ + publish-results.yml + │ + ├─► Generate HTML report + ├─► Commit to gh-pages + └─► Post PR comment +``` + +### Results Storage + +Results are stored on the `gh-pages` branch: + +``` +/ +├── index.html # Nightly chart (homepage) +├── nightly-history.json # Historical data + machine specs + config +└── results/ + ├── index.html # PR results index + └── pr-// # Individual PR reports +``` + +The `nightly-history.json` captures: +- Benchmark config (heights, peer, dbcache values, command template) +- Machine specs (CPU, cores, RAM, disk type, kernel) +- Results (date, commit, mean, stddev, runs) + +## License + +This project is licensed under the same terms as Bitcoin Core - see the [COPYING](../COPYING) file for details. 
diff --git a/doc/flamegraph.svg b/doc/flamegraph.svg new file mode 100644 index 000000000000..77f05068edd1 --- /dev/null +++ b/doc/flamegraph.svg @@ -0,0 +1,491 @@ +bitcoind assumeutxo IBD@head Reset ZoomSearch [unknown] (930,216,305 samples, 0.03%)libc.so.6::__GI___libc_open (1,277,437,934 samples, 0.04%)[unknown] (1,277,437,934 samples, 0.04%)[unknown] (1,121,698,471 samples, 0.03%)[unknown] (1,121,698,471 samples, 0.03%)[unknown] (1,121,698,471 samples, 0.03%)[unknown] (808,723,138 samples, 0.02%)[unknown] (705,370,773 samples, 0.02%)[unknown] (654,247,113 samples, 0.02%)[unknown] (601,840,190 samples, 0.02%)[unknown] (412,286,776 samples, 0.01%)libc.so.6::__lll_lock_wait_private (3,169,140,832 samples, 0.09%)[unknown] (3,068,852,192 samples, 0.09%)[unknown] (2,912,247,498 samples, 0.08%)[unknown] (2,859,869,350 samples, 0.08%)[unknown] (2,547,374,665 samples, 0.07%)[unknown] (2,442,338,234 samples, 0.07%)[unknown] (2,018,530,007 samples, 0.06%)[unknown] (1,768,059,272 samples, 0.05%)[unknown] (1,360,516,543 samples, 0.04%)[unknown] (941,780,033 samples, 0.03%)[unknown] (732,126,125 samples, 0.02%)[unknown] (367,091,733 samples, 0.01%)libc.so.6::__lll_lock_wake_private (53,149,822,463 samples, 1.49%)l..[unknown] (52,891,684,033 samples, 1.49%)[..[unknown] (51,489,363,011 samples, 1.45%)[..[unknown] (51,020,482,662 samples, 1.43%)[..[unknown] (46,915,115,303 samples, 1.32%)[unknown] (45,255,852,290 samples, 1.27%)[unknown] (38,150,418,340 samples, 1.07%)[unknown] (35,292,486,865 samples, 0.99%)[unknown] (7,892,404,247 samples, 0.22%)[unknown] (3,327,749,547 samples, 0.09%)[unknown] (1,188,855,625 samples, 0.03%)[unknown] (566,758,595 samples, 0.02%)libc.so.6::_int_free_create_chunk (628,326,946 samples, 0.02%)libc.so.6::_int_free_merge_chunk (358,656,602 samples, 0.01%)libc.so.6::_int_malloc (74,559,659,927 samples, 2.10%)li..[unknown] (721,620,417 samples, 0.02%)[unknown] (610,988,583 samples, 0.02%)[unknown] (610,988,583 samples, 0.02%)[unknown] (610,988,583 
samples, 0.02%)[unknown] (559,250,914 samples, 0.02%)[unknown] (559,250,914 samples, 0.02%)libc.so.6::alloc_perturb (425,154,213 samples, 0.01%)libc.so.6::malloc (24,700,554,078 samples, 0.69%)libc.so.6::malloc_consolidate (735,996,757 samples, 0.02%)libc.so.6::unlink_chunk.isra.0 (6,120,352,373 samples, 0.17%)[unknown] (167,607,884,597 samples, 4.71%)[unknown]libstdc++.so.6.0.32::virtual thunk to std::__cxx11::basic_ostringstream<char, std::char_traits<char>, std::allocator<char> >::~basic_ostringstream (417,178,495 samples, 0.01%)[unknown] (417,178,495 samples, 0.01%)libc.so.6::_IO_default_xsputn (371,898,668 samples, 0.01%)libc.so.6::_IO_do_write@@GLIBC_2.2.5 (415,186,042 samples, 0.01%)libc.so.6::_IO_file_xsputn@@GLIBC_2.2.5 (52,841,892,362 samples, 1.49%)l..libc.so.6::_IO_fwrite (157,971,658,633 samples, 4.44%)libc.so...[[ext4]] (1,657,432,113 samples, 0.05%)[unknown] (573,069,492 samples, 0.02%)[[ext4]] (2,536,153,731 samples, 0.07%)[[ext4]] (10,537,322,599 samples, 0.30%)[unknown] (7,422,408,080 samples, 0.21%)[unknown] (6,329,696,449 samples, 0.18%)[unknown] (5,353,636,150 samples, 0.15%)[unknown] (5,041,980,997 samples, 0.14%)[unknown] (3,383,888,214 samples, 0.10%)[unknown] (1,348,486,405 samples, 0.04%)[unknown] (477,579,410 samples, 0.01%)[unknown] (424,961,857 samples, 0.01%)[[ext4]] (48,707,811,335 samples, 1.37%)[..[unknown] (37,296,429,178 samples, 1.05%)[unknown] (35,118,068,672 samples, 0.99%)[unknown] (29,610,843,695 samples, 0.83%)[unknown] (24,208,827,110 samples, 0.68%)[unknown] (17,096,181,771 samples, 0.48%)[unknown] (6,112,761,166 samples, 0.17%)[unknown] (1,344,893,459 samples, 0.04%)[unknown] (458,831,632 samples, 0.01%)[[ext4]] (365,017,200 samples, 0.01%)[[ext4]] (518,180,627 samples, 0.01%)[[ext4]] (466,259,788 samples, 0.01%)[[ext4]] (673,383,386 samples, 0.02%)[[ext4]] (59,764,846,104 samples, 1.68%)[..[unknown] (58,060,722,922 samples, 1.63%)[..[unknown] (7,950,480,723 samples, 0.22%)[unknown] (5,540,377,500 samples, 0.16%)[unknown] 
(865,590,582 samples, 0.02%)[unknown] (813,212,612 samples, 0.02%)[unknown] (813,212,612 samples, 0.02%)[unknown] (813,212,612 samples, 0.02%)[unknown] (711,368,524 samples, 0.02%)libc.so.6::__GI___libc_write (70,786,161,691 samples, 1.99%)li..[unknown] (70,568,950,557 samples, 1.98%)[u..[unknown] (69,379,113,892 samples, 1.95%)[u..[unknown] (68,772,280,665 samples, 1.93%)[u..[unknown] (66,697,097,059 samples, 1.88%)[u..[unknown] (3,800,961,354 samples, 0.11%)[unknown] (780,895,718 samples, 0.02%)libc.so.6::__memmove_avx512_unaligned_erms (15,769,232,267 samples, 0.44%)libc.so.6::__mempcpy@plt (4,938,637,189 samples, 0.14%)libc.so.6::__send (1,149,037,952 samples, 0.03%)[unknown] (1,149,037,952 samples, 0.03%)[unknown] (1,149,037,952 samples, 0.03%)[unknown] (1,149,037,952 samples, 0.03%)[unknown] (1,096,533,096 samples, 0.03%)[unknown] (1,096,533,096 samples, 0.03%)[unknown] (1,096,533,096 samples, 0.03%)[unknown] (1,094,640,456 samples, 0.03%)[unknown] (943,771,904 samples, 0.03%)[unknown] (626,496,659 samples, 0.02%)[unknown] (522,399,654 samples, 0.01%)[unknown] (469,549,544 samples, 0.01%)[unknown] (469,549,544 samples, 0.01%)[unknown] (366,321,373 samples, 0.01%)libc.so.6::_int_free (16,918,597,179 samples, 0.48%)libc.so.6::_int_free_merge_chunk (716,678,677 samples, 0.02%)libc.so.6::_int_malloc (1,269,524,481 samples, 0.04%)libc.so.6::cfree@GLIBC_2.2.5 (4,352,992,616 samples, 0.12%)libc.so.6::malloc (8,032,159,513 samples, 0.23%)libc.so.6::malloc_consolidate (39,479,511,598 samples, 1.11%)[unknown] (401,333,554 samples, 0.01%)[unknown] (401,333,554 samples, 0.01%)[unknown] (401,333,554 samples, 0.01%)[unknown] (401,333,554 samples, 0.01%)[unknown] (401,333,554 samples, 0.01%)[unknown] (401,333,554 samples, 0.01%)libc.so.6::new_do_write (469,906,341 samples, 0.01%)libc.so.6::read (459,442,054 samples, 0.01%)[unknown] (459,442,054 samples, 0.01%)[unknown] (360,200,514 samples, 0.01%)[unknown] (360,200,514 samples, 0.01%)[unknown] (360,200,514 samples, 
0.01%)[unknown] (360,200,514 samples, 0.01%)libc.so.6::sysmalloc (469,717,952 samples, 0.01%)[unknown] (469,717,952 samples, 0.01%)[unknown] (415,893,983 samples, 0.01%)[unknown] (366,135,265 samples, 0.01%)[unknown] (366,135,265 samples, 0.01%)libc.so.6::unlink_chunk.isra.0 (2,862,604,776 samples, 0.08%)bitcoind::CBlockIndex::GetAncestor (412,360,660 samples, 0.01%)bitcoind::CCoinsViewCache::AccessCoin (421,783,849 samples, 0.01%)bitcoind::SipHashUint256Extra (6,150,872,313 samples, 0.17%)bitcoind::std::_Hashtable<COutPoint, std::pair<COutPoint const, CCoinsCacheEntry>, PoolAllocator<std::pair<COutPoint const, CCoinsCacheEntry>, 144ul, 8ul>, std::__detail::_Select1st, std::equal_to<COutPoint>, SaltedOutpointHasher, std::__detail::_Mod_range_hashing, std::__detail::_Default_ranged_hash, std::__detail::_Prime_rehash_policy, std::__detail::_Hashtable_traits<false, false, true> >::_M_erase (100,736,697,557 samples, 2.83%)bitc..bitcoind::SipHashUint256Extra (1,991,693,392 samples, 0.06%)bitcoind::std::_Hashtable<COutPoint, std::pair<COutPoint const, CCoinsCacheEntry>, PoolAllocator<std::pair<COutPoint const, CCoinsCacheEntry>, 144ul, 8ul>, std::__detail::_Select1st, std::equal_to<COutPoint>, SaltedOutpointHasher, std::__detail::_Mod_range_hashing, std::__detail::_Default_ranged_hash, std::__detail::_Prime_rehash_policy, std::__detail::_Hashtable_traits<false, false, true> >::_M_find_before_node (90,084,545,787 samples, 2.53%)bit..bitcoind::SipHashUint256Extra (71,251,854,599 samples, 2.00%)bi..bitcoind::SipHashUint256Extra (26,794,756,611 samples, 0.75%)bitcoind::std::_Hashtable<COutPoint, std::pair<COutPoint const, CCoinsCacheEntry>, PoolAllocator<std::pair<COutPoint const, CCoinsCacheEntry>, 144ul, 8ul>, std::__detail::_Select1st, std::equal_to<COutPoint>, SaltedOutpointHasher, std::__detail::_Mod_range_hashing, std::__detail::_Default_ranged_hash, std::__detail::_Prime_rehash_policy, std::__detail::_Hashtable_traits<false, false, true> >::_M_insert_unique_node 
(46,369,997,648 samples, 1.30%)bitcoind::std::_Hashtable<COutPoint, std::pair<COutPoint const, CCoinsCacheEntry>, PoolAllocator<std::pair<COutPoint const, CCoinsCacheEntry>, 144ul, 8ul>, std::__detail::_Select1st, std::equal_to<COutPoint>, SaltedOutpointHasher, std::__detail::_Mod_range_hashing, std::__detail::_Default_ranged_hash, std::__detail::_Prime_rehash_policy, std::__detail::_Hashtable_traits<false, false, true> >::_M_rehash (18,471,505,609 samples, 0.52%)libc.so.6::__memset_avx512_unaligned_erms (632,105,655 samples, 0.02%)[unknown] (579,371,219 samples, 0.02%)[unknown] (474,387,191 samples, 0.01%)[unknown] (421,585,797 samples, 0.01%)[unknown] (421,585,797 samples, 0.01%)[unknown] (368,759,434 samples, 0.01%)[unknown] (368,759,434 samples, 0.01%)[unknown] (368,759,434 samples, 0.01%)bitcoind::std::_Hashtable<COutPoint, std::pair<COutPoint const, CCoinsCacheEntry>, PoolAllocator<std::pair<COutPoint const, CCoinsCacheEntry>, 144ul, 8ul>, std::__detail::_Select1st, std::equal_to<COutPoint>, SaltedOutpointHasher, std::__detail::_Mod_range_hashing, std::__detail::_Default_ranged_hash, std::__detail::_Prime_rehash_policy, std::__detail::_Hashtable_traits<false, false, true> >::erase (1,518,987,687 samples, 0.04%)bitcoind::SipHashUint256Extra (625,645,482 samples, 0.02%)bitcoind::SipHashUint256Extra (6,692,957,315 samples, 0.19%)[unknown] (1,036,177,296 samples, 0.03%)[unknown] (928,879,608 samples, 0.03%)[unknown] (877,183,919 samples, 0.02%)[unknown] (719,026,447 samples, 0.02%)[unknown] (666,701,067 samples, 0.02%)[unknown] (626,005,752 samples, 0.02%)[unknown] (364,282,815 samples, 0.01%)[unknown] (364,282,815 samples, 0.01%)[unknown] (364,282,815 samples, 0.01%)[unknown] (364,282,815 samples, 0.01%)bitcoind::std::_Hashtable<COutPoint, std::pair<COutPoint const, CCoinsCacheEntry>, PoolAllocator<std::pair<COutPoint const, CCoinsCacheEntry>, 144ul, 8ul>, std::__detail::_Select1st, std::equal_to<COutPoint>, SaltedOutpointHasher, 
std::__detail::_Mod_range_hashing, std::__detail::_Default_ranged_hash, std::__detail::_Prime_rehash_policy, std::__detail::_Hashtable_traits<false, false, true> >::find (133,163,328,034 samples, 3.75%)bitcoi..bitcoind::std::_Hashtable<COutPoint, std::pair<COutPoint const, CCoinsCacheEntry>, PoolAllocator<std::pair<COutPoint const, CCoinsCacheEntry>, 144ul, 8ul>, std::__detail::_Select1st, std::equal_to<COutPoint>, SaltedOutpointHasher, std::__detail::_Mod_range_hashing, std::__detail::_Default_ranged_hash, std::__detail::_Prime_rehash_policy, std::__detail::_Hashtable_traits<false, false, true> >::_M_find_before_node (119,438,100,972 samples, 3.36%)bitco..bitcoind::SipHashUint256Extra (986,497,657 samples, 0.03%)bitcoind::std::__detail::_Hash_node<std::pair<COutPoint const, CCoinsCacheEntry>, false>* std::__detail::_Hashtable_alloc<PoolAllocator<std::__detail::_Hash_node<std::pair<COutPoint const, CCoinsCacheEntry>, false>, 144ul, 8ul> >::_M_allocate_node<std::piecewise_construct_t const&, std::tuple<COutPoint const&>, std::tuple<> > (5,414,052,109 samples, 0.15%)libc.so.6::cfree@GLIBC_2.2.5 (4,527,272,747 samples, 0.13%)bitcoind::CCoinsViewCache::BatchWrite (408,297,908,928 samples, 11.48%)bitcoind::CCoinsViewCac..bitcoind::std::_Hashtable<COutPoint, std::pair<COutPoint const, CCoinsCacheEntry>, PoolAllocator<std::pair<COutPoint const, CCoinsCacheEntry>, 144ul, 8ul>, std::__detail::_Select1st, std::equal_to<COutPoint>, SaltedOutpointHasher, std::__detail::_Mod_range_hashing, std::__detail::_Default_ranged_hash, std::__detail::_Prime_rehash_policy, std::__detail::_Hashtable_traits<false, false, true> >::clear (4,431,167,402 samples, 0.12%)bitcoind::std::_Hashtable<COutPoint, std::pair<COutPoint const, CCoinsCacheEntry>, PoolAllocator<std::pair<COutPoint const, CCoinsCacheEntry>, 144ul, 8ul>, std::__detail::_Select1st, std::equal_to<COutPoint>, SaltedOutpointHasher, std::__detail::_Mod_range_hashing, std::__detail::_Default_ranged_hash, 
std::__detail::_Prime_rehash_policy, std::__detail::_Hashtable_traits<false, false, true> >::find (676,133,697 samples, 0.02%)bitcoind::CCoinsViewCache::Flush (414,604,793,420 samples, 11.66%)bitcoind::CCoinsViewCach..bitcoind::CTxMemPool::removeConflicts (1,307,189,422 samples, 0.04%)bitcoind::std::_Rb_tree<COutPoint const*, std::pair<COutPoint const* const, CTransaction const*>, std::_Select1st<std::pair<COutPoint const* const, CTransaction const*> >, DereferencingComparator<COutPoint const*>, std::allocator<std::pair<COutPoint const* const, CTransaction const*> > >::find (940,298,479 samples, 0.03%)bitcoind::SipHashUint256 (1,301,282,993 samples, 0.04%)bitcoind::std::_Rb_tree<uint256, std::pair<uint256 const, long>, std::_Select1st<std::pair<uint256 const, long> >, std::less<uint256>, std::allocator<std::pair<uint256 const, long> > >::_M_erase (1,201,625,005 samples, 0.03%)bitcoind::CTxMemPool::removeForBlock (17,028,655,239 samples, 0.48%)bitcoind::std::_Rb_tree<uint256, std::pair<uint256 const, long>, std::_Select1st<std::pair<uint256 const, long> >, std::less<uint256>, std::allocator<std::pair<uint256 const, long> > >::erase (12,855,923,134 samples, 0.36%)bitcoind::std::_Rb_tree<uint256, std::pair<uint256 const, long>, std::_Select1st<std::pair<uint256 const, long> >, std::less<uint256>, std::allocator<std::pair<uint256 const, long> > >::equal_range (2,508,971,022 samples, 0.07%)[unknown] (3,441,479,431 samples, 0.10%)[unknown] (3,089,709,936 samples, 0.09%)[unknown] (2,820,174,820 samples, 0.08%)[unknown] (2,720,356,939 samples, 0.08%)[unknown] (2,720,356,939 samples, 0.08%)[unknown] (2,557,087,196 samples, 0.07%)[unknown] (2,356,775,337 samples, 0.07%)[unknown] (1,672,816,080 samples, 0.05%)[unknown] (1,100,674,926 samples, 0.03%)[unknown] (787,217,059 samples, 0.02%)[unknown] (574,492,426 samples, 0.02%)bitcoind::SipHashUint256Extra (359,543,734 samples, 0.01%)bitcoind::std::_Hashtable<COutPoint, std::pair<COutPoint const, CCoinsCacheEntry>, 
PoolAllocator<std::pair<COutPoint const, CCoinsCacheEntry>, 144ul, 8ul>, std::__detail::_Select1st, std::equal_to<COutPoint>, SaltedOutpointHasher, std::__detail::_Mod_range_hashing, std::__detail::_Default_ranged_hash, std::__detail::_Prime_rehash_policy, std::__detail::_Hashtable_traits<false, false, true> >::_M_find_before_node (10,977,516,042 samples, 0.31%)bitcoind::SipHashUint256Extra (3,562,058,963 samples, 0.10%)bitcoind::SipHashUint256Extra (1,836,963,585 samples, 0.05%)bitcoind::SipHashUint256Extra (6,867,820,925 samples, 0.19%)bitcoind::std::_Hashtable<COutPoint, std::pair<COutPoint const, CCoinsCacheEntry>, PoolAllocator<std::pair<COutPoint const, CCoinsCacheEntry>, 144ul, 8ul>, std::__detail::_Select1st, std::equal_to<COutPoint>, SaltedOutpointHasher, std::__detail::_Mod_range_hashing, std::__detail::_Default_ranged_hash, std::__detail::_Prime_rehash_policy, std::__detail::_Hashtable_traits<false, false, true> >::_M_insert_unique_node (16,890,522,357 samples, 0.48%)bitcoind::std::_Hashtable<COutPoint, std::pair<COutPoint const, CCoinsCacheEntry>, PoolAllocator<std::pair<COutPoint const, CCoinsCacheEntry>, 144ul, 8ul>, std::__detail::_Select1st, std::equal_to<COutPoint>, SaltedOutpointHasher, std::__detail::_Mod_range_hashing, std::__detail::_Default_ranged_hash, std::__detail::_Prime_rehash_policy, std::__detail::_Hashtable_traits<false, false, true> >::_M_rehash (12,768,158,119 samples, 0.36%)bitcoind::std::__detail::_Hash_node<std::pair<COutPoint const, CCoinsCacheEntry>, false>* std::__detail::_Hashtable_alloc<PoolAllocator<std::__detail::_Hash_node<std::pair<COutPoint const, CCoinsCacheEntry>, false>, 144ul, 8ul> >::_M_allocate_node<std::piecewise_construct_t const&, std::tuple<COutPoint const&>, std::tuple<> > (6,083,575,685 samples, 0.17%)[unknown] (2,667,289,880 samples, 0.08%)[unknown] (2,453,773,220 samples, 0.07%)[unknown] (2,293,236,868 samples, 0.06%)[unknown] (2,189,852,142 samples, 0.06%)[unknown] (1,978,814,058 samples, 0.06%)[unknown] 
(1,713,021,112 samples, 0.05%)[unknown] (1,360,558,892 samples, 0.04%)[unknown] (1,099,770,850 samples, 0.03%)[unknown] (785,095,967 samples, 0.02%)[unknown] (468,560,942 samples, 0.01%)[unknown] (366,515,283 samples, 0.01%)bitcoind::CCoinsViewCache::AddCoin (67,517,205,631 samples, 1.90%)bi..bitcoind::AddCoins (83,151,504,659 samples, 2.34%)bit..bitcoind::std::__detail::_Hash_node<std::pair<COutPoint const, CCoinsCacheEntry>, false>* std::__detail::_Hashtable_alloc<PoolAllocator<std::__detail::_Hash_node<std::pair<COutPoint const, CCoinsCacheEntry>, false>, 144ul, 8ul> >::_M_allocate_node<std::piecewise_construct_t const&, std::tuple<COutPoint const&>, std::tuple<> > (368,308,911 samples, 0.01%)bitcoind::CBlockIndex::GetAncestor (780,828,411 samples, 0.02%)bitcoind::SipHashUint256Extra (6,967,127,022 samples, 0.20%)bitcoind::CCoinsViewCache::FetchCoin (11,631,656,359 samples, 0.33%)bitcoind::std::_Hashtable<COutPoint, std::pair<COutPoint const, CCoinsCacheEntry>, PoolAllocator<std::pair<COutPoint const, CCoinsCacheEntry>, 144ul, 8ul>, std::__detail::_Select1st, std::equal_to<COutPoint>, SaltedOutpointHasher, std::__detail::_Mod_range_hashing, std::__detail::_Default_ranged_hash, std::__detail::_Prime_rehash_policy, std::__detail::_Hashtable_traits<false, false, true> >::_M_find_before_node (2,762,447,333 samples, 0.08%)bitcoind::CCoinsViewCache::AccessCoin (13,718,933,582 samples, 0.39%)bitcoind::CCoinsViewCache::AddCoin (935,848,977 samples, 0.03%)bitcoind::CCoinsViewCache::HaveInputs (363,967,847 samples, 0.01%)bitcoind::CCoinsViewCache::SpendCoin (775,446,488 samples, 0.02%)bitcoind::CTransaction::GetValueOut (571,129,594 samples, 0.02%)bitcoind::SipHashUint256Extra (6,132,196,838 samples, 0.17%)bitcoind::CCoinsViewCache::FetchCoin (22,771,955,106 samples, 0.64%)bitcoind::std::_Hashtable<COutPoint, std::pair<COutPoint const, CCoinsCacheEntry>, PoolAllocator<std::pair<COutPoint const, CCoinsCacheEntry>, 144ul, 8ul>, std::__detail::_Select1st, 
std::equal_to<COutPoint>, SaltedOutpointHasher, std::__detail::_Mod_range_hashing, std::__detail::_Default_ranged_hash, std::__detail::_Prime_rehash_policy, std::__detail::_Hashtable_traits<false, false, true> >::_M_find_before_node (8,387,260,917 samples, 0.24%)bitcoind::SipHashUint256Extra (672,360,582 samples, 0.02%)bitcoind::CCoinsViewCache::AccessCoin (27,541,380,041 samples, 0.77%)bitcoind::std::_Hashtable<COutPoint, std::pair<COutPoint const, CCoinsCacheEntry>, PoolAllocator<std::pair<COutPoint const, CCoinsCacheEntry>, 144ul, 8ul>, std::__detail::_Select1st, std::equal_to<COutPoint>, SaltedOutpointHasher, std::__detail::_Mod_range_hashing, std::__detail::_Default_ranged_hash, std::__detail::_Prime_rehash_policy, std::__detail::_Hashtable_traits<false, false, true> >::_M_find_before_node (840,128,595 samples, 0.02%)bitcoind::CCoinsViewCache::FetchCoin (9,862,576,991 samples, 0.28%)bitcoind::CCoinsViewCache::FetchCoin (723,258,358 samples, 0.02%)bitcoind::CCoinsViewBacked::GetCoin (1,001,559,892 samples, 0.03%)bitcoind::leveldb::LookupKey::LookupKey (468,932,422 samples, 0.01%)bitcoind::leveldb::(anonymous namespace)::BytewiseComparatorImpl::Compare (477,889,771 samples, 0.01%)bitcoind::leveldb::(anonymous namespace)::BytewiseComparatorImpl::Compare (2,464,437,114 samples, 0.07%)bitcoind::leveldb::FindFile (12,889,348,897 samples, 0.36%)bitcoind::leveldb::InternalKeyComparator::Compare (8,952,657,039 samples, 0.25%)libc.so.6::__memcmp_evex_movbe (3,658,168,717 samples, 0.10%)bitcoind::leveldb::InternalKeyComparator::Compare (468,603,758 samples, 0.01%)bitcoind::leveldb::(anonymous namespace)::ShardedLRUCache::Lookup (2,481,353,143 samples, 0.07%)[unknown] (470,703,247 samples, 0.01%)[unknown] (419,110,322 samples, 0.01%)[unknown] (367,081,554 samples, 0.01%)[unknown] (367,081,554 samples, 0.01%)bitcoind::std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> >::_M_mutate (723,558,367 samples, 
0.02%)libc.so.6::__memmove_avx512_unaligned_erms (682,634,544 samples, 0.02%)bitcoind::leveldb::Block::Iter::ParseNextKey (6,607,693,428 samples, 0.19%)libc.so.6::malloc (468,621,157 samples, 0.01%)bitcoind::leveldb::(anonymous namespace)::BytewiseComparatorImpl::Compare (3,736,764,421 samples, 0.11%)bitcoind::leveldb::InternalKeyComparator::Compare (22,752,758,306 samples, 0.64%)libc.so.6::__memcmp_evex_movbe (16,502,022,326 samples, 0.46%)bitcoind::leveldb::Block::Iter::Seek (81,753,854,146 samples, 2.30%)bit..libc.so.6::__memmove_avx512_unaligned_erms (624,754,079 samples, 0.02%)bitcoind::leveldb::Block::Iter::~Iter (1,202,042,453 samples, 0.03%)bitcoind::leveldb::Iterator::~Iterator (886,809,043 samples, 0.02%)bitcoind::leveldb::DeleteBlock (418,661,180 samples, 0.01%)bitcoind::leveldb::Block::NewIterator (1,830,741,267 samples, 0.05%)bitcoind::leveldb::BlockHandle::DecodeFrom (1,350,133,609 samples, 0.04%)bitcoind::leveldb::FilterBlockReader::KeyMayMatch (3,241,956,535 samples, 0.09%)bitcoind::leveldb::InternalFilterPolicy::KeyMayMatch (470,469,134 samples, 0.01%)bitcoind::leveldb::(anonymous namespace)::BloomFilterPolicy::KeyMayMatch (470,469,134 samples, 0.01%)bitcoind::leveldb::InternalKeyComparator::Compare (2,930,394,374 samples, 0.08%)bitcoind::leveldb::SaveValue (885,107,264 samples, 0.02%)bitcoind::leveldb::(anonymous namespace)::ShardedLRUCache::Lookup (1,152,034,360 samples, 0.03%)bitcoind::leveldb::Hash (363,890,191 samples, 0.01%)bitcoind::leveldb::Block::NewIterator (1,259,229,813 samples, 0.04%)bitcoind::leveldb::BlockHandle::DecodeFrom (1,156,612,863 samples, 0.03%)bitcoind::leveldb::GetVarint64 (416,693,035 samples, 0.01%)bitcoind::leveldb::Iterator::RegisterCleanup (363,166,691 samples, 0.01%)[unknown] (2,314,123,053 samples, 0.07%)[unknown] (2,156,687,800 samples, 0.06%)[unknown] (2,051,108,413 samples, 0.06%)[unknown] (1,945,393,833 samples, 0.05%)[unknown] (1,894,650,811 samples, 0.05%)[unknown] (1,894,650,811 samples, 0.05%)[unknown] 
(1,794,842,453 samples, 0.05%)[unknown] (1,315,291,384 samples, 0.04%)[unknown] (733,842,157 samples, 0.02%)[unknown] (421,059,647 samples, 0.01%)[unknown] (367,252,654 samples, 0.01%)bitcoind::crc32c::ExtendSse42 (56,521,776,403 samples, 1.59%)b..bitcoind::leveldb::ReadBlock (62,722,682,079 samples, 1.76%)b..libc.so.6::__GI___pthread_mutex_unlock_usercnt (978,769,336 samples, 0.03%)libc.so.6::cfree@GLIBC_2.2.5 (571,745,263 samples, 0.02%)bitcoind::leveldb::Table::BlockReader (93,027,689,265 samples, 2.62%)bit..libc.so.6::__memmove_avx512_unaligned_erms (525,280,305 samples, 0.01%)bitcoind::leveldb::Table::InternalGet (191,009,481,478 samples, 5.37%)bitcoind::..bitcoind::leveldb::(anonymous namespace)::ShardedLRUCache::Lookup (2,456,558,609 samples, 0.07%)bitcoind::leveldb::Hash (674,476,478 samples, 0.02%)libc.so.6::__GI___pthread_mutex_unlock_usercnt (949,827,762 samples, 0.03%)libc.so.6::__memcmp_evex_movbe (672,469,665 samples, 0.02%)libc.so.6::pthread_mutex_lock@@GLIBC_2.2.5 (770,697,666 samples, 0.02%)bitcoind::leveldb::TableCache::FindTable (5,889,647,371 samples, 0.17%)bitcoind::leveldb::TableCache::Get (199,229,141,358 samples, 5.60%)bitcoind::..bitcoind::leveldb::Version::Get (200,226,855,069 samples, 5.63%)bitcoind::..libc.so.6::__GI___pthread_mutex_unlock_usercnt (733,288,816 samples, 0.02%)bitcoind::leveldb::Version::ForEachOverlapping (215,208,197,899 samples, 6.05%)bitcoind::l..libc.so.6::__memcmp_evex_movbe (359,285,284 samples, 0.01%)bitcoind::leveldb::Version::Get (216,049,507,027 samples, 6.08%)bitcoind::l..bitcoind::leveldb::DBImpl::Get (217,672,929,621 samples, 6.12%)bitcoind::l..libc.so.6::__GI___pthread_mutex_unlock_usercnt (1,861,877,233 samples, 0.05%)bitcoind::CDBWrapper::ReadImpl[abi:cxx11] (221,752,252,623 samples, 6.24%)bitcoind::CD..libc.so.6::pthread_mutex_lock@@GLIBC_2.2.5 (1,748,433,964 samples, 0.05%)bitcoind::DecompressAmount (1,005,313,570 samples, 0.03%)bitcoind::void ScriptCompression::Unser<DataStream> (2,769,444,330 samples, 
0.08%)bitcoind::void std::vector<std::byte, zero_after_free_allocator<std::byte> >::_M_range_insert<std::byte const*> (7,911,029,894 samples, 0.22%)libc.so.6::__memmove_avx512_unaligned_erms (416,410,569 samples, 0.01%)bitcoind::CCoinsViewDB::GetCoin (247,131,705,346 samples, 6.95%)bitcoind::CCo..bitcoind::CCoinsViewBacked::GetCoin (251,714,610,750 samples, 7.08%)bitcoind::CCo..bitcoind::CCoinsViewErrorCatcher::GetCoin (257,960,090,912 samples, 7.26%)bitcoind::CCoi..bitcoind::CCoinsViewDB::GetCoin (5,789,812,101 samples, 0.16%)bitcoind::SipHashUint256Extra (686,778,601 samples, 0.02%)[unknown] (1,028,820,936 samples, 0.03%)[unknown] (974,950,139 samples, 0.03%)[unknown] (867,196,862 samples, 0.02%)[unknown] (710,030,298 samples, 0.02%)[unknown] (710,030,298 samples, 0.02%)[unknown] (600,430,034 samples, 0.02%)[unknown] (489,234,171 samples, 0.01%)[unknown] (434,975,120 samples, 0.01%)[unknown] (434,975,120 samples, 0.01%)bitcoind::std::_Hashtable<COutPoint, std::pair<COutPoint const, CCoinsCacheEntry>, PoolAllocator<std::pair<COutPoint const, CCoinsCacheEntry>, 144ul, 8ul>, std::__detail::_Select1st, std::equal_to<COutPoint>, SaltedOutpointHasher, std::__detail::_Mod_range_hashing, std::__detail::_Default_ranged_hash, std::__detail::_Prime_rehash_policy, std::__detail::_Hashtable_traits<false, false, true> >::_M_find_before_node (29,304,700,539 samples, 0.82%)bitcoind::std::_Hashtable<COutPoint, std::pair<COutPoint const, CCoinsCacheEntry>, PoolAllocator<std::pair<COutPoint const, CCoinsCacheEntry>, 144ul, 8ul>, std::__detail::_Select1st, std::equal_to<COutPoint>, SaltedOutpointHasher, std::__detail::_Mod_range_hashing, std::__detail::_Default_ranged_hash, std::__detail::_Prime_rehash_policy, std::__detail::_Hashtable_traits<false, false, true> >::_M_insert_unique_node (21,307,639,964 samples, 0.60%)bitcoind::std::_Hashtable<COutPoint, std::pair<COutPoint const, CCoinsCacheEntry>, PoolAllocator<std::pair<COutPoint const, CCoinsCacheEntry>, 144ul, 8ul>, 
std::__detail::_Select1st, std::equal_to<COutPoint>, SaltedOutpointHasher, std::__detail::_Mod_range_hashing, std::__detail::_Default_ranged_hash, std::__detail::_Prime_rehash_policy, std::__detail::_Hashtable_traits<false, false, true> >::_M_rehash (20,780,827,998 samples, 0.58%)libc.so.6::__memset_avx512_unaligned_erms (579,451,231 samples, 0.02%)[unknown] (579,451,231 samples, 0.02%)[unknown] (526,649,228 samples, 0.01%)[unknown] (526,649,228 samples, 0.01%)[unknown] (526,649,228 samples, 0.01%)[unknown] (473,772,435 samples, 0.01%)[unknown] (420,996,348 samples, 0.01%)[unknown] (368,735,591 samples, 0.01%)[unknown] (368,735,591 samples, 0.01%)bitcoind::std::__detail::_Hash_node<std::pair<COutPoint const, CCoinsCacheEntry>, false>* std::__detail::_Hashtable_alloc<PoolAllocator<std::__detail::_Hash_node<std::pair<COutPoint const, CCoinsCacheEntry>, false>, 144ul, 8ul> >::_M_allocate_node<std::piecewise_construct_t const&, std::tuple<COutPoint const&>, std::tuple<> > (4,934,629,385 samples, 0.14%)[unknown] (421,130,280 samples, 0.01%)[unknown] (368,737,467 samples, 0.01%)[unknown] (368,737,467 samples, 0.01%)bitcoind::CCoinsViewCache::FetchCoin (327,425,895,563 samples, 9.21%)bitcoind::CCoinsVi..bitcoind::CCoinsViewErrorCatcher::GetCoin (601,145,923 samples, 0.02%)bitcoind::CCoinsViewCache::GetCoin (349,247,006,292 samples, 9.82%)bitcoind::CCoinsView..bitcoind::SipHashUint256Extra (17,454,209,723 samples, 0.49%)bitcoind::std::_Hashtable<COutPoint, std::pair<COutPoint const, CCoinsCacheEntry>, PoolAllocator<std::pair<COutPoint const, CCoinsCacheEntry>, 144ul, 8ul>, std::__detail::_Select1st, std::equal_to<COutPoint>, SaltedOutpointHasher, std::__detail::_Mod_range_hashing, std::__detail::_Default_ranged_hash, std::__detail::_Prime_rehash_policy, std::__detail::_Hashtable_traits<false, false, true> >::_M_find_before_node (22,697,810,020 samples, 0.64%)bitcoind::SipHashUint256Extra (4,124,049,750 samples, 0.12%)bitcoind::SipHashUint256Extra (4,306,133,540 samples, 
0.12%)bitcoind::SipHashUint256Extra (7,085,914,542 samples, 0.20%)bitcoind::std::_Hashtable<COutPoint, std::pair<COutPoint const, CCoinsCacheEntry>, PoolAllocator<std::pair<COutPoint const, CCoinsCacheEntry>, 144ul, 8ul>, std::__detail::_Select1st, std::equal_to<COutPoint>, SaltedOutpointHasher, std::__detail::_Mod_range_hashing, std::__detail::_Default_ranged_hash, std::__detail::_Prime_rehash_policy, std::__detail::_Hashtable_traits<false, false, true> >::_M_insert_unique_node (19,180,887,889 samples, 0.54%)bitcoind::std::_Hashtable<COutPoint, std::pair<COutPoint const, CCoinsCacheEntry>, PoolAllocator<std::pair<COutPoint const, CCoinsCacheEntry>, 144ul, 8ul>, std::__detail::_Select1st, std::equal_to<COutPoint>, SaltedOutpointHasher, std::__detail::_Mod_range_hashing, std::__detail::_Default_ranged_hash, std::__detail::_Prime_rehash_policy, std::__detail::_Hashtable_traits<false, false, true> >::_M_rehash (12,199,005,039 samples, 0.34%)libc.so.6::__memset_avx512_unaligned_erms (574,777,734 samples, 0.02%)bitcoind::std::__detail::_Hash_node<std::pair<COutPoint const, CCoinsCacheEntry>, false>* std::__detail::_Hashtable_alloc<PoolAllocator<std::__detail::_Hash_node<std::pair<COutPoint const, CCoinsCacheEntry>, false>, 144ul, 8ul> >::_M_allocate_node<std::piecewise_construct_t const&, std::tuple<COutPoint const&>, std::tuple<> > (7,865,255,678 samples, 0.22%)[unknown] (1,969,736,150 samples, 0.06%)[unknown] (1,916,111,977 samples, 0.05%)[unknown] (1,812,200,695 samples, 0.05%)[unknown] (1,812,200,695 samples, 0.05%)[unknown] (1,812,200,695 samples, 0.05%)[unknown] (1,496,076,465 samples, 0.04%)[unknown] (1,234,917,855 samples, 0.03%)[unknown] (921,179,131 samples, 0.03%)[unknown] (658,036,512 samples, 0.02%)[unknown] (507,636,670 samples, 0.01%)bitcoind::CCoinsViewCache::FetchCoin (439,862,693,437 samples, 12.37%)bitcoind::CCoinsViewCache..bitcoind::CCoinsViewCache::GetCoin (567,408,453 samples, 0.02%)bitcoind::SipHashUint256Extra (11,079,411,759 samples, 
0.31%)bitcoind::CCoinsViewCache::HaveInputs (468,021,622,384 samples, 13.16%)bitcoind::CCoinsViewCache::..bitcoind::Consensus::CheckTxInputs (525,550,058,887 samples, 14.78%)bitcoind::Consensus::CheckTxInp..bitcoind::CTransaction::GetValueOut (8,116,827,965 samples, 0.23%)bitcoind::EvaluateSequenceLocks (13,084,419,728 samples, 0.37%)bitcoind::CBlockIndex::GetMedianTimePast (12,762,378,539 samples, 0.36%)bitcoind::void std::__introsort_loop<long*, long, __gnu_cxx::__ops::_Iter_less_iter> (1,776,177,595 samples, 0.05%)bitcoind::SipHashUint256Extra (3,528,590,848 samples, 0.10%)bitcoind::CCoinsViewCache::FetchCoin (9,099,104,563 samples, 0.26%)bitcoind::std::_Hashtable<COutPoint, std::pair<COutPoint const, CCoinsCacheEntry>, PoolAllocator<std::pair<COutPoint const, CCoinsCacheEntry>, 144ul, 8ul>, std::__detail::_Select1st, std::equal_to<COutPoint>, SaltedOutpointHasher, std::__detail::_Mod_range_hashing, std::__detail::_Default_ranged_hash, std::__detail::_Prime_rehash_policy, std::__detail::_Hashtable_traits<false, false, true> >::_M_find_before_node (3,448,133,251 samples, 0.10%)bitcoind::SipHashUint256Extra (373,550,141 samples, 0.01%)bitcoind::CCoinsViewCache::AccessCoin (10,147,664,939 samples, 0.29%)bitcoind::std::_Hashtable<COutPoint, std::pair<COutPoint const, CCoinsCacheEntry>, PoolAllocator<std::pair<COutPoint const, CCoinsCacheEntry>, 144ul, 8ul>, std::__detail::_Select1st, std::equal_to<COutPoint>, SaltedOutpointHasher, std::__detail::_Mod_range_hashing, std::__detail::_Default_ranged_hash, std::__detail::_Prime_rehash_policy, std::__detail::_Hashtable_traits<false, false, true> >::_M_find_before_node (368,697,772 samples, 0.01%)bitcoind::CScript::GetSigOpCount (1,181,105,155 samples, 0.03%)bitcoind::CScript::IsPayToScriptHash (361,942,649 samples, 0.01%)bitcoind::CScript::IsPushOnly (1,550,137,517 samples, 0.04%)bitcoind::CScript::IsWitnessProgram (14,154,912,421 samples, 0.40%)bitcoind::GetScriptOp (1,727,592,712 samples, 
0.05%)bitcoind::CScript::GetSigOpCount (1,617,517,251 samples, 0.05%)bitcoind::GetScriptOp (834,793,526 samples, 0.02%)bitcoind::WitnessSigOps (3,120,635,596 samples, 0.09%)bitcoind::CountWitnessSigOps (25,211,941,345 samples, 0.71%)bitcoind::CScript::GetSigOpCount (21,895,087,837 samples, 0.62%)bitcoind::GetScriptOp (11,871,223,047 samples, 0.33%)bitcoind::GetLegacySigOpCount (26,548,006,408 samples, 0.75%)bitcoind::GetScriptOp (1,822,747,918 samples, 0.05%)bitcoind::SipHashUint256Extra (1,613,835,917 samples, 0.05%)bitcoind::CCoinsViewCache::FetchCoin (6,631,397,326 samples, 0.19%)bitcoind::std::_Hashtable<COutPoint, std::pair<COutPoint const, CCoinsCacheEntry>, PoolAllocator<std::pair<COutPoint const, CCoinsCacheEntry>, 144ul, 8ul>, std::__detail::_Select1st, std::equal_to<COutPoint>, SaltedOutpointHasher, std::__detail::_Mod_range_hashing, std::__detail::_Default_ranged_hash, std::__detail::_Prime_rehash_policy, std::__detail::_Hashtable_traits<false, false, true> >::_M_find_before_node (2,817,298,340 samples, 0.08%)bitcoind::CCoinsViewCache::AccessCoin (7,316,792,317 samples, 0.21%)bitcoind::CCoinsViewCache::FetchCoin (363,943,746 samples, 0.01%)bitcoind::CScript::GetSigOpCount (1,160,904,417 samples, 0.03%)bitcoind::GetScriptOp (688,273,084 samples, 0.02%)bitcoind::GetScriptOp (2,964,048,193 samples, 0.08%)bitcoind::CScript::GetSigOpCount (5,643,658,755 samples, 0.16%)bitcoind::CScript::IsPayToScriptHash (581,631,871 samples, 0.02%)bitcoind::GetP2SHSigOpCount (15,633,133,461 samples, 0.44%)bitcoind::GetTransactionSigOpCost (84,183,784,739 samples, 2.37%)bit..libstdc++.so.6.0.32::operator delete (405,410,027 samples, 0.01%)bitcoind::SequenceLocks (1,661,951,664 samples, 0.05%)bitcoind::CalculateSequenceLocks (1,453,270,225 samples, 0.04%)bitcoind::SipHashUint256Extra (937,441,713 samples, 0.03%)bitcoind::CCoinsViewCache::FetchCoin (2,049,216,208 samples, 0.06%)bitcoind::SipHashUint256Extra (1,345,870,966 samples, 0.04%)bitcoind::std::_Hashtable<COutPoint, 
std::pair<COutPoint const, CCoinsCacheEntry>, PoolAllocator<std::pair<COutPoint const, CCoinsCacheEntry>, 144ul, 8ul>, std::__detail::_Select1st, std::equal_to<COutPoint>, SaltedOutpointHasher, std::__detail::_Mod_range_hashing, std::__detail::_Default_ranged_hash, std::__detail::_Prime_rehash_policy, std::__detail::_Hashtable_traits<false, false, true> >::_M_erase (363,086,362 samples, 0.01%)bitcoind::CCoinsViewCache::SpendCoin (20,676,663,595 samples, 0.58%)bitcoind::std::_Hashtable<COutPoint, std::pair<COutPoint const, CCoinsCacheEntry>, PoolAllocator<std::pair<COutPoint const, CCoinsCacheEntry>, 144ul, 8ul>, std::__detail::_Select1st, std::equal_to<COutPoint>, SaltedOutpointHasher, std::__detail::_Mod_range_hashing, std::__detail::_Default_ranged_hash, std::__detail::_Prime_rehash_policy, std::__detail::_Hashtable_traits<false, false, true> >::erase (2,777,265,349 samples, 0.08%)bitcoind::SipHashUint256Extra (1,428,091,877 samples, 0.04%)bitcoind::UpdateCoins (24,385,621,354 samples, 0.69%)bitcoind::std::_Hashtable<COutPoint, std::pair<COutPoint const, CCoinsCacheEntry>, PoolAllocator<std::pair<COutPoint const, CCoinsCacheEntry>, 144ul, 8ul>, std::__detail::_Select1st, std::equal_to<COutPoint>, SaltedOutpointHasher, std::__detail::_Mod_range_hashing, std::__detail::_Default_ranged_hash, std::__detail::_Prime_rehash_policy, std::__detail::_Hashtable_traits<false, false, true> >::erase (473,710,256 samples, 0.01%)bitcoind::AutoFile::write (1,453,030,200 samples, 0.04%)bitcoind::CSHA256::Write (1,519,729,645 samples, 0.04%)bitcoind::CompressAmount (618,711,609 samples, 0.02%)bitcoind::CompressScript (985,913,050 samples, 0.03%)[[ext4]] (404,866,263 samples, 0.01%)bitcoind::node::BlockManager::FindUndoPos (561,722,604 samples, 0.02%)bitcoind::FlatFileSeq::Allocate (509,374,850 samples, 0.01%)libc.so.6::posix_fallocate (509,374,850 samples, 0.01%)[unknown] (509,374,850 samples, 0.01%)[unknown] (509,374,850 samples, 0.01%)[unknown] (509,374,850 samples, 
0.01%)[unknown] (457,299,763 samples, 0.01%)bitcoind::AutoFile::write (10,042,610,399 samples, 0.28%)bitcoind::CSHA256::Write (19,844,383,315 samples, 0.56%)bitcoind::sha256_x86_shani::Transform (3,151,148,807 samples, 0.09%)bitcoind::CompressAmount (1,773,668,392 samples, 0.05%)bitcoind::CompressScript (4,638,408,540 samples, 0.13%)bitcoind::prevector<33u, unsigned char, unsigned int, int>::resize (3,040,914,869 samples, 0.09%)bitcoind::CompressAmount (831,251,028 samples, 0.02%)bitcoind::prevector<33u, unsigned char, unsigned int, int>::resize (1,914,945,145 samples, 0.05%)bitcoind::void VectorFormatter<DefaultFormatter>::Ser<SizeComputer, std::vector<CTxUndo, std::allocator<CTxUndo> > > (7,020,871,233 samples, 0.20%)bitcoind::CompressScript (2,957,454,406 samples, 0.08%)bitcoind::AutoFile::write (4,887,544,250 samples, 0.14%)bitcoind::void WriteVarInt<AutoFile, (VarIntMode)0, unsigned int> (5,868,765,238 samples, 0.17%)bitcoind::CSHA256::Write (8,012,816,481 samples, 0.23%)bitcoind::sha256_x86_shani::Transform (938,301,513 samples, 0.03%)bitcoind::void WriteVarInt<HashWriter, (VarIntMode)0, unsigned int> (12,386,753,309 samples, 0.35%)libc.so.6::__memmove_avx512_unaligned_erms (941,007,723 samples, 0.03%)libc.so.6::_IO_fwrite (1,409,554,078 samples, 0.04%)bitcoind::node::BlockManager::UndoWriteToDisk (74,178,487,109 samples, 2.09%)bi..libc.so.6::__memmove_avx512_unaligned_erms (3,806,477,393 samples, 0.11%)bitcoind::CompressAmount (730,340,863 samples, 0.02%)bitcoind::void VectorFormatter<DefaultFormatter>::Ser<SizeComputer, std::vector<CTxUndo, std::allocator<CTxUndo> > > (9,108,229,147 samples, 0.26%)bitcoind::CompressScript (3,027,453,269 samples, 0.09%)bitcoind::prevector<33u, unsigned char, unsigned int, int>::resize (2,034,465,890 samples, 0.06%)bitcoind::void WriteVarInt<AutoFile, (VarIntMode)0, unsigned int> (367,022,852 samples, 0.01%)bitcoind::void WriteVarInt<HashWriter, (VarIntMode)0, unsigned int> (521,478,522 samples, 
0.01%)bitcoind::node::BlockManager::WriteUndoDataForBlock (89,569,504,650 samples, 2.52%)bit..bitcoind::std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> >::_M_dispose (583,464,405 samples, 0.02%)libc.so.6::malloc (1,716,514,762 samples, 0.05%)bitcoind::Chainstate::ConnectBlock (855,466,273,851 samples, 24.06%)bitcoind::Chainstate::ConnectBlockbitcoind::std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> >::_M_dispose (418,091,278 samples, 0.01%)libc.so.6::cfree@GLIBC_2.2.5 (1,144,767,280 samples, 0.03%)bitcoind::Chainstate::ConnectTip (1,291,793,481,748 samples, 36.33%)bitcoind::Chainstate::ConnectTiplibstdc++.so.6.0.32::operator delete (627,918,999 samples, 0.02%)bitcoind::Chainstate::ActivateBestChainStep (1,291,995,942,063 samples, 36.34%)bitcoind::Chainstate::ActivateBestChainStepbitcoind::Chainstate::ActivateBestChain (1,292,515,820,515 samples, 36.35%)bitcoind::Chainstate::ActivateBestChainbitcoind::IsFinalTx (467,637,167 samples, 0.01%)bitcoind::void SerializeTransaction<ParamsStream<SizeComputer&, TransactionSerParams>, CTransaction> (25,890,452,766 samples, 0.73%)bitcoind::void WriteCompactSize<ParamsStream<SizeComputer&, TransactionSerParams> > (6,846,762,723 samples, 0.19%)bitcoind::ContextualCheckBlock (27,706,291,261 samples, 0.78%)bitcoind::void WriteCompactSize<ParamsStream<SizeComputer&, TransactionSerParams> > (1,089,597,648 samples, 0.03%)[[ext4]] (5,576,796,020 samples, 0.16%)[unknown] (3,815,201,507 samples, 0.11%)[unknown] (2,345,433,446 samples, 0.07%)[unknown] (520,783,293 samples, 0.01%)[[ext4]] (7,954,320,588 samples, 0.22%)[unknown] (1,662,013,865 samples, 0.05%)[unknown] (1,269,240,468 samples, 0.04%)[unknown] (705,348,263 samples, 0.02%)[unknown] (455,918,938 samples, 0.01%)[[nvme]] (807,919,787 samples, 0.02%)[[nvme]] (807,919,787 samples, 0.02%)[unknown] (807,919,787 samples, 0.02%)[unknown] (807,919,787 samples, 0.02%)[unknown] (807,919,787 samples, 0.02%)[unknown] (547,049,759 
samples, 0.02%)[unknown] (496,243,932 samples, 0.01%)[unknown] (448,114,949 samples, 0.01%)[[ext4]] (12,810,206,632 samples, 0.36%)[unknown] (3,316,731,307 samples, 0.09%)[unknown] (2,036,481,321 samples, 0.06%)[unknown] (1,478,602,939 samples, 0.04%)[unknown] (1,322,893,322 samples, 0.04%)[unknown] (1,227,580,922 samples, 0.03%)[[ext4]] (13,121,603,080 samples, 0.37%)[[ext4]] (13,121,603,080 samples, 0.37%)bitcoind::FlatFileSeq::Flush (13,525,287,477 samples, 0.38%)libc.so.6::fdatasync (13,525,287,477 samples, 0.38%)[unknown] (13,525,287,477 samples, 0.38%)[unknown] (13,525,287,477 samples, 0.38%)[unknown] (13,525,287,477 samples, 0.38%)[[ext4]] (13,525,287,477 samples, 0.38%)[unknown] (13,525,287,477 samples, 0.38%)[unknown] (13,525,287,477 samples, 0.38%)[unknown] (13,525,287,477 samples, 0.38%)[unknown] (13,525,287,477 samples, 0.38%)[unknown] (403,684,397 samples, 0.01%)[unknown] (403,684,397 samples, 0.01%)[unknown] (403,684,397 samples, 0.01%)[[ext4]] (619,895,319 samples, 0.02%)[unknown] (483,644,425 samples, 0.01%)[[ext4]] (981,510,072 samples, 0.03%)[[ext4]] (1,916,459,846 samples, 0.05%)[unknown] (398,097,615 samples, 0.01%)[[ext4]] (1,967,105,500 samples, 0.06%)[[ext4]] (1,967,105,500 samples, 0.06%)bitcoind::node::BlockManager::FindNextBlockPos (16,065,033,072 samples, 0.45%)bitcoind::node::BlockManager::FlushBlockFile (15,700,018,553 samples, 0.44%)bitcoind::node::BlockManager::FlushUndoFile (2,174,731,076 samples, 0.06%)bitcoind::FlatFileSeq::Flush (2,174,731,076 samples, 0.06%)libc.so.6::fdatasync (2,174,731,076 samples, 0.06%)[unknown] (2,174,731,076 samples, 0.06%)[unknown] (2,174,731,076 samples, 0.06%)[unknown] (2,174,731,076 samples, 0.06%)[[ext4]] (2,174,731,076 samples, 0.06%)[unknown] (2,174,731,076 samples, 0.06%)[unknown] (2,174,731,076 samples, 0.06%)[unknown] (2,174,731,076 samples, 0.06%)[unknown] (2,119,891,081 samples, 0.06%)bitcoind::AutoFile::write (5,548,941,818 samples, 0.16%)libc.so.6::__GI___fstatat64 (365,833,677 samples, 
0.01%)bitcoind::node::BlockManager::OpenBlockFile (470,569,767 samples, 0.01%)bitcoind::AutoFile::write (61,167,375,809 samples, 1.72%)b..[unknown] (598,511,547 samples, 0.02%)[unknown] (457,806,853 samples, 0.01%)[unknown] (457,806,853 samples, 0.01%)[unknown] (457,806,853 samples, 0.01%)[unknown] (409,659,414 samples, 0.01%)[unknown] (357,939,661 samples, 0.01%)bitcoind::AutoFile::write (4,647,493,060 samples, 0.13%)bitcoind::void WriteCompactSize<ParamsStream<AutoFile&, TransactionSerParams> > (6,592,272,733 samples, 0.19%)libc.so.6::_IO_fwrite (910,505,012 samples, 0.03%)bitcoind::void SerializeMany<ParamsStream<AutoFile&, TransactionSerParams>, CBlockHeader, std::vector<std::shared_ptr<CTransaction const>, std::allocator<std::shared_ptr<CTransaction const> > > > (82,131,751,453 samples, 2.31%)bit..libc.so.6::_IO_fwrite (8,527,040,897 samples, 0.24%)bitcoind::void SerializeMany<ParamsStream<SizeComputer&, TransactionSerParams>, CBlockHeader, std::vector<std::shared_ptr<CTransaction const>, std::allocator<std::shared_ptr<CTransaction const> > > > (15,937,770,258 samples, 0.45%)bitcoind::void WriteCompactSize<ParamsStream<SizeComputer&, TransactionSerParams> > (3,905,423,573 samples, 0.11%)bitcoind::void WriteCompactSize<ParamsStream<AutoFile&, TransactionSerParams> > (571,858,007 samples, 0.02%)bitcoind::node::BlockManager::WriteBlockToDisk (106,357,642,754 samples, 2.99%)bitc..bitcoind::void WriteCompactSize<ParamsStream<SizeComputer&, TransactionSerParams> > (1,696,751,451 samples, 0.05%)bitcoind::void SerializeMany<ParamsStream<SizeComputer&, TransactionSerParams>, CBlockHeader, std::vector<std::shared_ptr<CTransaction const>, std::allocator<std::shared_ptr<CTransaction const> > > > (17,168,001,989 samples, 0.48%)bitcoind::void WriteCompactSize<ParamsStream<SizeComputer&, TransactionSerParams> > (4,825,716,114 samples, 0.14%)bitcoind::node::BlockManager::SaveBlockToDisk (141,154,624,112 samples, 3.97%)bitcoi..bitcoind::void 
WriteCompactSize<ParamsStream<SizeComputer&, TransactionSerParams> > (1,511,914,109 samples, 0.04%)bitcoind::ChainstateManager::AcceptBlock (169,805,644,100 samples, 4.78%)bitcoind..bitcoind::void SerializeTransaction<ParamsStream<SizeComputer&, TransactionSerParams>, CTransaction> (419,732,705 samples, 0.01%)bitcoind::CScript::GetSigOpCount (1,244,733,942 samples, 0.04%)bitcoind::memcmp@plt (416,583,431 samples, 0.01%)bitcoind::std::_Rb_tree<COutPoint, COutPoint, std::_Identity<COutPoint>, std::less<COutPoint>, std::allocator<COutPoint> >::_M_erase (1,490,186,398 samples, 0.04%)bitcoind::std::pair<std::_Rb_tree_iterator<COutPoint>, bool> std::_Rb_tree<COutPoint, COutPoint, std::_Identity<COutPoint>, std::less<COutPoint>, std::allocator<COutPoint> >::_M_insert_unique<COutPoint const&> (4,247,810,353 samples, 0.12%)bitcoind::void WriteCompactSize<ParamsStream<SizeComputer&, TransactionSerParams> > (781,801,877 samples, 0.02%)libc.so.6::__memcmp_evex_movbe (6,070,441,149 samples, 0.17%)libc.so.6::cfree@GLIBC_2.2.5 (421,482,290 samples, 0.01%)libstdc++.so.6.0.32::operator delete (614,232,991 samples, 0.02%)bitcoind::CheckTransaction (25,650,523,240 samples, 0.72%)libstdc++.so.6.0.32::std::_Rb_tree_insert_and_rebalance (2,281,327,330 samples, 0.06%)bitcoind::CScript::GetSigOpCount (19,161,186,078 samples, 0.54%)bitcoind::GetScriptOp (8,992,060,021 samples, 0.25%)bitcoind::GetLegacySigOpCount (22,614,517,690 samples, 0.64%)bitcoind::GetScriptOp (1,176,069,512 samples, 0.03%)bitcoind::std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> >::_M_dispose (1,349,955,285 samples, 0.04%)bitcoind::void SerializeTransaction<ParamsStream<SizeComputer&, TransactionSerParams>, CTransaction> (6,676,130,736 samples, 0.19%)bitcoind::void WriteCompactSize<ParamsStream<SizeComputer&, TransactionSerParams> > (1,329,967,416 samples, 0.04%)bitcoind::CheckBlock (60,550,319,748 samples, 1.70%)b..bitcoind::void WriteCompactSize<ParamsStream<SizeComputer&, 
TransactionSerParams> > (572,803,846 samples, 0.02%)bitcoind::ChainstateManager::ProcessNewBlock (1,523,688,403,640 samples, 42.85%)bitcoind::ChainstateManager::ProcessNewBlockbitcoind::sha256d64_x86_shani::Transform_2way (15,194,464,935 samples, 0.43%)bitcoind::BlockMerkleRoot (16,172,687,252 samples, 0.45%)bitcoind::ComputeMerkleRoot (15,499,928,925 samples, 0.44%)bitcoind::SHA256D64 (15,246,405,066 samples, 0.43%)bitcoind::CheckMerkleRoot (16,532,547,442 samples, 0.46%)libc.so.6::__memset_avx512_unaligned_erms (359,860,190 samples, 0.01%)bitcoind::sha256d64_x86_shani::Transform_2way (12,972,294,835 samples, 0.36%)bitcoind::SHA256D64 (13,025,009,373 samples, 0.37%)bitcoind::IsBlockMutated (30,129,022,002 samples, 0.85%)bitcoind::CheckWitnessMalleation (13,596,474,560 samples, 0.38%)bitcoind::BlockWitnessMerkleRoot (13,596,474,560 samples, 0.38%)bitcoind::ComputeMerkleRoot (13,077,728,889 samples, 0.37%)bitcoind::void (anonymous namespace)::PeerManagerImpl::MakeAndPushMessage<std::vector<CInv, std::allocator<CInv> >&> (406,479,193 samples, 0.01%)bitcoind::CConnman::PushMessage (406,479,193 samples, 0.01%)bitcoind::std::vector<unsigned char, std::allocator<unsigned char> >::_M_default_append (367,056,757 samples, 0.01%)bitcoind::unsigned long ReadCompactSize<ParamsStream<DataStream&, TransactionSerParams> > (622,762,372 samples, 0.02%)bitcoind::CTransaction::ComputeHasWitness (1,387,667,716 samples, 0.04%)bitcoind::CSHA256::Write (17,955,645,390 samples, 0.51%)bitcoind::sha256_x86_shani::Transform (11,932,913,194 samples, 0.34%)bitcoind::memcpy@plt (418,918,061 samples, 0.01%)bitcoind::sha256_x86_shani::Transform (3,306,980,273 samples, 0.09%)bitcoind::CSHA256::Finalize (22,917,960,073 samples, 0.64%)libc.so.6::__memmove_avx512_unaligned_erms (668,127,949 samples, 0.02%)bitcoind::CSHA256::Write (3,163,584,691 samples, 0.09%)bitcoind::CSHA256::Write (33,313,763,000 samples, 0.94%)bitcoind::sha256_x86_shani::Transform (14,194,928,537 samples, 
0.40%)bitcoind::sha256_x86_shani::Transform (767,994,599 samples, 0.02%)bitcoind::CSHA256::Write (5,341,265,376 samples, 0.15%)bitcoind::void WriteCompactSize<ParamsStream<HashWriter&, TransactionSerParams> > (7,984,745,468 samples, 0.22%)bitcoind::void SerializeTransaction<ParamsStream<HashWriter&, TransactionSerParams>, CTransaction> (50,933,406,220 samples, 1.43%)b..libc.so.6::__memmove_avx512_unaligned_erms (5,183,727,187 samples, 0.15%)bitcoind::void WriteCompactSize<ParamsStream<HashWriter&, TransactionSerParams> > (1,613,593,834 samples, 0.05%)bitcoind::CTransaction::ComputeHash (80,845,793,271 samples, 2.27%)bit..bitcoind::CSHA256::Write (23,348,148,278 samples, 0.66%)bitcoind::sha256_x86_shani::Transform (11,595,812,714 samples, 0.33%)bitcoind::CSHA256::Finalize (24,335,325,870 samples, 0.68%)bitcoind::CSHA256::Write (2,288,432,816 samples, 0.06%)bitcoind::CSHA256::Write (64,681,112,465 samples, 1.82%)bi..bitcoind::sha256_x86_shani::Transform (33,677,349,718 samples, 0.95%)bitcoind::sha256_x86_shani::Transform (622,627,277 samples, 0.02%)bitcoind::CSHA256::Write (11,395,509,513 samples, 0.32%)bitcoind::sha256_x86_shani::Transform (523,186,685 samples, 0.01%)bitcoind::void WriteCompactSize<ParamsStream<HashWriter&, TransactionSerParams> > (17,046,149,334 samples, 0.48%)libc.so.6::__memmove_avx512_unaligned_erms (2,169,704,353 samples, 0.06%)bitcoind::void SerializeTransaction<ParamsStream<HashWriter&, TransactionSerParams>, CTransaction> (92,366,151,212 samples, 2.60%)bit..libc.so.6::__memmove_avx512_unaligned_erms (5,813,350,330 samples, 0.16%)bitcoind::void WriteCompactSize<ParamsStream<HashWriter&, TransactionSerParams> > (2,181,533,875 samples, 0.06%)bitcoind::CTransaction::ComputeWitnessHash (122,098,239,092 samples, 3.43%)bitco..bitcoind::CTransaction::CTransaction (213,407,475,563 samples, 6.00%)bitcoind::C..bitcoind::CTransaction::ComputeHasWitness (420,121,661 samples, 0.01%)bitcoind::CTransaction::ComputeHash (409,129,353 samples, 
0.01%)bitcoind::DataStream::read (3,530,026,319 samples, 0.10%)bitcoind::operator new (628,502,415 samples, 0.02%)bitcoind::std::vector<unsigned char, std::allocator<unsigned char> >::_M_default_append (6,309,702,925 samples, 0.18%)bitcoind::unsigned long ReadCompactSize<ParamsStream<DataStream&, TransactionSerParams> > (4,193,618,734 samples, 0.12%)bitcoind::void Unserialize<ParamsStream<DataStream&, TransactionSerParams>, 28u, unsigned char> (727,834,750 samples, 0.02%)bitcoind::unsigned long ReadCompactSize<ParamsStream<DataStream&, TransactionSerParams> > (3,066,020,716 samples, 0.09%)bitcoind::void Unserialize<ParamsStream<DataStream&, TransactionSerParams>, 28u, unsigned char> (3,105,560,893 samples, 0.09%)bitcoind::unsigned long ReadCompactSize<ParamsStream<DataStream&, TransactionSerParams> > (1,455,846,726 samples, 0.04%)libc.so.6::__memmove_avx512_unaligned_erms (465,834,593 samples, 0.01%)bitcoind::void VectorFormatter<DefaultFormatter>::Unser<ParamsStream<DataStream&, TransactionSerParams>, std::vector<CTxIn, std::allocator<CTxIn> > > (15,677,650,112 samples, 0.44%)bitcoind::unsigned long ReadCompactSize<ParamsStream<DataStream&, TransactionSerParams> > (1,078,029,303 samples, 0.03%)bitcoind::void Unserialize<ParamsStream<DataStream&, TransactionSerParams>, 28u, unsigned char> (6,171,925,860 samples, 0.17%)bitcoind::unsigned long ReadCompactSize<ParamsStream<DataStream&, TransactionSerParams> > (982,486,879 samples, 0.03%)libc.so.6::__memmove_avx512_unaligned_erms (1,458,516,290 samples, 0.04%)bitcoind::void VectorFormatter<DefaultFormatter>::Unser<ParamsStream<DataStream&, TransactionSerParams>, std::vector<CTxOut, std::allocator<CTxOut> > > (13,963,877,725 samples, 0.39%)libc.so.6::__memmove_avx512_unaligned_erms (1,048,169,614 samples, 0.03%)libc.so.6::__memset_avx512_unaligned (1,046,482,105 samples, 0.03%)libc.so.6::__memset_avx512_unaligned_erms (1,963,080,141 samples, 0.06%)libc.so.6::malloc (3,025,102,825 samples, 
0.09%)libstdc++.so.6.0.32::malloc@plt (1,462,651,744 samples, 0.04%)bitcoind::void Unserialize<ParamsStream<DataStream&, TransactionSerParams>, CTransaction> (283,898,001,379 samples, 7.98%)bitcoind::void U..libstdc++.so.6.0.32::operator new (2,257,486,798 samples, 0.06%)bitcoind::void VectorFormatter<DefaultFormatter>::Unser<ParamsStream<DataStream&, TransactionSerParams>, std::vector<CTxIn, std::allocator<CTxIn> > > (1,090,260,916 samples, 0.03%)libc.so.6::__memmove_avx512_unaligned_erms (2,753,503,546 samples, 0.08%)libc.so.6::malloc (1,149,716,024 samples, 0.03%)bitcoind::void ParamsWrapper<TransactionSerParams, CBlock>::Unserialize<DataStream> (291,189,121,636 samples, 8.19%)bitcoind::void P..bitcoind::void VectorFormatter<DefaultFormatter>::Unser<ParamsStream<DataStream&, TransactionSerParams>, std::vector<std::shared_ptr<CTransaction const>, std::allocator<std::shared_ptr<CTransaction const> > > > (291,134,772,004 samples, 8.19%)bitcoind::void V..libstdc++.so.6.0.32::operator new (518,854,210 samples, 0.01%)libc.so.6::__memset_avx512_unaligned_erms (3,588,859,593 samples, 0.10%)bitcoind::CConnman::ThreadMessageHandler (1,852,055,734,561 samples, 52.09%)bitcoind::CConnman::ThreadMessageHandlerlibstdc++.so.6.0.32::execute_native_thread_routine (1,852,107,180,016 samples, 52.09%)libstdc++.so.6.0.32::execute_native_thread_routinebitcoind::std::thread::_State_impl<std::thread::_Invoker<std::tuple<void (*)(std::basic_string_view<char, std::char_traits<char> >, std::function<void ()>), char const*, CConnman::Start(CScheduler&, CConnman::Options const&)::{lambda()#5}> > >::_M_run (1,852,107,180,016 samples, 52.09%)bitcoind::std::thread::_State_impl<std::thread::_Invoker<std::tuple<void (*)(std::basic_string_view<char, std::char..bitcoind::util::TraceThread (1,852,107,180,016 samples, 52.09%)bitcoind::util::TraceThreadlibstdc++.so.6.0.32::std::__cxx11::basic_stringbuf<char, std::char_traits<char>, std::allocator<char> >::overflow (397,900,679 samples, 0.01%)b-msghand 
(2,401,934,372,954 samples, 67.55%)b-msghand[[igc]] (638,737,826 samples, 0.02%)[unknown] (492,740,386 samples, 0.01%)[unknown] (492,740,386 samples, 0.01%)[unknown] (492,740,386 samples, 0.01%)libc.so.6::__libc_recv (23,769,090,268 samples, 0.67%)[unknown] (23,681,676,959 samples, 0.67%)[unknown] (23,585,908,630 samples, 0.66%)[unknown] (23,544,049,599 samples, 0.66%)[unknown] (23,499,819,825 samples, 0.66%)[unknown] (23,453,162,931 samples, 0.66%)[unknown] (23,205,326,716 samples, 0.65%)[unknown] (23,046,242,743 samples, 0.65%)[unknown] (23,000,657,790 samples, 0.65%)[unknown] (22,592,454,604 samples, 0.64%)[unknown] (21,715,983,496 samples, 0.61%)[unknown] (20,537,782,242 samples, 0.58%)[unknown] (19,311,079,312 samples, 0.54%)[unknown] (6,108,735,942 samples, 0.17%)[unknown] (1,360,583,546 samples, 0.04%)bitcoind::std::vector<std::byte, zero_after_free_allocator<std::byte> >::_M_fill_insert (16,619,401,507 samples, 0.47%)bitcoind::V2Transport::GetReceivedMessage (16,718,730,797 samples, 0.47%)[[igc]] (507,437,414 samples, 0.01%)[unknown] (412,781,498 samples, 0.01%)bitcoind::ChaCha20::Crypt (134,944,431,601 samples, 3.80%)bitcoi..bitcoind::ChaCha20Aligned::Crypt (134,944,431,601 samples, 3.80%)bitcoi..[unknown] (955,536,462 samples, 0.03%)[unknown] (955,536,462 samples, 0.03%)[unknown] (906,440,192 samples, 0.03%)[unknown] (861,463,927 samples, 0.02%)[unknown] (760,654,093 samples, 0.02%)[unknown] (658,510,836 samples, 0.02%)bitcoind::BIP324Cipher::Decrypt (196,638,059,936 samples, 5.53%)bitcoind::..bitcoind::FSChaCha20Poly1305::Decrypt (196,638,059,936 samples, 5.53%)bitcoind::..bitcoind::AEADChaCha20Poly1305::Decrypt (196,638,059,936 samples, 5.53%)bitcoind::..bitcoind::poly1305_donna::poly1305_update (61,693,628,335 samples, 1.74%)b..bitcoind::poly1305_donna::poly1305_blocks (61,693,628,335 samples, 1.74%)b..[unknown] (655,063,915 samples, 0.02%)[unknown] (607,270,235 samples, 0.02%)[unknown] (525,964,847 samples, 0.01%)[unknown] (525,964,847 samples, 
0.01%)[unknown] (470,111,416 samples, 0.01%)[unknown] (470,107,658 samples, 0.01%)bitcoind::V2Transport::ProcessReceivedPacketBytes (198,460,164,481 samples, 5.58%)bitcoind::..libc.so.6::__memset_avx512_unaligned_erms (1,781,860,401 samples, 0.05%)bitcoind::V2Transport::ReceivedBytes (203,432,631,557 samples, 5.72%)bitcoind::..libc.so.6::__memmove_avx512_unaligned_erms (4,655,332,308 samples, 0.13%)libc.so.6::__memmove_avx512_unaligned_erms (10,715,799,436 samples, 0.30%)bitcoind::CNode::ReceiveMsgBytes (231,225,287,054 samples, 6.50%)bitcoind::CN..bitcoind::CConnman::SocketHandlerConnected (231,463,366,433 samples, 6.51%)bitcoind::CC..libc.so.6::__poll (3,830,838,327 samples, 0.11%)[unknown] (3,830,838,327 samples, 0.11%)[unknown] (3,782,920,191 samples, 0.11%)[unknown] (3,725,807,764 samples, 0.10%)[unknown] (3,522,157,004 samples, 0.10%)[unknown] (3,150,768,515 samples, 0.09%)[unknown] (2,627,277,437 samples, 0.07%)[unknown] (2,338,467,135 samples, 0.07%)[unknown] (2,037,878,870 samples, 0.06%)[unknown] (1,480,962,324 samples, 0.04%)[unknown] (688,242,613 samples, 0.02%)bitcoind::CConnman::SocketHandler (236,436,484,949 samples, 6.65%)bitcoind::CCo..b-net (260,905,688,952 samples, 7.34%)b-netlibstdc++.so.6.0.32::execute_native_thread_routine (236,875,778,634 samples, 6.66%)libstdc++.so...bitcoind::std::thread::_State_impl<std::thread::_Invoker<std::tuple<void (*)(std::basic_string_view<char, std::char_traits<char> >, std::function<void ()>), char const*, CConnman::Start(CScheduler&, CConnman::Options const&)::{lambda()#1}> > >::_M_run (236,875,778,634 samples, 6.66%)bitcoind::std..bitcoind::util::TraceThread (236,875,778,634 samples, 6.66%)bitcoind::uti..bitcoind::CConnman::ThreadSocketHandler (236,875,778,634 samples, 6.66%)bitcoind::CCo..libc.so.6::_int_free_create_chunk (982,572,444 samples, 0.03%)libc.so.6::_int_free_merge_chunk (797,147,451 samples, 0.02%)[unknown] (2,170,942,655 samples, 0.06%)libc.so.6::__futex_abstimed_wait_common (459,293,920 samples, 
0.01%)[unknown] (459,293,920 samples, 0.01%)[unknown] (405,487,988 samples, 0.01%)[unknown] (405,482,438 samples, 0.01%)[unknown] (405,482,438 samples, 0.01%)[unknown] (356,784,451 samples, 0.01%)libc.so.6::__lll_lock_wait_private (57,276,007,979 samples, 1.61%)l..[unknown] (54,932,210,267 samples, 1.54%)[..[unknown] (52,306,124,993 samples, 1.47%)[..[unknown] (51,843,804,338 samples, 1.46%)[..[unknown] (49,115,074,635 samples, 1.38%)[..[unknown] (47,020,328,627 samples, 1.32%)[unknown] (41,124,744,672 samples, 1.16%)[unknown] (38,571,784,780 samples, 1.08%)[unknown] (36,085,617,902 samples, 1.01%)[unknown] (32,172,048,607 samples, 0.90%)[unknown] (24,296,172,973 samples, 0.68%)[unknown] (14,033,556,774 samples, 0.39%)[unknown] (7,508,395,799 samples, 0.21%)[unknown] (3,295,574,070 samples, 0.09%)[unknown] (1,590,496,727 samples, 0.04%)[unknown] (1,002,849,637 samples, 0.03%)[unknown] (414,545,859 samples, 0.01%)libc.so.6::__lll_lock_wake_private (11,041,124,764 samples, 0.31%)[unknown] (10,991,162,572 samples, 0.31%)[unknown] (9,603,504,474 samples, 0.27%)[unknown] (9,459,439,012 samples, 0.27%)[unknown] (7,207,430,735 samples, 0.20%)[unknown] (5,830,933,319 samples, 0.16%)[unknown] (1,889,493,619 samples, 0.05%)[unknown] (394,342,984 samples, 0.01%)libc.so.6::_int_free (67,830,842,133 samples, 1.91%)li..libc.so.6::_int_free_merge_chunk (832,998,780 samples, 0.02%)libc.so.6::cfree@GLIBC_2.2.5 (2,087,601,863 samples, 0.06%)libc.so.6::malloc_consolidate (3,954,686,383 samples, 0.11%)libc.so.6::unlink_chunk.isra.0 (497,585,449 samples, 0.01%)bitcoind::CRollingBloomFilter::insert (356,229,732 samples, 0.01%)[unknown] (444,029,098 samples, 0.01%)[unknown] (397,328,353 samples, 0.01%)[unknown] (397,328,353 samples, 0.01%)[unknown] (397,328,353 samples, 0.01%)[unknown] (397,328,353 samples, 0.01%)bitcoind::CRollingBloomFilter::insert (165,056,371,702 samples, 4.64%)bitcoind..bitcoind::MurmurHash3 (79,485,956,130 samples, 2.24%)bit..[unknown] (508,285,343 samples, 
0.01%)[unknown] (450,228,615 samples, 0.01%)[unknown] (404,433,625 samples, 0.01%)[unknown] (404,433,625 samples, 0.01%)[unknown] (404,433,625 samples, 0.01%)[unknown] (404,433,625 samples, 0.01%)bitcoind::MurmurHash3 (5,783,718,949 samples, 0.16%)bitcoind::TxOrphanage::EraseForBlock (4,219,830,042 samples, 0.12%)bitcoind::std::_Rb_tree<COutPoint, std::pair<COutPoint const, std::set<std::_Rb_tree_iterator<std::pair<transaction_identifier<true> const, TxOrphanage::OrphanTx> >, TxOrphanage::IteratorComparator, std::allocator<std::_Rb_tree_iterator<std::pair<transaction_identifier<true> const, TxOrphanage::OrphanTx> > > > >, std::_Select1st<std::pair<COutPoint const, std::set<std::_Rb_tree_iterator<std::pair<transaction_identifier<true> const, TxOrphanage::OrphanTx> >, TxOrphanage::IteratorComparator, std::allocator<std::_Rb_tree_iterator<std::pair<transaction_identifier<true> const, TxOrphanage::OrphanTx> > > > > >, std::less<COutPoint>, std::allocator<std::pair<COutPoint const, std::set<std::_Rb_tree_iterator<std::pair<transaction_identifier<true> const, TxOrphanage::OrphanTx> >, TxOrphanage::IteratorComparator, std::allocator<std::_Rb_tree_iterator<std::pair<transaction_identifier<true> const, TxOrphanage::OrphanTx> > > > > > >::find (834,275,777 samples, 0.02%)bitcoind::node::TxDownloadManagerImpl::BlockConnected (176,131,189,628 samples, 4.95%)bitcoind:..bitcoind::TxRequestTracker::ForgetTxHash (789,439,865 samples, 0.02%)bitcoind::std::_Function_handler<void (), ValidationSignals::BlockConnected(ChainstateRole, std::shared_ptr<CBlock const> const&, CBlockIndex const*)::{lambda()#2}>::_M_invoke (177,028,683,872 samples, 4.98%)bitcoind:..bitcoind::std::_Sp_counted_ptr_inplace<CTransaction const, std::allocator<void>, (__gnu_cxx::_Lock_policy)2>::_M_dispose (18,094,676,466 samples, 0.51%)libc.so.6::cfree@GLIBC_2.2.5 (20,756,908,966 samples, 0.58%)bitcoind::std::_Sp_counted_ptr_inplace<CBlock, std::allocator<void>, (__gnu_cxx::_Lock_policy)2>::_M_dispose 
(48,020,349,476 samples, 1.35%)b..libstdc++.so.6.0.32::operator delete (7,010,168,745 samples, 0.20%)bitcoind::std::_Sp_counted_ptr_inplace<CTransaction const, std::allocator<void>, (__gnu_cxx::_Lock_policy)2>::_M_dispose (595,474,492 samples, 0.02%)libc.so.6::cfree@GLIBC_2.2.5 (1,199,917,863 samples, 0.03%)bitcoind::std::_Function_handler<void (), ValidationSignals::BlockConnected(ChainstateRole, std::shared_ptr<CBlock const> const&, CBlockIndex const*)::{lambda()#2}>::_M_manager (50,738,017,178 samples, 1.43%)b..bitcoind::std::_Sp_counted_base<(__gnu_cxx::_Lock_policy)2>::_M_release (50,738,017,178 samples, 1.43%)b..libstdc++.so.6.0.32::operator delete (678,866,047 samples, 0.02%)bitcoind::CBlockPolicyEstimator::processBlock (2,721,087,031 samples, 0.08%)bitcoind::TxConfirmStats::UpdateMovingAverages (2,530,304,686 samples, 0.07%)bitcoind::std::_Function_handler<void (), ValidationSignals::MempoolTransactionsRemovedForBlock(std::vector<RemovedMempoolTransactionInfo, std::allocator<RemovedMempoolTransactionInfo> > const&, unsigned int)::{lambda()#2}>::_M_invoke (2,804,941,944 samples, 0.08%)bitcoind::SerialTaskRunner::ProcessQueue (230,828,220,555 samples, 6.49%)bitcoind::Se..bitcoind::CScheduler::serviceQueue (231,341,597,555 samples, 6.51%)bitcoind::CS..bitcoind::std::_Function_handler<void (), Repeat(CScheduler&, std::function<void ()>, std::chrono::duration<long, std::ratio<1l, 1000l> >)::{lambda()#1}>::_M_invoke (386,989,959 samples, 0.01%)bitcoind::Repeat (386,989,959 samples, 0.01%)bitcoind::CSHA512::Finalize (386,989,959 samples, 0.01%)b-scheduler (378,036,629,725 samples, 10.63%)b-schedulerlibstdc++.so.6.0.32::execute_native_thread_routine (231,550,611,141 samples, 6.51%)libstdc++.so..bitcoind::std::thread::_State_impl<std::thread::_Invoker<std::tuple<void (*)(std::basic_string_view<char, std::char_traits<char> >, std::function<void ()>), char const*, AppInitMain(node::NodeContext&, interfaces::BlockAndHeaderTipInfo*)::{lambda()#1}> > >::_M_run 
(231,550,611,141 samples, 6.51%)bitcoind::st..bitcoind::util::TraceThread (231,550,611,141 samples, 6.51%)bitcoind::ut..[unknown] (1,418,945,425 samples, 0.04%)[unknown] (1,418,945,425 samples, 0.04%)[unknown] (1,418,945,425 samples, 0.04%)[unknown] (1,418,945,425 samples, 0.04%)[unknown] (1,418,945,425 samples, 0.04%)[unknown] (1,418,945,425 samples, 0.04%)[unknown] (1,418,945,425 samples, 0.04%)[unknown] (1,368,549,335 samples, 0.04%)[unknown] (1,263,446,697 samples, 0.04%)[unknown] (1,105,228,005 samples, 0.03%)[unknown] (684,110,353 samples, 0.02%)[unknown] (1,463,102,999 samples, 0.04%)libc.so.6::_int_malloc (1,478,820,457 samples, 0.04%)[unknown] (1,323,329,878 samples, 0.04%)[unknown] (1,219,148,488 samples, 0.03%)[unknown] (1,167,736,581 samples, 0.03%)[unknown] (1,167,736,581 samples, 0.03%)[unknown] (1,115,451,061 samples, 0.03%)[unknown] (1,014,330,812 samples, 0.03%)[unknown] (911,337,057 samples, 0.03%)[unknown] (714,835,817 samples, 0.02%)[unknown] (456,457,319 samples, 0.01%)[unknown] (3,458,133,839 samples, 0.10%)bitcoind::CDBWrapper::~CDBWrapper (1,160,687,762 samples, 0.03%)bitcoind::leveldb::DBImpl::~DBImpl (1,160,687,762 samples, 0.03%)bitcoind::leveldb::DBImpl::~DBImpl (1,160,687,762 samples, 0.03%)bitcoind::leveldb::TableCache::~TableCache (1,160,687,762 samples, 0.03%)bitcoind::leveldb::(anonymous namespace)::ShardedLRUCache::~ShardedLRUCache (1,160,687,762 samples, 0.03%)bitcoind::leveldb::(anonymous namespace)::LRUCache::~LRUCache (1,160,687,762 samples, 0.03%)bitcoind::leveldb::DeleteEntry (1,160,687,762 samples, 0.03%)libc.so.6::__munmap (1,160,687,762 samples, 0.03%)[unknown] (1,160,687,762 samples, 0.03%)[unknown] (1,160,687,762 samples, 0.03%)[unknown] (1,160,687,762 samples, 0.03%)[unknown] (1,160,687,762 samples, 0.03%)[unknown] (1,160,687,762 samples, 0.03%)[unknown] (1,160,687,762 samples, 0.03%)[unknown] (1,160,687,762 samples, 0.03%)[unknown] (1,160,687,762 samples, 0.03%)[unknown] (1,160,687,762 samples, 0.03%)[unknown] 
(580,697,270 samples, 0.02%)bitcoind::leveldb::PutVarint32 (363,737,260 samples, 0.01%)bitcoind::leveldb::PutLengthPrefixedSlice (571,217,019 samples, 0.02%)bitcoind::leveldb::WriteBatch::Delete (2,702,574,018 samples, 0.08%)bitcoind::leveldb::WriteBatchInternal::SetCount (1,715,286,573 samples, 0.05%)bitcoind::leveldb::WriteBatchInternal::SetCount (1,453,616,163 samples, 0.04%)bitcoind::CDBBatch::EraseImpl (5,090,452,967 samples, 0.14%)bitcoind::leveldb::PutVarint32 (1,872,876,736 samples, 0.05%)bitcoind::leveldb::PutLengthPrefixedSlice (2,343,591,543 samples, 0.07%)bitcoind::leveldb::PutVarint32 (572,117,605 samples, 0.02%)bitcoind::leveldb::PutVarint32 (567,491,257 samples, 0.02%)bitcoind::leveldb::PutLengthPrefixedSlice (938,977,738 samples, 0.03%)bitcoind::leveldb::WriteBatchInternal::Count (619,405,896 samples, 0.02%)bitcoind::leveldb::WriteBatch::Put (2,689,024,451 samples, 0.08%)bitcoind::CDBBatch::WriteImpl (10,634,135,335 samples, 0.30%)bitcoind::leveldb::GetLengthPrefixedSlice (463,225,027 samples, 0.01%)bitcoind::leveldb::GetLengthPrefixedSlice (6,489,010,398 samples, 0.18%)bitcoind::leveldb::GetVarint32 (3,004,905,545 samples, 0.08%)bitcoind::leveldb::GetVarint32 (1,160,323,181 samples, 0.03%)bitcoind::leveldb::Arena::AllocateAligned (406,996,319 samples, 0.01%)bitcoind::leveldb::(anonymous namespace)::BytewiseComparatorImpl::Compare (717,398,174 samples, 0.02%)bitcoind::leveldb::MemTable::KeyComparator::operator (5,108,835,410 samples, 0.14%)bitcoind::leveldb::InternalKeyComparator::Compare (3,324,232,989 samples, 0.09%)bitcoind::leveldb::InternalKeyComparator::Compare (4,244,823,969 samples, 0.12%)bitcoind::leveldb::(anonymous namespace)::BytewiseComparatorImpl::Compare (3,179,677,931 samples, 0.09%)bitcoind::leveldb::(anonymous namespace)::BytewiseComparatorImpl::Compare (33,796,395,298 samples, 0.95%)bitcoind::memcmp@plt (943,665,852 samples, 0.03%)bitcoind::leveldb::SkipList<char const*, leveldb::MemTable::KeyComparator>::Insert (185,524,871,422 
samples, 5.22%)bitcoind:..bitcoind::leveldb::SkipList<char const*, leveldb::MemTable::KeyComparator>::FindGreaterOrEqual (178,286,921,652 samples, 5.01%)bitcoind:..bitcoind::leveldb::MemTable::KeyComparator::operator (98,574,957,808 samples, 2.77%)bitc..bitcoind::leveldb::InternalKeyComparator::Compare (75,114,665,063 samples, 2.11%)bi..libc.so.6::__memcmp_evex_movbe (8,323,863,446 samples, 0.23%)bitcoind::leveldb::MemTable::Add (188,893,844,275 samples, 5.31%)bitcoind::..bitcoind::leveldb::VarintLength (766,638,876 samples, 0.02%)bitcoind::leveldb::WriteBatchInternal::InsertInto (199,306,778,687 samples, 5.61%)bitcoind::..bitcoind::leveldb::WriteBatch::Iterate (198,740,714,232 samples, 5.59%)bitcoind::..bitcoind::crc32c::ExtendSse42 (471,197,509 samples, 0.01%)[[ext4]] (679,093,773 samples, 0.02%)[unknown] (522,409,669 samples, 0.01%)[[ext4]] (1,096,838,426 samples, 0.03%)[[ext4]] (1,722,362,275 samples, 0.05%)[unknown] (625,523,849 samples, 0.02%)[unknown] (574,147,567 samples, 0.02%)[unknown] (469,028,477 samples, 0.01%)[unknown] (469,028,477 samples, 0.01%)[unknown] (365,648,781 samples, 0.01%)[[ext4]] (4,389,086,262 samples, 0.12%)[unknown] (2,561,710,219 samples, 0.07%)[unknown] (2,561,710,219 samples, 0.07%)[unknown] (2,352,117,097 samples, 0.07%)[unknown] (1,880,182,821 samples, 0.05%)[unknown] (1,308,734,829 samples, 0.04%)[unknown] (523,736,031 samples, 0.01%)[[ext4]] (5,069,490,473 samples, 0.14%)[unknown] (5,069,490,473 samples, 0.14%)[unknown] (575,311,800 samples, 0.02%)[unknown] (470,084,210 samples, 0.01%)libc.so.6::__GI___libc_write (5,174,401,795 samples, 0.15%)[unknown] (5,174,401,795 samples, 0.15%)[unknown] (5,174,401,795 samples, 0.15%)[unknown] (5,174,401,795 samples, 0.15%)[unknown] (5,174,401,795 samples, 0.15%)bitcoind::CDBWrapper::WriteBatch (205,215,727,495 samples, 5.77%)bitcoind::C..bitcoind::leveldb::DBImpl::Write (205,215,727,495 samples, 5.77%)bitcoind::l..bitcoind::leveldb::log::Writer::AddRecord (5,908,948,808 samples, 
0.17%)bitcoind::leveldb::log::Writer::EmitPhysicalRecord (5,908,948,808 samples, 0.17%)bitcoind::CompressScript (1,030,024,630 samples, 0.03%)bitcoind::prevector<33u, unsigned char, unsigned int, int>::resize (459,767,226 samples, 0.01%)bitcoind::void WriteVarInt<DataStream, (VarIntMode)0, unsigned int> (11,377,276,951 samples, 0.32%)bitcoind::void std::vector<std::byte, zero_after_free_allocator<std::byte> >::_M_range_insert<std::byte const*> (8,938,854,890 samples, 0.25%)bitcoind::CCoinsViewDB::BatchWrite (244,230,597,449 samples, 6.87%)bitcoind::CCo..bitcoind::void std::vector<std::byte, zero_after_free_allocator<std::byte> >::_M_range_insert<std::byte const*> (6,482,431,215 samples, 0.18%)bitcoind::std::_Hashtable<COutPoint, std::pair<COutPoint const, CCoinsCacheEntry>, PoolAllocator<std::pair<COutPoint const, CCoinsCacheEntry>, 144ul, 8ul>, std::__detail::_Select1st, std::equal_to<COutPoint>, SaltedOutpointHasher, std::__detail::_Mod_range_hashing, std::__detail::_Default_ranged_hash, std::__detail::_Prime_rehash_policy, std::__detail::_Hashtable_traits<false, false, true> >::clear (13,761,064,935 samples, 0.39%)bitcoind::void std::vector<std::byte, zero_after_free_allocator<std::byte> >::_M_range_insert<std::byte const*> (364,557,178 samples, 0.01%)bitcoind::CCoinsViewCache::Flush (264,031,161,045 samples, 7.43%)bitcoind::CCoi..libc.so.6::cfree@GLIBC_2.2.5 (5,262,867,110 samples, 0.15%)bitcoind::Chainstate::ForceFlushStateToDisk (264,186,830,154 samples, 7.43%)bitcoind::Chai..bitcoind::Chainstate::FlushStateToDisk (264,186,830,154 samples, 7.43%)bitcoind::Chai..libc.so.6::__libc_start_call_main (265,453,083,455 samples, 7.47%)libc.so.6::__l..bitcoind::main (265,453,083,455 samples, 7.47%)bitcoind::mainbitcoind::Shutdown (265,453,083,455 samples, 7.47%)bitcoind::Shut..libc.so.6::_int_free (2,825,988,487 samples, 0.08%)libc.so.6::malloc_consolidate (2,950,349,980 samples, 0.08%)b-shutoff (278,389,331,208 samples, 7.83%)b-shutofflibc.so.6::unlink_chunk.isra.0 
(3,181,018,445 samples, 0.09%)libc.so.6::_int_malloc (620,560,935 samples, 0.02%)[unknown] (518,649,070 samples, 0.01%)[unknown] (466,591,536 samples, 0.01%)[unknown] (466,591,536 samples, 0.01%)[unknown] (466,591,536 samples, 0.01%)[unknown] (415,625,450 samples, 0.01%)[unknown] (415,625,450 samples, 0.01%)[unknown] (363,215,208 samples, 0.01%)[unknown] (1,501,827,638 samples, 0.04%)bitcoind::leveldb::BlockBuilder::Add (581,064,351 samples, 0.02%)bitcoind::leveldb::TableBuilder::Add (1,003,488,869 samples, 0.03%)bitcoind::leveldb::DBImpl::WriteLevel0Table (1,214,913,728 samples, 0.03%)bitcoind::leveldb::BuildTable (1,214,913,728 samples, 0.03%)bitcoind::leveldb::WriteBatchInternal::InsertInto (2,528,384,688 samples, 0.07%)bitcoind::leveldb::WriteBatch::Iterate (2,528,384,688 samples, 0.07%)bitcoind::leveldb::MemTable::Add (2,422,985,691 samples, 0.07%)bitcoind::leveldb::SkipList<char const*, leveldb::MemTable::KeyComparator>::Insert (2,422,985,691 samples, 0.07%)bitcoind::leveldb::SkipList<char const*, leveldb::MemTable::KeyComparator>::FindGreaterOrEqual (2,318,036,540 samples, 0.07%)bitcoind::leveldb::MemTable::KeyComparator::operator (1,429,299,251 samples, 0.04%)bitcoind::leveldb::InternalKeyComparator::Compare (910,982,229 samples, 0.03%)bitcoind::CDBWrapper::CDBWrapper (5,007,147,537 samples, 0.14%)bitcoind::leveldb::DB::Open (5,007,147,537 samples, 0.14%)bitcoind::leveldb::DBImpl::Recover (4,954,666,055 samples, 0.14%)bitcoind::leveldb::DBImpl::RecoverLogFile (4,954,666,055 samples, 0.14%)libc.so.6::__memmove_avx512_unaligned_erms (1,000,227,273 samples, 0.03%)[unknown] (1,000,227,273 samples, 0.03%)[unknown] (1,000,227,273 samples, 0.03%)[unknown] (947,397,460 samples, 0.03%)[unknown] (947,397,460 samples, 0.03%)[unknown] (947,397,460 samples, 0.03%)[unknown] (841,684,608 samples, 0.02%)[unknown] (841,684,608 samples, 0.02%)[unknown] (841,684,608 samples, 0.02%)[unknown] (788,837,171 samples, 0.02%)bitcoind::node::BlockManager::GetAllBlockIndices 
(356,174,463 samples, 0.01%)bitcoind::base_uint<256u>::operator/= (4,353,340,184 samples, 0.12%)bitcoind::base_uint<256u>::operator>>=(unsigned int) (1,651,178,228 samples, 0.05%)bitcoind::GetBlockProof (4,611,529,418 samples, 0.13%)bitcoind::CSHA256::Finalize (469,172,416 samples, 0.01%)bitcoind::CSHA256::Write (416,395,152 samples, 0.01%)bitcoind::CBlockHeader::GetHash (889,433,319 samples, 0.03%)bitcoind::CSHA256::Write (420,260,903 samples, 0.01%)bitcoind::CheckProofOfWorkImpl (628,054,325 samples, 0.02%)bitcoind::arith_uint256::SetCompact (474,577,125 samples, 0.01%)bitcoind::leveldb::(anonymous namespace)::MergingIterator::Next (359,545,524 samples, 0.01%)bitcoind::leveldb::(anonymous namespace)::DBIter::FindNextUserEntry (615,884,661 samples, 0.02%)bitcoind::std::_Hashtable<uint256, std::pair<uint256 const, CBlockIndex>, std::allocator<std::pair<uint256 const, CBlockIndex> >, std::__detail::_Select1st, std::equal_to<uint256>, BlockHasher, std::__detail::_Mod_range_hashing, std::__detail::_Default_ranged_hash, std::__detail::_Prime_rehash_policy, std::__detail::_Hashtable_traits<true, false, true> >::_M_rehash (412,349,637 samples, 0.01%)bitcoind::node::BlockManager::InsertBlockIndex (926,266,820 samples, 0.03%)bitcoind::std::_Hashtable<uint256, std::pair<uint256 const, CBlockIndex>, std::allocator<std::pair<uint256 const, CBlockIndex> >, std::__detail::_Select1st, std::equal_to<uint256>, BlockHasher, std::__detail::_Mod_range_hashing, std::__detail::_Default_ranged_hash, std::__detail::_Prime_rehash_policy, std::__detail::_Hashtable_traits<true, false, true> >::_M_insert_unique_node (621,546,429 samples, 0.02%)bitcoind::kernel::BlockTreeDB::LoadBlockIndexGuts (4,371,914,721 samples, 0.12%)bitcoind::node::BlockManager::GetAllBlockIndices (360,699,633 samples, 0.01%)bitcoind::void std::__introsort_loop<__gnu_cxx::__normal_iterator<CBlockIndex**, std::vector<CBlockIndex*, std::allocator<CBlockIndex*> > >, long, 
__gnu_cxx::__ops::_Iter_comp_iter<node::CBlockIndexHeightOnlyComparator> > (487,613,426 samples, 0.01%)bitcoind::node::BlockManager::LoadBlockIndexDB (10,756,421,448 samples, 0.30%)bitcoind::node::BlockManager::LoadBlockIndex (10,397,563,911 samples, 0.29%)libc.so.6::__libc_start_call_main (17,915,410,780 samples, 0.50%)bitcoind::main (17,915,410,780 samples, 0.50%)bitcoind::AppInitMain (17,915,410,780 samples, 0.50%)bitcoind::InitAndLoadChainstate (17,915,410,780 samples, 0.50%)bitcoind::node::LoadChainstate (17,915,410,780 samples, 0.50%)bitcoind::node::CompleteChainstateInitialization (17,915,410,780 samples, 0.50%)bitcoind::ChainstateManager::LoadBlockIndex (12,499,349,673 samples, 0.35%)bitcoind::void std::__introsort_loop<__gnu_cxx::__normal_iterator<CBlockIndex**, std::vector<CBlockIndex*, std::allocator<CBlockIndex*> > >, long, __gnu_cxx::__ops::_Iter_comp_iter<node::CBlockIndexHeightOnlyComparator> > (711,414,524 samples, 0.02%)bitcoind::void std::__introsort_loop<__gnu_cxx::__normal_iterator<CBlockIndex**, std::vector<CBlockIndex*, std::allocator<CBlockIndex*> > >, long, __gnu_cxx::__ops::_Iter_comp_iter<node::CBlockIndexHeightOnlyComparator> > (401,238,745 samples, 0.01%)libc.so.6::_int_free (620,663,041 samples, 0.02%)bitcoind::leveldb::(anonymous namespace)::BytewiseComparatorImpl::Compare (868,925,227 samples, 0.02%)bitcoind::leveldb::(anonymous namespace)::MergingIterator::Valid (404,648,282 samples, 0.01%)bitcoind::leveldb::(anonymous namespace)::MergingIterator::value (764,967,422 samples, 0.02%)bitcoind::leveldb::Compaction::ShouldStopBefore (811,517,390 samples, 0.02%)bitcoind::leveldb::TableCache::Evict (924,140,736 samples, 0.03%)bitcoind::leveldb::(anonymous namespace)::ShardedLRUCache::Erase (924,140,736 samples, 0.03%)bitcoind::leveldb::(anonymous namespace)::LRUCache::FinishErase (924,140,736 samples, 0.03%)bitcoind::leveldb::(anonymous namespace)::LRUCache::Unref (924,140,736 samples, 0.03%)bitcoind::leveldb::DeleteEntry (924,140,736 
samples, 0.03%)libc.so.6::__munmap (924,140,736 samples, 0.03%)[unknown] (924,140,736 samples, 0.03%)[unknown] (924,140,736 samples, 0.03%)[unknown] (924,140,736 samples, 0.03%)[unknown] (924,140,736 samples, 0.03%)[unknown] (924,140,736 samples, 0.03%)[unknown] (924,140,736 samples, 0.03%)[unknown] (924,140,736 samples, 0.03%)[unknown] (924,140,736 samples, 0.03%)[unknown] (924,140,736 samples, 0.03%)[unknown] (622,330,840 samples, 0.02%)[[jbd2]] (579,680,739 samples, 0.02%)bitcoind::leveldb::DBImpl::DeleteObsoleteFiles (5,386,961,700 samples, 0.15%)libc.so.6::__unlink (4,462,820,964 samples, 0.13%)[unknown] (4,462,820,964 samples, 0.13%)[unknown] (4,462,820,964 samples, 0.13%)[unknown] (4,462,820,964 samples, 0.13%)[unknown] (4,462,820,964 samples, 0.13%)[unknown] (4,462,820,964 samples, 0.13%)[[ext4]] (4,462,820,964 samples, 0.13%)[unknown] (4,413,928,808 samples, 0.12%)[unknown] (4,413,928,808 samples, 0.12%)[unknown] (3,629,480,214 samples, 0.10%)[unknown] (2,527,606,876 samples, 0.07%)[unknown] (1,289,801,972 samples, 0.04%)[unknown] (411,890,158 samples, 0.01%)bitcoind::leveldb::(anonymous namespace)::BytewiseComparatorImpl::Compare (1,451,370,022 samples, 0.04%)bitcoind::leveldb::(anonymous namespace)::BytewiseComparatorImpl::Compare (2,100,345,679 samples, 0.06%)bitcoind::leveldb::(anonymous namespace)::MergingIterator::FindSmallest (7,036,670,089 samples, 0.20%)bitcoind::leveldb::InternalKeyComparator::Compare (5,331,785,618 samples, 0.15%)libc.so.6::__memcmp_evex_movbe (467,739,292 samples, 0.01%)bitcoind::leveldb::Block::Iter::ParseNextKey (1,597,295,639 samples, 0.04%)bitcoind::leveldb::Block::Iter::key (719,412,755 samples, 0.02%)bitcoind::leveldb::(anonymous namespace)::TwoLevelIterator::Next (3,651,719,685 samples, 0.10%)[unknown] (775,514,001 samples, 0.02%)[unknown] (775,514,001 samples, 0.02%)[unknown] (775,514,001 samples, 0.02%)[unknown] (775,514,001 samples, 0.02%)[unknown] (723,468,265 samples, 0.02%)[unknown] (671,854,971 samples, 
0.02%)[unknown] (620,745,631 samples, 0.02%)[unknown] (467,020,775 samples, 0.01%)bitcoind::leveldb::ReadBlock (5,036,746,240 samples, 0.14%)bitcoind::crc32c::ExtendSse42 (4,003,982,142 samples, 0.11%)bitcoind::leveldb::(anonymous namespace)::TwoLevelIterator::InitDataBlock (6,011,618,239 samples, 0.17%)bitcoind::leveldb::Table::BlockReader (5,654,181,527 samples, 0.16%)bitcoind::leveldb::(anonymous namespace)::TwoLevelIterator::SkipEmptyDataBlocksForward (6,370,810,843 samples, 0.18%)bitcoind::leveldb::Block::Iter::Valid (514,863,214 samples, 0.01%)bitcoind::leveldb::(anonymous namespace)::TwoLevelIterator::Next (11,878,686,935 samples, 0.33%)[unknown] (357,525,803 samples, 0.01%)bitcoind::leveldb::ReadBlock (1,021,671,534 samples, 0.03%)bitcoind::crc32c::ExtendSse42 (664,145,731 samples, 0.02%)bitcoind::leveldb::(anonymous namespace)::TwoLevelIterator::InitDataBlock (1,177,165,099 samples, 0.03%)bitcoind::leveldb::Table::BlockReader (1,073,054,446 samples, 0.03%)bitcoind::leveldb::(anonymous namespace)::TwoLevelIterator::SkipEmptyDataBlocksForward (3,944,657,665 samples, 0.11%)bitcoind::leveldb::(anonymous namespace)::TwoLevelIterator::Valid (468,344,432 samples, 0.01%)bitcoind::leveldb::Block::Iter::Valid (360,443,695 samples, 0.01%)bitcoind::leveldb::(anonymous namespace)::MergingIterator::Next (25,840,019,062 samples, 0.73%)bitcoind::leveldb::InternalKeyComparator::Compare (877,755,927 samples, 0.02%)bitcoind::leveldb::(anonymous namespace)::MergingIterator::value (460,966,118 samples, 0.01%)bitcoind::leveldb::(anonymous namespace)::TwoLevelIterator::value (1,168,083,499 samples, 0.03%)bitcoind::leveldb::(anonymous namespace)::BytewiseComparatorImpl::Compare (1,027,703,208 samples, 0.03%)bitcoind::leveldb::Compaction::IsBaseLevelForKey (3,331,453,084 samples, 0.09%)libc.so.6::__memcmp_evex_movbe (1,380,364,868 samples, 0.04%)bitcoind::leveldb::(anonymous namespace)::BytewiseComparatorImpl::Compare (1,428,625,135 samples, 
0.04%)bitcoind::leveldb::Compaction::ShouldStopBefore (5,019,787,360 samples, 0.14%)bitcoind::leveldb::InternalKeyComparator::Compare (3,376,359,370 samples, 0.09%)libc.so.6::__memcmp_evex_movbe (1,229,056,330 samples, 0.03%)bitcoind::leveldb::DBImpl::DeleteObsoleteFiles (947,024,277 samples, 0.03%)libc.so.6::__unlink (947,024,277 samples, 0.03%)[unknown] (947,024,277 samples, 0.03%)[unknown] (947,024,277 samples, 0.03%)[unknown] (947,024,277 samples, 0.03%)[unknown] (947,024,277 samples, 0.03%)[unknown] (947,024,277 samples, 0.03%)[[ext4]] (947,024,277 samples, 0.03%)[unknown] (947,024,277 samples, 0.03%)[unknown] (947,024,277 samples, 0.03%)[unknown] (891,341,341 samples, 0.03%)[unknown] (632,138,490 samples, 0.02%)[unknown] (416,723,130 samples, 0.01%)bitcoind::leveldb::MemTableIterator::key (1,087,232,643 samples, 0.03%)bitcoind::leveldb::(anonymous namespace)::BytewiseComparatorImpl::Compare (7,618,678,897 samples, 0.21%)bitcoind::leveldb::(anonymous namespace)::BytewiseComparatorImpl::Compare (3,626,299,706 samples, 0.10%)bitcoind::leveldb::InternalKeyComparator::Compare (5,542,426,443 samples, 0.16%)bitcoind::leveldb::PutVarint32 (983,352,417 samples, 0.03%)bitcoind::leveldb::EncodeVarint32 (516,604,326 samples, 0.01%)bitcoind::leveldb::BlockBuilder::Add (15,702,002,539 samples, 0.44%)bitcoind::leveldb::FilterBlockBuilder::AddKey (412,090,761 samples, 0.01%)bitcoind::leveldb::(anonymous namespace)::BloomFilterPolicy::CreateFilter (2,066,056,339 samples, 0.06%)bitcoind::leveldb::Hash (665,922,831 samples, 0.02%)bitcoind::leveldb::FilterBlockBuilder::StartBlock (2,377,127,267 samples, 0.07%)bitcoind::leveldb::FilterBlockBuilder::GenerateFilter (2,377,127,267 samples, 0.07%)bitcoind::leveldb::InternalKeyComparator::Compare (774,664,618 samples, 0.02%)[[ext4]] (567,830,671 samples, 0.02%)[[ext4]] (929,740,986 samples, 0.03%)[unknown] (361,910,315 samples, 0.01%)[[ext4]] (3,216,007,087 samples, 0.09%)[unknown] (2,077,722,358 samples, 0.06%)[unknown] 
(2,025,638,088 samples, 0.06%)[unknown] (1,766,421,841 samples, 0.05%)[unknown] (1,349,297,830 samples, 0.04%)[unknown] (985,540,031 samples, 0.03%)[[ext4]] (4,245,378,964 samples, 0.12%)[unknown] (4,245,378,964 samples, 0.12%)[unknown] (821,567,389 samples, 0.02%)bitcoind::leveldb::TableBuilder::Flush (5,177,109,910 samples, 0.15%)libc.so.6::__GI___libc_write (4,762,579,653 samples, 0.13%)[unknown] (4,762,579,653 samples, 0.13%)[unknown] (4,762,579,653 samples, 0.13%)[unknown] (4,762,579,653 samples, 0.13%)[unknown] (4,607,316,631 samples, 0.13%)libc.so.6::__memcmp_evex_movbe (2,327,620,616 samples, 0.07%)bitcoind::leveldb::TableBuilder::Add (29,098,360,859 samples, 0.82%)libc.so.6::__memmove_avx512_unaligned_erms (880,376,005 samples, 0.02%)[[ext4]] (576,641,035 samples, 0.02%)[unknown] (419,245,830 samples, 0.01%)[[ext4]] (681,686,302 samples, 0.02%)[[ext4]] (886,736,982 samples, 0.02%)[[ext4]] (886,736,982 samples, 0.02%)[[ext4]] (886,736,982 samples, 0.02%)bitcoind::leveldb::BuildTable (31,594,879,610 samples, 0.89%)libc.so.6::fdatasync (990,237,376 samples, 0.03%)[unknown] (990,237,376 samples, 0.03%)[unknown] (990,237,376 samples, 0.03%)[unknown] (990,237,376 samples, 0.03%)[[ext4]] (990,237,376 samples, 0.03%)[unknown] (990,237,376 samples, 0.03%)[unknown] (990,237,376 samples, 0.03%)[unknown] (990,237,376 samples, 0.03%)[unknown] (990,237,376 samples, 0.03%)bitcoind::leveldb::DBImpl::CompactMemTable (32,644,397,020 samples, 0.92%)bitcoind::leveldb::DBImpl::WriteLevel0Table (31,697,372,743 samples, 0.89%)[[ext4]] (360,420,776 samples, 0.01%)bitcoind::leveldb::TableBuilder::Finish (565,702,739 samples, 0.02%)bitcoind::leveldb::TableBuilder::WriteRawBlock (411,712,919 samples, 0.01%)libc.so.6::__GI___libc_write (411,712,919 samples, 0.01%)[unknown] (411,712,919 samples, 0.01%)[unknown] (411,712,919 samples, 0.01%)[unknown] (411,712,919 samples, 0.01%)[unknown] (411,712,919 samples, 0.01%)[[ext4]] (411,712,919 samples, 0.01%)[unknown] (411,712,919 samples, 
0.01%)[[ext4]] (2,407,378,967 samples, 0.07%)[unknown] (1,896,402,811 samples, 0.05%)[unknown] (1,223,588,483 samples, 0.03%)[unknown] (359,102,837 samples, 0.01%)[[ext4]] (3,282,391,421 samples, 0.09%)[unknown] (669,408,205 samples, 0.02%)[[nvme]] (410,427,902 samples, 0.01%)[[nvme]] (410,427,902 samples, 0.01%)[unknown] (410,427,902 samples, 0.01%)[unknown] (410,427,902 samples, 0.01%)[[ext4]] (5,846,551,102 samples, 0.16%)[unknown] (1,539,251,741 samples, 0.04%)[unknown] (1,332,958,992 samples, 0.04%)[unknown] (1,230,554,197 samples, 0.03%)[unknown] (1,230,554,197 samples, 0.03%)[unknown] (1,230,554,197 samples, 0.03%)[unknown] (410,593,098 samples, 0.01%)[[ext4]] (5,999,990,575 samples, 0.17%)[[ext4]] (5,999,990,575 samples, 0.17%)bitcoind::leveldb::DBImpl::FinishCompactionOutputFile (7,646,287,561 samples, 0.22%)libc.so.6::fdatasync (6,926,959,748 samples, 0.19%)[unknown] (6,926,959,748 samples, 0.19%)[unknown] (6,926,959,748 samples, 0.19%)[unknown] (6,926,959,748 samples, 0.19%)[[ext4]] (6,926,959,748 samples, 0.19%)[unknown] (6,926,959,748 samples, 0.19%)[unknown] (6,926,959,748 samples, 0.19%)[unknown] (6,926,959,748 samples, 0.19%)[unknown] (6,720,904,548 samples, 0.19%)[unknown] (618,546,651 samples, 0.02%)[unknown] (618,541,816 samples, 0.02%)[unknown] (618,541,816 samples, 0.02%)[unknown] (618,541,816 samples, 0.02%)[unknown] (513,838,124 samples, 0.01%)[unknown] (411,261,494 samples, 0.01%)bitcoind::leveldb::InternalKeyComparator::Compare (1,176,215,358 samples, 0.03%)bitcoind::leveldb::(anonymous namespace)::BytewiseComparatorImpl::Compare (1,597,987,748 samples, 0.04%)bitcoind::leveldb::EncodeVarint32 (667,083,479 samples, 0.02%)bitcoind::leveldb::(anonymous namespace)::BytewiseComparatorImpl::Compare (3,023,925,193 samples, 0.09%)bitcoind::leveldb::InternalKeyComparator::Compare (5,485,200,607 samples, 0.15%)libc.so.6::__memcmp_evex_movbe (768,462,744 samples, 0.02%)bitcoind::leveldb::BlockBuilder::Add (19,355,464,658 samples, 
0.54%)bitcoind::leveldb::PutVarint32 (3,963,072,776 samples, 0.11%)bitcoind::leveldb::EncodeVarint32 (2,006,933,285 samples, 0.06%)bitcoind::leveldb::FilterBlockBuilder::AddKey (1,861,448,821 samples, 0.05%)bitcoind::leveldb::(anonymous namespace)::BloomFilterPolicy::CreateFilter (13,758,298,035 samples, 0.39%)bitcoind::leveldb::Hash (5,062,387,301 samples, 0.14%)bitcoind::leveldb::InternalFilterPolicy::CreateFilter (408,507,196 samples, 0.01%)bitcoind::std::vector<leveldb::Slice, std::allocator<leveldb::Slice> >::_M_default_append (1,029,970,476 samples, 0.03%)bitcoind::leveldb::FilterBlockBuilder::GenerateFilter (16,416,940,319 samples, 0.46%)bitcoind::leveldb::FilterBlockBuilder::StartBlock (16,468,035,714 samples, 0.46%)bitcoind::leveldb::(anonymous namespace)::BytewiseComparatorImpl::Compare (1,040,462,681 samples, 0.03%)bitcoind::leveldb::InternalKeyComparator::Compare (2,329,875,977 samples, 0.07%)bitcoind::leveldb::InternalKeyComparator::FindShortestSeparator (358,735,789 samples, 0.01%)bitcoind::leveldb::PutVarint32 (610,801,466 samples, 0.02%)bitcoind::crc32c::ExtendSse42 (874,382,210 samples, 0.02%)bitcoind::leveldb::TableBuilder::WriteBlock (1,806,524,733 samples, 0.05%)bitcoind::leveldb::TableBuilder::WriteRawBlock (1,390,163,236 samples, 0.04%)libc.so.6::__memmove_avx512_unaligned_erms (413,005,584 samples, 0.01%)[[ext4]] (720,896,427 samples, 0.02%)[[ext4]] (2,836,852,977 samples, 0.08%)[unknown] (1,356,279,497 samples, 0.04%)[[ext4]] (3,876,087,820 samples, 0.11%)[unknown] (634,447,162 samples, 0.02%)[[ext4]] (6,595,884,839 samples, 0.19%)[unknown] (2,409,927,037 samples, 0.07%)[unknown] (2,152,146,763 samples, 0.06%)[unknown] (1,946,544,284 samples, 0.05%)[unknown] (1,691,057,617 samples, 0.05%)[unknown] (1,332,315,567 samples, 0.04%)[unknown] (618,194,201 samples, 0.02%)[unknown] (411,783,313 samples, 0.01%)[[ext4]] (21,402,165,352 samples, 0.60%)[unknown] (13,825,328,165 samples, 0.39%)[unknown] (12,948,506,018 samples, 0.36%)[unknown] 
(10,591,496,268 samples, 0.30%)[unknown] (8,635,293,060 samples, 0.24%)[unknown] (5,512,816,463 samples, 0.16%)[unknown] (1,755,230,935 samples, 0.05%)[unknown] (358,610,982 samples, 0.01%)[[ext4]] (26,848,872,865 samples, 0.76%)[unknown] (26,183,441,807 samples, 0.74%)[unknown] (3,805,768,350 samples, 0.11%)[unknown] (2,522,380,066 samples, 0.07%)libc.so.6::__GI___libc_write (29,870,807,469 samples, 0.84%)[unknown] (29,663,737,328 samples, 0.83%)[unknown] (29,456,391,053 samples, 0.83%)[unknown] (29,306,607,963 samples, 0.82%)[unknown] (28,793,621,717 samples, 0.81%)[unknown] (869,287,921 samples, 0.02%)bitcoind::leveldb::TableBuilder::Flush (32,039,566,359 samples, 0.90%)bitcoind::leveldb::TableBuilder::status (2,416,608,293 samples, 0.07%)bitcoind::memcpy@plt (1,533,086,169 samples, 0.04%)libc.so.6::__memcmp_evex_movbe (11,663,095,994 samples, 0.33%)libc.so.6::__memmove_avx512_unaligned_erms (6,084,682,703 samples, 0.17%)bitcoind::leveldb::TableBuilder::Add (101,316,031,082 samples, 2.85%)bitc..bitcoind::leveldb::TableBuilder::NumEntries (460,667,349 samples, 0.01%)libc.so.6::__memcmp_evex_movbe (359,824,779 samples, 0.01%)bitcoind::leveldb::DBImpl::DoCompactionWork (188,768,693,249 samples, 5.31%)bitcoind:..libc.so.6::__memmove_avx512_unaligned_erms (972,425,560 samples, 0.03%)bitcoind::leveldb::TableBuilder::NumEntries (767,314,029 samples, 0.02%)bitcoind::leveldb::DBImpl::BackgroundCompaction (198,697,568,504 samples, 5.59%)bitcoind::..libc.so.6::__memmove_avx512_unaligned_erms (569,144,596 samples, 0.02%)bitcoind::leveldb::DBImpl::DeleteObsoleteFiles (591,819,871 samples, 0.02%)libc.so.6::__unlink (591,819,871 samples, 0.02%)[unknown] (591,819,871 samples, 0.02%)[unknown] (591,819,871 samples, 0.02%)[unknown] (591,819,871 samples, 0.02%)[unknown] (591,819,871 samples, 0.02%)[unknown] (591,819,871 samples, 0.02%)[[ext4]] (591,819,871 samples, 0.02%)[unknown] (591,819,871 samples, 0.02%)[unknown] (591,819,871 samples, 0.02%)[unknown] (479,954,726 samples, 
0.01%)[unknown] (428,868,095 samples, 0.01%)bitcoind::leveldb::(anonymous namespace)::BytewiseComparatorImpl::Compare (4,760,717,074 samples, 0.13%)bitcoind::leveldb::(anonymous namespace)::BytewiseComparatorImpl::Compare (1,338,954,347 samples, 0.04%)bitcoind::leveldb::InternalKeyComparator::Compare (2,113,914,207 samples, 0.06%)bitcoind::leveldb::BlockBuilder::Add (8,483,080,141 samples, 0.24%)bitcoind::leveldb::PutVarint32 (468,110,226 samples, 0.01%)bitcoind::leveldb::(anonymous namespace)::BloomFilterPolicy::CreateFilter (981,556,026 samples, 0.03%)bitcoind::leveldb::Hash (364,078,664 samples, 0.01%)bitcoind::leveldb::FilterBlockBuilder::StartBlock (1,085,605,353 samples, 0.03%)bitcoind::leveldb::FilterBlockBuilder::GenerateFilter (1,085,605,353 samples, 0.03%)[[ext4]] (363,216,075 samples, 0.01%)[[ext4]] (414,434,148 samples, 0.01%)[[ext4]] (622,462,403 samples, 0.02%)[[ext4]] (2,219,690,360 samples, 0.06%)[unknown] (1,545,848,943 samples, 0.04%)[unknown] (1,545,848,943 samples, 0.04%)[unknown] (1,344,249,592 samples, 0.04%)[unknown] (1,034,709,836 samples, 0.03%)[unknown] (463,122,475 samples, 0.01%)[[ext4]] (2,730,864,687 samples, 0.08%)[unknown] (2,627,509,960 samples, 0.07%)bitcoind::leveldb::TableBuilder::Flush (2,941,422,377 samples, 0.08%)libc.so.6::__GI___libc_write (2,889,358,538 samples, 0.08%)[unknown] (2,889,358,538 samples, 0.08%)[unknown] (2,837,160,085 samples, 0.08%)[unknown] (2,837,160,085 samples, 0.08%)[unknown] (2,837,160,085 samples, 0.08%)libc.so.6::__memcmp_evex_movbe (870,026,684 samples, 0.02%)bitcoind::leveldb::TableBuilder::Add (14,671,945,001 samples, 0.41%)libc.so.6::__memmove_avx512_unaligned_erms (516,334,186 samples, 0.01%)[[ext4]] (366,285,823 samples, 0.01%)bitcoind::leveldb::BuildTable (15,764,968,843 samples, 0.44%)libc.so.6::fdatasync (522,804,809 samples, 0.01%)[unknown] (522,804,809 samples, 0.01%)[unknown] (522,804,809 samples, 0.01%)[unknown] (522,804,809 samples, 0.01%)[[ext4]] (522,804,809 samples, 0.01%)[unknown] 
(522,804,809 samples, 0.01%)[unknown] (522,804,809 samples, 0.01%)[unknown] (522,804,809 samples, 0.01%)[unknown] (522,804,809 samples, 0.01%)[[ext4]] (470,717,222 samples, 0.01%)[[ext4]] (470,717,222 samples, 0.01%)[[ext4]] (470,717,222 samples, 0.01%)libstdc++.so.6.0.32::execute_native_thread_routine (215,158,735,915 samples, 6.05%)libstdc++.s..bitcoind::leveldb::(anonymous namespace)::PosixEnv::BackgroundThreadEntryPoint (215,158,735,915 samples, 6.05%)bitcoind::l..bitcoind::leveldb::DBImpl::BackgroundCall (215,158,735,915 samples, 6.05%)bitcoind::l..bitcoind::leveldb::DBImpl::CompactMemTable (16,461,167,411 samples, 0.46%)bitcoind::leveldb::DBImpl::WriteLevel0Table (15,869,347,540 samples, 0.45%)bitcoind (236,278,709,104 samples, 6.65%)bitcoindall (3,555,551,407,309 samples, 100%) diff --git a/flake.lock b/flake.lock new file mode 100644 index 000000000000..fc1308c520fa --- /dev/null +++ b/flake.lock @@ -0,0 +1,27 @@ +{ + "nodes": { + "nixpkgs": { + "locked": { + "lastModified": 1764983851, + "narHash": "sha256-y7RPKl/jJ/KAP/VKLMghMgXTlvNIJMHKskl8/Uuar7o=", + "owner": "NixOS", + "repo": "nixpkgs", + "rev": "d9bc5c7dceb30d8d6fafa10aeb6aa8a48c218454", + "type": "github" + }, + "original": { + "owner": "NixOS", + "ref": "nixos-25.11", + "repo": "nixpkgs", + "type": "github" + } + }, + "root": { + "inputs": { + "nixpkgs": "nixpkgs" + } + } + }, + "root": "root", + "version": 7 +} diff --git a/flake.nix b/flake.nix new file mode 100644 index 000000000000..a7a1917e4b1b --- /dev/null +++ b/flake.nix @@ -0,0 +1,169 @@ +{ + description = "bitcoind for benchmarking"; + + inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-25.11"; + + outputs = { + self, + nixpkgs, + }: let + systems = [ + "x86_64-linux" + "aarch64-darwin" + ]; + + forAllSystems = f: nixpkgs.lib.genAttrs systems (system: f system); + + pkgsFor = system: import nixpkgs {inherit system;}; + + mkBitcoinCore = system: let + pkgs = pkgsFor system; + inherit (pkgs) lib; + + pname = "bitcoin-core"; + version = 
self.shortRev or "dirty"; + + CFlags = toString [ + "-O2" + "-g" + ]; + CXXFlags = "${CFlags} -fno-omit-frame-pointer"; + + nativeBuildInputs = [ + pkgs.cmake + pkgs.ninja + pkgs.pkg-config + pkgs.python3 + ]; + + buildInputs = [ + pkgs.boost188.dev + pkgs.libevent.dev + ]; + + cmakeFlags = [ + "-DBUILD_BENCH=OFF" + "-DBUILD_BITCOIN_BIN=OFF" + "-DBUILD_CLI=OFF" + "-DBUILD_DAEMON=ON" + "-DBUILD_FUZZ_BINARY=OFF" + "-DBUILD_GUI_TESTS=OFF" + "-DBUILD_TESTS=OFF" + "-DBUILD_TX=OFF" + "-DBUILD_UTIL=OFF" + "-DBUILD_WALLET_TOOL=OFF" + "-DCMAKE_BUILD_TYPE=RelWithDebInfo" + "-DCMAKE_SKIP_RPATH=ON" + "-DENABLE_EXTERNAL_SIGNER=OFF" + "-DENABLE_IPC=OFF" + "-DENABLE_WALLET=OFF" + "-DREDUCE_EXPORTS=ON" + "-DWITH_ZMQ=OFF" + ]; + in + pkgs.stdenv.mkDerivation { + inherit + pname + version + nativeBuildInputs + buildInputs + cmakeFlags + ; + + preConfigure = '' + cmakeFlagsArray+=( + "-DAPPEND_CFLAGS=${CFlags}" + "-DAPPEND_CXXFLAGS=${CXXFlags}" + "-DAPPEND_LDFLAGS=-Wl,--as-needed -Wl,-O2" + ) + ''; + + src = builtins.path { + path = ./.; + name = "source"; + }; + + env = { + CMAKE_GENERATOR = "Ninja"; + LC_ALL = "C"; + LIBRARY_PATH = ""; + CPATH = ""; + C_INCLUDE_PATH = ""; + CPLUS_INCLUDE_PATH = ""; + OBJC_INCLUDE_PATH = ""; + OBJCPLUS_INCLUDE_PATH = ""; + }; + + dontStrip = true; + + meta = { + description = "bitcoind for benchmarking"; + homepage = "https://bitcoincore.org/"; + license = lib.licenses.mit; + }; + }; + in { + packages = forAllSystems (system: { + default = mkBitcoinCore system; + }); + + formatter = forAllSystems (system: (pkgsFor system).nixfmt-tree); + + devShells = forAllSystems ( + system: let + pkgs = pkgsFor system; + inherit (pkgs) stdenv; + + # Override the default cargo-flamegraph with a custom fork including bitcoin highlighting + cargo-flamegraph = pkgs.rustPlatform.buildRustPackage rec { + pname = "flamegraph"; + version = "bitcoin-core"; + + src = pkgs.fetchFromGitHub { + owner = "willcl-ark"; + repo = "flamegraph"; + rev = "bitcoin-core"; + sha256 = 
"sha256-tQbr3MYfAiOxeT12V9au5KQK5X5JeGuV6p8GR/Sgen4="; + }; + + doCheck = false; + cargoHash = "sha256-QWPqTyTFSZNJNayNqLmsQSu0rX26XBKfdLROZ9tRjrg="; + + nativeBuildInputs = pkgs.lib.optionals stdenv.hostPlatform.isLinux [pkgs.makeWrapper]; + buildInputs = pkgs.lib.optionals stdenv.hostPlatform.isDarwin [ + pkgs.darwin.apple_sdk.frameworks.Security + ]; + + postFixup = pkgs.lib.optionalString stdenv.hostPlatform.isLinux '' + wrapProgram $out/bin/cargo-flamegraph \ + --set-default PERF ${pkgs.perf}/bin/perf + wrapProgram $out/bin/flamegraph \ + --set-default PERF ${pkgs.perf}/bin/perf + ''; + }; + in { + default = pkgs.mkShell { + buildInputs = [ + # Benchmarking + cargo-flamegraph + pkgs.flamegraph + pkgs.hyperfine + pkgs.jq + pkgs.just + pkgs.perf + pkgs.perf-tools + pkgs.python312 + pkgs.python312Packages.jinja2 + pkgs.python312Packages.matplotlib + pkgs.ruff + pkgs.ty + pkgs.util-linux + + # Binary patching + pkgs.patchelf + ]; + }; + } + ); + }; +} diff --git a/justfile b/justfile new file mode 100644 index 000000000000..f38282e5d981 --- /dev/null +++ b/justfile @@ -0,0 +1,107 @@ +set shell := ["bash", "-uc"] + +default: + just --list + +# ============================================================================ +# Local benchmarking commands +# ============================================================================ + +# Test instrumented run using signet (includes report generation) +[group('local')] +test-instrumented commit datadir: + nix develop --command python3 bench.py build --skip-existing {{ commit }}:pr + nix develop --command python3 bench.py --profile quick run \ + --benchmark-config bench/configs/pr.toml \ + --matrix-entry 450-true \ + --datadir {{ datadir }} \ + pr:./binaries/pr/bitcoind + nix develop --command python3 bench.py report bench-output/ bench-output/ + +# Test uninstrumented run using signet +[group('local')] +test-uninstrumented commit datadir: + nix develop --command python3 bench.py build --skip-existing {{ commit }}:pr + 
nix develop --command python3 bench.py --profile quick run \ + --benchmark-config bench/configs/pr.toml \ + --matrix-entry 450-false \ + --datadir {{ datadir }} \ + pr:./binaries/pr/bitcoind + +# Full benchmark with instrumentation (flamegraphs + plots) +[group('local')] +instrumented commit datadir: + python3 bench.py build {{ commit }}:pr + python3 bench.py run \ + --benchmark-config bench/configs/pr.toml \ + --matrix-entry 450-true \ + --datadir {{ datadir }} \ + pr:./binaries/pr/bitcoind + +# Just build a binary (useful for incremental testing) +[group('local')] +build commit: + python3 bench.py build {{ commit }} + +# Run benchmark with pre-built binary +[group('local')] +run datadir binary: + python3 bench.py run \ + --benchmark-config bench/configs/pr.toml \ + --matrix-entry 450-false \ + --datadir {{ datadir }} \ + {{ binary }} + +# Generate plots from a debug.log file +[group('local')] +analyze commit logfile output_dir="./plots": + python3 bench.py analyze {{ commit }} {{ logfile }} --output-dir {{ output_dir }} + +# Generate HTML report from benchmark results +[group('local')] +report input_dir output_dir nightly_history="": + #!/usr/bin/env bash + set -euo pipefail + if [ -n "{{ nightly_history }}" ]; then + python3 bench.py report {{ input_dir }} {{ output_dir }} --nightly-history {{ nightly_history }} + else + python3 bench.py report {{ input_dir }} {{ output_dir }} + fi + +# ============================================================================ +# CI commands (called by GitHub Actions) +# ============================================================================ + +# Build binary for CI +[group('ci')] +ci-build commit binaries_dir: + python3 bench.py build -o {{ binaries_dir }} {{ commit }}:pr + +# Run benchmark for CI +[group('ci')] +ci-run benchmark_config matrix_entry datadir tmp_datadir output_dir binaries_dir: + python3 bench.py run \ + --benchmark-config {{ benchmark_config }} \ + --matrix-entry {{ matrix_entry }} \ + --datadir {{ 
datadir }} \ + --tmp-datadir {{ tmp_datadir }} \ + --output-dir {{ output_dir }} \ + pr:{{ binaries_dir }}/pr/bitcoind + +# ============================================================================ +# Git helpers +# ============================================================================ + +# Cherry-pick commits from a Bitcoin Core PR onto this branch +[group('git')] +pick-pr pr_number: + #!/usr/bin/env bash + set -euxo pipefail + + if ! git remote get-url upstream 2>/dev/null | grep -q "bitcoin/bitcoin"; then + echo "Error: 'upstream' remote not found or doesn't point to bitcoin/bitcoin" + echo "Please add it with: git remote add upstream https://github.com/bitcoin/bitcoin.git" + exit 1 + fi + + git fetch upstream pull/{{ pr_number }}/head:bench-{{ pr_number }} && git cherry-pick $(git rev-list --reverse bench-{{ pr_number }} --not upstream/master) diff --git a/ruff.toml b/ruff.toml new file mode 100644 index 000000000000..35d385e82ac3 --- /dev/null +++ b/ruff.toml @@ -0,0 +1,2 @@ +target-version = "py312" +include = ["bench.py", "bench/*.py"] diff --git a/src/bench/CMakeLists.txt b/src/bench/CMakeLists.txt index e1d5e4097ff4..07e80d61cd45 100644 --- a/src/bench/CMakeLists.txt +++ b/src/bench/CMakeLists.txt @@ -19,6 +19,7 @@ add_executable(bench_bitcoin checkblockindex.cpp checkqueue.cpp cluster_linearize.cpp + coins_view_overlay.cpp connectblock.cpp crypto_hash.cpp descriptors.cpp diff --git a/src/bench/coins_view_overlay.cpp b/src/bench/coins_view_overlay.cpp new file mode 100644 index 000000000000..a6ccbfea068e --- /dev/null +++ b/src/bench/coins_view_overlay.cpp @@ -0,0 +1,346 @@ +// Copyright (c) 2025-present The Bitcoin Core developers +// Distributed under the MIT software license, see the accompanying +// file COPYING or http://www.opensource.org/licenses/mit-license.php. 
+ +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include +#include +#include +#include +#include +#include +#include + +// Microbenchmarks isolating the per-input/per-tx loop in +// CoinsViewOverlay::StartFetching. All variants pre-reserve the inputs vector +// and a per-block txid set; the difference is the txid set's key/hasher and +// whether the inline self-spend filter is performed at populate time. +// +// A) PreReservedSet - current approach. The set stores 64-bit QuickHashes +// of txids and is consulted later by worker threads, so populate just +// pushes every input and emplaces every txid hash. +// E) HoistedSet - same as A) but additionally performs the inline filter +// (skip pushing inputs whose prevout's QuickHash already appears in the +// set). Models doing the same filtering work that A's workers do, but at +// populate time. +// F) HoistedSetTxid - inline filter, with the txid set keyed by Txid (32B) +// and hashed by SaltedTxidHasher (SipHash-2-4 over uint256). +// G) HoistedSetTxidQuickHash - inline filter, txid set keyed by Txid but +// hashed by QuickHashHasher (xor-add of 4 uint64 limbs against a salt). +// Isolates the cost of the wider key from the hasher choice. +// J) HoistedSetTxidJumbo - inline filter, txid set keyed by Txid hashed by +// a SipHash-1-3 jumboblock variant adapted from +// https://github.com/l0rinc/bitcoin/pull/70. The full 256-bit hash is +// mixed in a single SipRound (instead of four message blocks), so the +// cost is one compression round + three finalization rounds = 4 rounds +// total per call (vs 14 for SaltedTxidHasher's SH24+UP path on outpoints). +// +// All benchmarks build the same block once (deterministically) so the only +// difference between them is the populate strategy. 
+namespace { + +constexpr size_t TX_COUNT{5'000}; +constexpr size_t INPUTS_PER_TX{4}; +constexpr size_t TOTAL_INPUTS{TX_COUNT * INPUTS_PER_TX}; + +using QuickHash = uint64_t; + +//! Mirrors CoinsViewOverlay::QuickHashHasher. Reproduced here so the benchmark +//! does not depend on internals of CoinsViewOverlay. +class QuickHashHasher +{ + uint64_t m_key[4]; + +public: + explicit QuickHashHasher(bool deterministic) noexcept + { + FastRandomContext rng{deterministic}; + for (auto& k : m_key) k = rng.rand64(); + } + +#if defined(__clang__) + __attribute__((no_sanitize("unsigned-integer-overflow"))) +#endif + QuickHash operator()(const Txid& txid) const noexcept + { + const auto& hash_input{txid.ToUint256()}; + QuickHash out{0}; + for (const auto i : std::views::iota(0, 4)) out += hash_input.GetUint64(i) ^ m_key[i]; + return out; + } +}; + +//! SipHash-1-3 jumboblock hasher adapted from +//! https://github.com/l0rinc/bitcoin/pull/70 for the Txid-only (32-byte) case. +//! The full uint256 is absorbed as a single block (1 compression SipRound), +//! followed by 3 finalization SipRounds. Replicated locally so the benchmark +//! works on a master tree without merging the PR. 
+class JumboTxidHasher +{ + uint64_t m_k0; + uint64_t m_k1; + + ALWAYS_INLINE static void SipRound(uint64_t& v0, uint64_t& v1, uint64_t& v2, uint64_t& v3) noexcept + { + uint64_t a{v0}, b{v1}, c{v2}, d{v3}; + a += b; b = std::rotl(b, 13); b ^= a; + a = std::rotl(a, 32); + c += d; d = std::rotl(d, 16); d ^= c; + a += d; d = std::rotl(d, 21); d ^= a; + c += b; b = std::rotl(b, 17); b ^= c; + c = std::rotl(c, 32); + v0 = a; v1 = b; v2 = c; v3 = d; + } + +public: + explicit JumboTxidHasher(bool deterministic) noexcept + { + FastRandomContext rng{deterministic}; + m_k0 = rng.rand64(); + m_k1 = rng.rand64(); + } + + ALWAYS_INLINE size_t operator()(const Txid& txid) const noexcept + { + static constexpr uint64_t C0{0x736f6d6570736575ULL}; + static constexpr uint64_t C1{0x646f72616e646f6dULL}; + static constexpr uint64_t C2{0x6c7967656e657261ULL}; + static constexpr uint64_t C3{0x7465646279746573ULL}; + + const auto& u{txid.ToUint256()}; + const uint64_t m0{u.GetUint64(0)}; + const uint64_t m1{u.GetUint64(1)}; + const uint64_t m2{u.GetUint64(2)}; + const uint64_t m3{u.GetUint64(3)}; + + uint64_t v0{C0 ^ m_k0}; + uint64_t v1{C1 ^ m_k1}; + uint64_t v2{C2 ^ m_k0}; + uint64_t v3{C3 ^ m_k1}; + + v0 ^= m0; + v1 ^= m1; + v2 ^= m2; + v3 ^= m3; + SipRound(v0, v1, v2, v3); + v0 ^= m3; + v1 ^= m0; + v2 ^= m1; + v3 ^= m2; + + v2 ^= 0xff; + SipRound(v0, v1, v2, v3); + SipRound(v0, v1, v2, v3); + SipRound(v0, v1, v2, v3); + return v0 ^ v1 ^ v2 ^ v3; + } +}; + +//! Mirrors CoinsViewOverlay::InputToFetch so the per-input emplace cost is +//! representative. +struct InputToFetch { + std::atomic_flag ready{}; + const COutPoint& outpoint; + std::optional coin{std::nullopt}; + + InputToFetch(InputToFetch&& other) noexcept : outpoint{other.outpoint} {} + explicit InputToFetch(const COutPoint& o LIFETIMEBOUND) noexcept : outpoint{o} {} +}; + +//! Build a block with TX_COUNT transactions of INPUTS_PER_TX random prevouts +//! each, plus a coinbase. 
Prevouts are fully random so the inline filter +//! never short-circuits, exercising the worst case for the variants that pay +//! a per-input lookup. +CBlock CreateRandomBlock() +{ + FastRandomContext rng{/*deterministic=*/true}; + + CBlock block; + block.vtx.reserve(TX_COUNT + 1); + + CMutableTransaction coinbase; + coinbase.vin.resize(1); + coinbase.vin[0].prevout.SetNull(); + coinbase.vout.resize(1); + block.vtx.emplace_back(MakeTransactionRef(std::move(coinbase))); + + for (size_t i{0}; i < TX_COUNT; ++i) { + CMutableTransaction tx; + tx.vin.reserve(INPUTS_PER_TX); + for (size_t j{0}; j < INPUTS_PER_TX; ++j) { + tx.vin.emplace_back(COutPoint{Txid::FromUint256(rng.rand256()), rng.rand32()}); + } + tx.vout.resize(1); + block.vtx.emplace_back(MakeTransactionRef(std::move(tx))); + } + return block; +} + +//! Approach A: pre-reserved inputs vector and pre-reserved QuickHash txids +//! set, both treated as long-lived members. No inline filter; just push every +//! input and emplace every txid quickhash, the way StartFetching does today. +void CoinsViewOverlayStartFetchingPreReservedSet(benchmark::Bench& bench) +{ + const CBlock block{CreateRandomBlock()}; + const QuickHashHasher hasher{/*deterministic=*/true}; + + std::vector inputs; + std::unordered_set txids; + inputs.reserve(TOTAL_INPUTS); + txids.reserve(TX_COUNT); + + bench.unit("block").epochIterations(1) + .setup([&] { + inputs.clear(); + txids.clear(); + }) + .run([&] { + for (const auto& tx : block.vtx | std::views::drop(1)) { + for (const auto& in : tx->vin) { + inputs.emplace_back(in.prevout); + } + txids.emplace(hasher(tx->GetHash())); + } + ankerl::nanobench::doNotOptimizeAway(inputs); + ankerl::nanobench::doNotOptimizeAway(txids); + }); +} + +//! Approach E: same hoisted QuickHash txids set as A) but adds the inline +//! filter (skip pushing inputs whose prevout-quickhash is already in the set). 
+void CoinsViewOverlayStartFetchingHoistedSet(benchmark::Bench& bench) +{ + const CBlock block{CreateRandomBlock()}; + const QuickHashHasher hasher{/*deterministic=*/true}; + + std::vector inputs; + std::unordered_set txids; + inputs.reserve(TOTAL_INPUTS); + txids.reserve(TX_COUNT); + + bench.unit("block").epochIterations(1) + .setup([&] { + inputs.clear(); + txids.clear(); + }) + .run([&] { + for (const auto& tx : block.vtx | std::views::drop(1)) { + for (const auto& in : tx->vin) { + if (txids.contains(hasher(in.prevout.hash))) continue; + inputs.emplace_back(in.prevout); + } + txids.emplace(hasher(tx->GetHash())); + } + ankerl::nanobench::doNotOptimizeAway(inputs); + ankerl::nanobench::doNotOptimizeAway(txids); + }); +} + +//! Approach F: hoisted Txid+SaltedTxidHasher set with inline filter. Stores +//! 32-byte Txid keys hashed by SipHash-2-4 (SH24+UP path). +void CoinsViewOverlayStartFetchingHoistedSetTxid(benchmark::Bench& bench) +{ + const CBlock block{CreateRandomBlock()}; + + std::vector inputs; + std::unordered_set txids; + inputs.reserve(TOTAL_INPUTS); + txids.reserve(TX_COUNT); + + bench.unit("block").epochIterations(1) + .setup([&] { + inputs.clear(); + txids.clear(); + }) + .run([&] { + for (const auto& tx : block.vtx | std::views::drop(1)) { + for (const auto& in : tx->vin) { + if (txids.contains(in.prevout.hash)) continue; + inputs.emplace_back(in.prevout); + } + txids.emplace(tx->GetHash()); + } + ankerl::nanobench::doNotOptimizeAway(inputs); + ankerl::nanobench::doNotOptimizeAway(txids); + }); +} + +//! Approach G: hoisted Txid+QuickHashHasher set with inline filter. Same +//! 32-byte key as F) but with the cheap xor-add hasher used in A) and E). 
+void CoinsViewOverlayStartFetchingHoistedSetTxidQuickHash(benchmark::Bench& bench) +{ + const CBlock block{CreateRandomBlock()}; + + std::vector inputs; + std::unordered_set txids{ + /*bucket_count=*/0, QuickHashHasher{/*deterministic=*/true}}; + inputs.reserve(TOTAL_INPUTS); + txids.reserve(TX_COUNT); + + bench.unit("block").epochIterations(1) + .setup([&] { + inputs.clear(); + txids.clear(); + }) + .run([&] { + for (const auto& tx : block.vtx | std::views::drop(1)) { + for (const auto& in : tx->vin) { + if (txids.contains(in.prevout.hash)) continue; + inputs.emplace_back(in.prevout); + } + txids.emplace(tx->GetHash()); + } + ankerl::nanobench::doNotOptimizeAway(inputs); + ankerl::nanobench::doNotOptimizeAway(txids); + }); +} + +//! Approach J: hoisted Txid+JumboTxidHasher set with inline filter. Same +//! 32-byte key as F) and G), but hashed with the SipHash-1-3 jumboblock +//! variant from https://github.com/l0rinc/bitcoin/pull/70 (4 SipRounds total +//! for the Txid-only case). +void CoinsViewOverlayStartFetchingHoistedSetTxidJumbo(benchmark::Bench& bench) +{ + const CBlock block{CreateRandomBlock()}; + + std::vector inputs; + std::unordered_set txids{ + /*bucket_count=*/0, JumboTxidHasher{/*deterministic=*/true}}; + inputs.reserve(TOTAL_INPUTS); + txids.reserve(TX_COUNT); + + bench.unit("block").epochIterations(1) + .setup([&] { + inputs.clear(); + txids.clear(); + }) + .run([&] { + for (const auto& tx : block.vtx | std::views::drop(1)) { + for (const auto& in : tx->vin) { + if (txids.contains(in.prevout.hash)) continue; + inputs.emplace_back(in.prevout); + } + txids.emplace(tx->GetHash()); + } + ankerl::nanobench::doNotOptimizeAway(inputs); + ankerl::nanobench::doNotOptimizeAway(txids); + }); +} + +} // namespace + +BENCHMARK(CoinsViewOverlayStartFetchingPreReservedSet); +BENCHMARK(CoinsViewOverlayStartFetchingHoistedSet); +BENCHMARK(CoinsViewOverlayStartFetchingHoistedSetTxid); +BENCHMARK(CoinsViewOverlayStartFetchingHoistedSetTxidQuickHash); 
+BENCHMARK(CoinsViewOverlayStartFetchingHoistedSetTxidJumbo); diff --git a/src/coins.h b/src/coins.h index ae7f34f46581..7a9ee7349301 100644 --- a/src/coins.h +++ b/src/coins.h @@ -8,21 +8,33 @@ #include #include +#include #include #include +#include #include +#include #include #include #include #include #include #include +#include #include #include +#include #include +#include +#include +#include +#include #include +#include +#include +#include /** * A UTXO entry. @@ -376,7 +388,7 @@ class CCoinsViewBacked : public CCoinsView public: explicit CCoinsViewBacked(CCoinsView* in_view) : base{Assert(in_view)} {} - void SetBackend(CCoinsView& in_view) { base = &in_view; } + virtual void SetBackend(CCoinsView& in_view) { base = &in_view; } std::optional GetCoin(const COutPoint& outpoint) const override { return base->GetCoin(outpoint); } std::optional PeekCoin(const COutPoint& outpoint) const override { return base->PeekCoin(outpoint); } @@ -415,7 +427,7 @@ class CCoinsViewCache : public CCoinsViewBacked * Discard all modifications made to this cache without flushing to the base view. * This can be used to efficiently reuse a cache instance across multiple operations. */ - void Reset() noexcept; + virtual void Reset() noexcept; /* Fetch the coin from base. Used for cache misses in FetchCoin. */ virtual std::optional FetchCoinFromBase(const COutPoint& outpoint) const; @@ -487,7 +499,7 @@ class CCoinsViewCache : public CCoinsViewBacked * If reallocate_cache is false, the cache will retain the same memory footprint * after flushing and should be destroyed to deallocate. */ - void Flush(bool reallocate_cache = true); + virtual void Flush(bool reallocate_cache = true); /** * Push the modifications applied to this cache to its base while retaining @@ -495,7 +507,7 @@ class CCoinsViewCache : public CCoinsViewBacked * Failure to call this method or Flush() before destruction will cause the changes * to be forgotten. 
*/ - void Sync(); + virtual void Sync(); /** * Removes the UTXO with the given outpoint from the cache, if it is @@ -553,24 +565,266 @@ class CCoinsViewCache : public CCoinsViewBacked }; /** - * CCoinsViewCache overlay that avoids populating/mutating parent cache layers on cache misses. + * CCoinsViewCache subclass that asynchronously fetches all block inputs in parallel during ConnectBlock without + * mutating the base cache. * - * This is achieved by fetching coins from the base view using PeekCoin() instead of GetCoin(), - * so intermediate CCoinsViewCache layers are not filled. + * Only used in ConnectBlock to pass as an ephemeral view that can be reset if the block is invalid. + * It provides the same interface as CCoinsViewCache. It overrides all methods that mutate base, + * stopping threads before calling superclass. + * It adds an additional StartFetching method to provide the block. * - * Used during ConnectBlock() as an ephemeral, resettable top-level view that is flushed only - * on success, so invalid blocks don't pollute the underlying cache. + * When a block is passed to StartFetching, the block txids are first inserted into m_txids (an + * unordered set keyed by Txid with SaltedTxidHasher providing bucket selection). The block inputs are + * then iterated in order, and any input whose prevout.hash is already in m_txids (a same-block + * spend of an earlier transaction) is filtered out; remaining inputs are appended to m_inputs as + * InputToFetch objects. m_txids is cleared after the loop. m_inputs is reserved up front to + * MAX_INPUTS_PER_BLOCK at construction so emplace_back never reallocates and per-input state is + * stable for the lifetime of the prefetch. + * + * StartFetching then submits worker tasks to a ThreadPool and keeps the returned futures alive + * until fetching is stopped. + * + * ProcessInput() atomically fetches and increments m_input_head, so each thread can only access a + * single element of the m_inputs vector at a time. 
Workers race to claim inputs, so they may
+ * fetch elements in any order. If the fetched index is greater than or equal to the size of m_inputs,
+ * no more inputs can be fetched and false is returned. Otherwise the worker fetches the coin from
+ * the base cache and moves it into the InputToFetch object. The ready flag is then set with a
+ * release memory order, which allows it to be used as a memory fence: writing the coin happens
+ * before another thread observes the flag via an acquire load.
+ * This assumes all base->PeekCoin() paths are safe for concurrent readers and do not mutate
+ * lower cache layers.
+ *
+ * When a coin is requested from the cache on the main thread and is not already in cacheCoins
+ * map, the coin is first looked up from the m_inputs vector instead of the base cache. The
+ * vector is scanned beginning at the element at m_input_tail. If the InputToFetch object has the
+ * same outpoint as requested, m_input_tail is advanced to the next index so the previous inputs
+ * do not need to be scanned again. The InputToFetch object's ready flag is tested with an acquire
+ * memory order; if the object is ready, the background worker has completed and the coin can be
+ * moved from it. If the object is not ready, the main thread will call ProcessInput() itself
+ * until the requested coin becomes ready. This allows the main thread to keep making progress
+ * (by fetching other inputs) rather than blocking on a specific worker.
+ *
+ * StopFetching() is called before mutating operations (Flush/Sync/Reset/SetBackend). It stops
+ * fetching by moving m_input_head to the end of m_inputs (so workers quickly exit), then waits
+ * for all futures to complete and clears m_inputs.
+ *
+ * Workers advance m_input_head to fetch inputs. Main thread advances m_input_tail to consume.
+ * + * Before workers start: + * + * m_input_head + * m_input_tail + * │ + * ▼ + * ┌─────────┬─────────┬─────────┬─────────┬─────────┬─────────┬─────────┬─────────┬─────────┐ + * m_inputs: │ waiting │ waiting │ waiting │ waiting │ waiting │ waiting │ waiting │ waiting │ waiting │ + * │ │ │ │ │ │ │ │ │ │ + * └─────────┴─────────┴─────────┴─────────┴─────────┴─────────┴─────────┴─────────┴─────────┘ + * + * After workers start: + * + * Worker 2 Worker 0 Worker 3 Worker 1 m_input_head + * │ │ │ │ │ + * ▼ ▼ ▼ ▼ ▼ + * ┌─────────┬─────────┬─────────┬─────────┬─────────┬─────────┬─────────┬─────────┬─────────┐ + * m_inputs: │ ready │ ready │fetching │ ready │fetching │fetching │fetching │ waiting │ waiting │ + * │consumed │ ✓ │ ● │ ✓ │ ● │ ● │ ● │ │ │ + * └─────────┴─────────┴─────────┴─────────┴─────────┴─────────┴─────────┴─────────┴─────────┘ + * ▲ + * │ + * m_input_tail */ class CoinsViewOverlay : public CCoinsViewCache { private: + //! The latest input not yet being fetched. Workers atomically increment this when fetching. + mutable std::atomic_uint32_t m_input_head{0}; + //! The latest input not yet accessed by a consumer. Only the main thread increments this. + mutable uint32_t m_input_tail{0}; + + //! The inputs of the block which is being fetched. + struct InputToFetch { + //! Workers set this after setting the coin. The main thread tests this before reading the coin. + std::atomic_flag ready{}; + //! The outpoint of the input to fetch. + const COutPoint& outpoint; + //! The coin that workers will fetch and main thread will insert into cache. + std::optional coin{std::nullopt}; + + InputToFetch(const InputToFetch&) = delete; + InputToFetch& operator=(const InputToFetch&) = delete; + InputToFetch& operator=(InputToFetch&&) = delete; + /** + * Required only because std::vector needs move-constructible elements + * (atomic_flag is neither copyable nor movable, so the implicit move ctor is deleted). 
+ * In practice we never reallocate: m_inputs is reserved to MAX_INPUTS_PER_BLOCK at + * CoinsViewOverlay construction, and that capacity is never exceeded, so this body + * must remain unreachable. + */ + InputToFetch(InputToFetch&& other) noexcept : outpoint{other.outpoint} + { + assert(false); + } + explicit InputToFetch(const COutPoint& o LIFETIMEBOUND) noexcept : outpoint{o} {} + }; + //! Must only be mutated when m_futures is empty. Elements may be mutated when m_futures is not empty. + mutable std::vector m_inputs{}; + + /** + * Set of block txids used in StartFetching to filter out inputs spending earlier transactions + * in the same block. Stored as a member so the bucket array is reused across blocks; cleared + * after each StartFetching loop. SaltedTxidHasher (SipHash-2-4) provides bucket selection + * with cryptographic-PRF properties; equality uses the full Txid. + * Must only be mutated when m_futures is empty. + */ + std::unordered_set m_txids; + + /** + * Claim and fetch the next input in the queue. Safe to call from any thread. + * + * @return true if there are more inputs in the queue to fetch + * @return false if there are no more inputs in the queue to fetch + */ + bool ProcessInput() const noexcept + { + const auto i{m_input_head.fetch_add(1, std::memory_order_relaxed)}; + if (i >= m_inputs.size()) return false; + + auto& input{m_inputs[i]}; + // m_inputs only contains inputs that are not same-block spends (StartFetching pre-filters + // them via m_txids), so every entry is expected to be in the base view if the block is valid. + if (auto coin{base->PeekCoin(input.outpoint)}) input.coin.emplace(std::move(*coin)); + // Use release so writing coin above happens before the main thread acquires. + input.ready.test_and_set(std::memory_order_release); + input.ready.notify_one(); + return true; + } + + //! Stop all worker threads and clear fetching data. 
+ void StopFetching() noexcept + { + if (m_futures.empty()) { + // If fetching is already stopped, the per-block state must already be cleared. + Assert(m_inputs.empty()); + Assert(m_txids.empty()); + Assert(m_input_head.load(std::memory_order_relaxed) == 0); + Assert(m_input_tail == 0); + return; + } + // Skip fetching the rest of the inputs by moving the head to the end. + m_input_head.store(m_inputs.size(), std::memory_order_relaxed); + // Wait for all threads to stop. + for (auto& future : m_futures) future.wait(); + m_futures.clear(); + m_inputs.clear(); + m_input_head.store(0, std::memory_order_relaxed); + m_input_tail = 0; + } + std::optional FetchCoinFromBase(const COutPoint& outpoint) const override { + // This assumes ConnectBlock accesses all inputs in the same order as they are added to m_inputs + // in StartFetching. Some outpoints are not accessed because they are created by the block, so we scan until we + // come across the requested input. + for (const auto i : std::views::iota(m_input_tail, m_inputs.size())) { + auto& input{m_inputs[i]}; + if (input.outpoint != outpoint) continue; + // We advance the tail since the input is cached and not accessed through this method again. + m_input_tail = i + 1; + // Check if the coin is ready to be read. We need acquire so we match the worker thread's release. + while (!input.ready.test(std::memory_order_acquire)) { + // Work instead of waiting if the coin is not ready + if (!ProcessInput()) { + // No more work, just wait + input.ready.wait(/*old=*/false, std::memory_order_acquire); + break; + } + } + // The worker has already done base->PeekCoin; whether it succeeded or not, we can + // use its result directly. No need to fall back to PeekCoin again. + return std::move(input.coin); + } + + // The outpoint isn't a block input (e.g. BIP30 duplicate-txid check); fall back to base. return base->PeekCoin(outpoint); } + //! Non-null. May have zero workers when input fetching is disabled. 
+ std::shared_ptr m_thread_pool; + std::vector> m_futures{}; + +protected: + void Reset() noexcept override + { + StopFetching(); + CCoinsViewCache::Reset(); + } + public: - using CCoinsViewCache::CCoinsViewCache; + explicit CoinsViewOverlay(CCoinsView* in_base, std::shared_ptr thread_pool, + bool deterministic = false) noexcept + : CCoinsViewCache{in_base, deterministic}, + m_thread_pool{std::move(thread_pool)} + { + Assume(m_thread_pool); + // Reserve to the worst-case so emplace_back in StartFetching never reallocates m_inputs. + // InputToFetch's move constructor aborts; this reservation makes that unreachable. + m_inputs.reserve(MAX_INPUTS_PER_BLOCK); + } + + //! Start fetching inputs from block in background. + [[nodiscard]] ResetGuard StartFetching(const CBlock& block LIFETIMEBOUND) noexcept + { + Assert(m_futures.empty()); + Assert(m_inputs.empty()); + Assert(m_txids.empty()); + Assert(m_input_head.load(std::memory_order_relaxed) == 0); + Assert(m_input_tail == 0); + if (const auto workers_count{m_thread_pool->WorkersCount()}; workers_count > 0) { + // Pre-filter same-block spends: iterate transactions in order, dropping any input + // whose prevout.hash matches an earlier transaction's txid in the block. Workers can + // then assume every entry in m_inputs is expected to be in the base view. + m_txids.reserve(block.vtx.size()); + for (const auto& tx : block.vtx | std::views::drop(1)) { + for (const auto& input : tx->vin) { + if (!m_txids.contains(input.prevout.hash)) m_inputs.emplace_back(input.prevout); + } + m_txids.emplace(tx->GetHash()); + } + // Same-block spends are now filtered out of m_inputs; m_txids is no longer needed + // until the next StartFetching. Cleared here so allocations are reused next call. + m_txids.clear(); + // Only start threads if we have something to fetch. 
+ if (!m_inputs.empty()) { + std::vector> tasks(workers_count, [this] { + while (ProcessInput()) {} + }); + if (auto futures{m_thread_pool->Submit(std::move(tasks))}; futures.has_value()) { + m_futures = std::move(*futures); + } + } + if (m_futures.empty()) m_inputs.clear(); + } + return CreateResetGuard(); + } + + void SetBackend(CCoinsView& view) override + { + StopFetching(); + CCoinsViewCache::SetBackend(view); + } + + void Flush(bool reallocate_cache = true) override + { + StopFetching(); + CCoinsViewCache::Flush(reallocate_cache); + } + + void Sync() override + { + StopFetching(); + CCoinsViewCache::Sync(); + } }; //! Utility function to add all of a transaction's outputs to a cache. diff --git a/src/consensus/consensus.h b/src/consensus/consensus.h index 71b5fe2468d9..9d39060852de 100644 --- a/src/consensus/consensus.h +++ b/src/consensus/consensus.h @@ -20,6 +20,13 @@ static const int COINBASE_MATURITY = 100; static const int WITNESS_SCALE_FACTOR = 4; +/** The minimum serialized size of a CTxIn even with an empty scriptSig (32 byte txid + + * 4 byte vout + 1 byte scriptSig length + 4 byte sequence). */ +static constexpr unsigned int MIN_TXIN_SERIALIZED_SIZE = 41; +/** The maximum number of inputs that can fit in a block. Witness data is not stored in + * the input itself, so the bound is determined by the stripped block size. 
*/ +static constexpr unsigned int MAX_INPUTS_PER_BLOCK = (MAX_BLOCK_WEIGHT / WITNESS_SCALE_FACTOR) / MIN_TXIN_SERIALIZED_SIZE; + static const size_t MIN_TRANSACTION_WEIGHT = WITNESS_SCALE_FACTOR * 60; // 60 is the lower bound for the size of a valid serialized CTransaction static const size_t MIN_SERIALIZABLE_TRANSACTION_WEIGHT = WITNESS_SCALE_FACTOR * 10; // 10 is the lower bound for the size of a serialized CTransaction diff --git a/src/init.cpp b/src/init.cpp index c53e5ed634c7..5e19ebcc4538 100644 --- a/src/init.cpp +++ b/src/init.cpp @@ -517,6 +517,7 @@ void SetupServerArgs(ArgsManager& argsman, bool can_listen_ipc) argsman.AddArg("-minimumchainwork=", strprintf("Minimum work assumed to exist on a valid chain in hex (default: %s, testnet3: %s, testnet4: %s, signet: %s)", defaultChainParams->GetConsensus().nMinimumChainWork.GetHex(), testnetChainParams->GetConsensus().nMinimumChainWork.GetHex(), testnet4ChainParams->GetConsensus().nMinimumChainWork.GetHex(), signetChainParams->GetConsensus().nMinimumChainWork.GetHex()), ArgsManager::ALLOW_ANY | ArgsManager::DEBUG_ONLY, OptionsCategory::OPTIONS); argsman.AddArg("-par=", strprintf("Set the number of script verification threads (0 = auto, up to %d, <0 = leave that many cores free, default: %d)", MAX_SCRIPTCHECK_THREADS, DEFAULT_SCRIPTCHECK_THREADS), ArgsManager::ALLOW_ANY, OptionsCategory::OPTIONS); + argsman.AddArg("-inputfetchthreads=", strprintf("Set the number of input fetch threads (0 disables, up to %d, default: %d). 
Negative values are rejected.", MAX_INPUTFETCH_THREADS, DEFAULT_INPUTFETCH_THREADS), ArgsManager::ALLOW_ANY, OptionsCategory::OPTIONS); argsman.AddArg("-persistmempool", strprintf("Whether to save the mempool on shutdown and load on restart (default: %u)", DEFAULT_PERSIST_MEMPOOL), ArgsManager::ALLOW_ANY, OptionsCategory::OPTIONS); argsman.AddArg("-persistmempoolv1", strprintf("Whether a mempool.dat file created by -persistmempool or the savemempool RPC will be written in the legacy format " diff --git a/src/kernel/CMakeLists.txt b/src/kernel/CMakeLists.txt index 541f10b3adce..d2a467f6955e 100644 --- a/src/kernel/CMakeLists.txt +++ b/src/kernel/CMakeLists.txt @@ -61,6 +61,7 @@ add_library(bitcoinkernel ../uint256.cpp ../util/chaintype.cpp ../util/check.cpp + ../util/exception.cpp ../util/expected.cpp ../util/feefrac.cpp ../util/fs.cpp @@ -70,6 +71,7 @@ add_library(bitcoinkernel ../util/rbf.cpp ../util/signalinterrupt.cpp ../util/syserror.cpp + ../util/thread.cpp ../util/threadnames.cpp ../util/time.cpp ../util/tokenpipe.cpp diff --git a/src/kernel/chainstatemanager_opts.h b/src/kernel/chainstatemanager_opts.h index 134b93194bf4..2a82f28a68e4 100644 --- a/src/kernel/chainstatemanager_opts.h +++ b/src/kernel/chainstatemanager_opts.h @@ -22,6 +22,7 @@ class CChainParams; class ValidationSignals; static constexpr auto DEFAULT_MAX_TIP_AGE{24h}; +static constexpr int32_t DEFAULT_INPUTFETCH_THREADS{4}; namespace kernel { @@ -46,6 +47,8 @@ struct ChainstateManagerOpts { ValidationSignals* signals{nullptr}; //! Number of script check worker threads. Zero means no parallel verification. int worker_threads_num{0}; + //! Number of input fetch worker threads. Zero means no parallel fetching. 
+ int32_t inputfetch_threads_num{DEFAULT_INPUTFETCH_THREADS}; size_t script_execution_cache_bytes{DEFAULT_SCRIPT_EXECUTION_CACHE_BYTES}; size_t signature_cache_bytes{DEFAULT_SIGNATURE_CACHE_BYTES}; }; diff --git a/src/node/chainstatemanager_args.cpp b/src/node/chainstatemanager_args.cpp index bf91a750c140..22e890d78c4c 100644 --- a/src/node/chainstatemanager_args.cpp +++ b/src/node/chainstatemanager_args.cpp @@ -60,6 +60,13 @@ util::Result ApplyArgsManOptions(const ArgsManager& args, ChainstateManage // Subtract 1 because the main thread counts towards the par threads. opts.worker_threads_num = script_threads - 1; + if (auto value{args.GetIntArg("-inputfetchthreads")}) { + if (*value < 0) { + return util::Error{Untranslated(strprintf("-inputfetchthreads must be non-negative (got %d). Use 0 to disable input fetching.", *value))}; + } + opts.inputfetch_threads_num = int32_t(std::min(*value, MAX_INPUTFETCH_THREADS)); + } + if (auto max_size = args.GetIntArg("-maxsigcachesize")) { // 1. When supplied with a max_size of 0, both the signature cache and // script execution cache create the minimum possible cache (2 diff --git a/src/test/CMakeLists.txt b/src/test/CMakeLists.txt index 1365d6c147a6..ce181cbe3c00 100644 --- a/src/test/CMakeLists.txt +++ b/src/test/CMakeLists.txt @@ -127,6 +127,7 @@ add_executable(test_bitcoin uint256_tests.cpp util_check_tests.cpp util_expected_tests.cpp + util_hasher_tests.cpp util_string_tests.cpp util_tests.cpp util_threadnames_tests.cpp diff --git a/src/test/coinsviewoverlay_tests.cpp b/src/test/coinsviewoverlay_tests.cpp index 6b20b31211a6..cea20f61158a 100644 --- a/src/test/coinsviewoverlay_tests.cpp +++ b/src/test/coinsviewoverlay_tests.cpp @@ -3,6 +3,7 @@ // file COPYING or http://www.opensource.org/licenses/mit-license.php. 
#include +#include #include #include #include @@ -10,12 +11,15 @@ #include #include #include +#include #include #include #include +#include #include +#include BOOST_AUTO_TEST_SUITE(coinsviewoverlay_tests) @@ -29,10 +33,15 @@ CBlock CreateBlock() noexcept coinbase.vin.emplace_back(); block.vtx.push_back(MakeTransactionRef(coinbase)); + Txid prevhash{Txid::FromUint256(uint256{1})}; + for (const auto i : std::views::iota(1, NUM_TXS)) { CMutableTransaction tx; - Txid txid{Txid::FromUint256(uint256(i))}; + // Alternate between external inputs (fetched by workers) and same-block spends of the + // previous tx (filtered out of m_inputs by StartFetching). + const Txid txid{i % 2 == 0 ? Txid::FromUint256(uint256(i)) : prevhash}; tx.vin.emplace_back(txid, 0); + prevhash = tx.GetHash(); block.vtx.push_back(MakeTransactionRef(tx)); } @@ -44,20 +53,37 @@ void PopulateView(const CBlock& block, CCoinsView& view, bool spent = false) CCoinsViewCache cache{&view}; cache.SetBestBlock(uint256::ONE); + std::unordered_set txids{}; + txids.reserve(block.vtx.size() - 1); for (const auto& tx : block.vtx | std::views::drop(1)) { for (const auto& in : tx->vin) { + if (txids.contains(in.prevout.hash)) continue; Coin coin{}; if (!spent) coin.out.nValue = 1; cache.EmplaceCoinInternalDANGER(COutPoint{in.prevout}, std::move(coin)); } + txids.emplace(tx->GetHash()); } cache.Flush(); } +//! Returns a started thread pool shared across tests, mirroring how production reuses pools. 
+std::shared_ptr StartedThreadPool()
+{
+    static const auto thread_pool{[] {
+        auto pool{std::make_shared("fetch_test")};
+        pool->Start(DEFAULT_INPUTFETCH_THREADS);
+        return pool;
+    }()};
+    return thread_pool;
+}
+
 void CheckCache(const CBlock& block, const CCoinsViewCache& cache)
 {
     uint32_t counter{0};
+    std::unordered_set txids{};
+    txids.reserve(block.vtx.size() - 1);
 
     for (const auto& tx : block.vtx) {
         if (tx->IsCoinBase()) {
@@ -68,9 +94,12 @@
             const auto& first{cache.AccessCoin(outpoint)};
             const auto& second{cache.AccessCoin(outpoint)};
             BOOST_CHECK_EQUAL(&first, &second);
-            ++counter;
-            BOOST_CHECK(cache.HaveCoinInCache(outpoint));
+            const auto should_have{!txids.contains(outpoint.hash)};
+            if (should_have) ++counter;
+            const auto have{cache.HaveCoinInCache(outpoint)};
+            BOOST_CHECK_EQUAL(should_have, have);
         }
+        txids.emplace(tx->GetHash());
     }
 }
 BOOST_CHECK_EQUAL(cache.GetCacheSize(), counter);
@@ -84,7 +113,11 @@
     CCoinsViewDB db{{.path = "", .cache_bytes = 1_MiB, .memory_only = true}, {}};
     PopulateView(block, db);
     CCoinsViewCache main_cache{&db};
-    CoinsViewOverlay view{&main_cache};
+    // Non-deterministic so the SaltedTxidHasher in m_txids uses a random key, exercising the bucket
+    // distribution path. Same-block spends are pre-filtered out of m_inputs in StartFetching;
+    // every other input (including the swap_ranges case) is fetched by workers.
+ CoinsViewOverlay view{&main_cache, StartedThreadPool(), /*deterministic=*/false}; + const auto reset_guard{view.StartFetching(block)}; const auto& outpoint{block.vtx[1]->vin[0].prevout}; BOOST_CHECK(view.HaveCoin(outpoint)); @@ -111,7 +144,8 @@ BOOST_AUTO_TEST_CASE(fetch_inputs_from_cache) CCoinsViewDB db{{.path = "", .cache_bytes = 1_MiB, .memory_only = true}, {}}; CCoinsViewCache main_cache{&db}; PopulateView(block, main_cache); - CoinsViewOverlay view{&main_cache}; + CoinsViewOverlay view{&main_cache, StartedThreadPool(), /*deterministic=*/true}; + const auto reset_guard{view.StartFetching(block)}; CheckCache(block, view); const auto& outpoint{block.vtx[1]->vin[0].prevout}; @@ -131,7 +165,8 @@ BOOST_AUTO_TEST_CASE(fetch_no_double_spend) CCoinsViewCache main_cache{&db}; // Add all inputs as spent already in cache PopulateView(block, main_cache, /*spent=*/true); - CoinsViewOverlay view{&main_cache}; + CoinsViewOverlay view{&main_cache, StartedThreadPool(), /*deterministic=*/true}; + const auto reset_guard{view.StartFetching(block)}; for (const auto& tx : block.vtx) { for (const auto& in : tx->vin) { const auto& c{view.AccessCoin(in.prevout)}; @@ -149,7 +184,8 @@ BOOST_AUTO_TEST_CASE(fetch_no_inputs) const auto block{CreateBlock()}; CCoinsViewDB db{{.path = "", .cache_bytes = 1_MiB, .memory_only = true}, {}}; CCoinsViewCache main_cache{&db}; - CoinsViewOverlay view{&main_cache}; + CoinsViewOverlay view{&main_cache, StartedThreadPool(), /*deterministic=*/true}; + const auto reset_guard{view.StartFetching(block)}; for (const auto& tx : block.vtx) { for (const auto& in : tx->vin) { const auto& c{view.AccessCoin(in.prevout)}; @@ -161,5 +197,44 @@ BOOST_AUTO_TEST_CASE(fetch_no_inputs) BOOST_CHECK_EQUAL(view.GetCacheSize(), 0); } +// Access coins that are not block inputs +BOOST_AUTO_TEST_CASE(access_non_input_coins) +{ + const auto block{CreateBlock()}; + CCoinsViewDB db{{.path = "", .cache_bytes = 1_MiB, .memory_only = true}, {}}; + CCoinsViewCache main_cache{&db}; + 
+ Coin coin{}; + coin.out.nValue = 1; + const COutPoint outpoint{Txid::FromUint256(uint256::ZERO), 0}; + main_cache.EmplaceCoinInternalDANGER(COutPoint{outpoint}, std::move(coin)); + const COutPoint missing_outpoint{Txid::FromUint256(uint256::ONE), 0}; + + CoinsViewOverlay view{&main_cache, StartedThreadPool(), /*deterministic=*/true}; + const auto reset_guard{view.StartFetching(block)}; + + // Non-input fallback hit. + const auto& accessed_coin{view.AccessCoin(outpoint)}; + BOOST_CHECK(!accessed_coin.IsSpent()); + + // Non-input fallback miss. + const auto& missing_coin{view.AccessCoin(missing_outpoint)}; + BOOST_CHECK(missing_coin.IsSpent()); + BOOST_CHECK(!view.HaveCoinInCache(missing_outpoint)); +} + +// Test that disabled input fetching leaves normal cache lookups available. +BOOST_AUTO_TEST_CASE(fetch_disabled_uses_normal_lookup) +{ + const auto block{CreateBlock()}; + CCoinsViewDB db{{.path = "", .cache_bytes = 1_MiB, .memory_only = true}, {}}; + CCoinsViewCache main_cache{&db}; + PopulateView(block, main_cache); + auto thread_pool{std::make_shared("fetch_none")}; + CoinsViewOverlay view{&main_cache, thread_pool, /*deterministic=*/false}; + const auto reset_guard{view.StartFetching(block)}; + CheckCache(block, view); +} + BOOST_AUTO_TEST_SUITE_END() diff --git a/src/test/fuzz/coins_view.cpp b/src/test/fuzz/coins_view.cpp index c11581d2d3c3..93dfda03cec5 100644 --- a/src/test/fuzz/coins_view.cpp +++ b/src/test/fuzz/coins_view.cpp @@ -7,7 +7,9 @@ #include #include #include +#include #include +#include #include #include