From d3e36becf56f0d24433c933641211a62beffa35d Mon Sep 17 00:00:00 2001 From: Luca Burelli Date: Thu, 18 Dec 2025 15:04:07 +0100 Subject: [PATCH 01/22] ccache: fix workflow, use per-board cache Do not recycle the cache among different board builds, improves cache hit rate. Signed-off-by: Luca Burelli --- .github/workflows/package_core.yml | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/.github/workflows/package_core.yml b/.github/workflows/package_core.yml index cc88a5f89..e869d6137 100644 --- a/.github/workflows/package_core.yml +++ b/.github/workflows/package_core.yml @@ -21,7 +21,7 @@ jobs: run: | sudo apt-get remove --purge man-db -y # skips the mandb triggers sudo apt-get update - sudo apt-get install -y --no-install-recommends git cmake wget python3-pip ninja-build ccache + sudo apt-get install -y --no-install-recommends git cmake wget python3-pip ninja-build - uses: actions/checkout@v4 with: @@ -80,12 +80,16 @@ jobs: - name: Restore build environment run: | + sudo apt-get remove --purge man-db -y # skips the mandb triggers + sudo apt-get update + sudo apt-get install -y --no-install-recommends git cmake wget python3-pip ninja-build ccache (cd ~ && tar --use-compress-program=unzstd -xpf build-env.tar.zstd && rm build-env.tar.zstd) - name: ccache uses: hendrikmuhs/ccache-action@v1.2 with: verbose: 1 + key: ${{ github.job }}-${{ matrix.board }} - name: Build loader shell: bash From 3a46a9757c2986f3cc7794b0af52568bb3a19e58 Mon Sep 17 00:00:00 2001 From: Luca Burelli Date: Fri, 9 Jan 2026 13:35:19 +0100 Subject: [PATCH 02/22] SPI: warning cleanups Fix various warnings in SPI.cpp Signed-off-by: Luca Burelli --- libraries/SPI/SPI.cpp | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/libraries/SPI/SPI.cpp b/libraries/SPI/SPI.cpp index 1a0b39115..974bdcc9b 100644 --- a/libraries/SPI/SPI.cpp +++ b/libraries/SPI/SPI.cpp @@ -33,8 +33,6 @@ void arduino::ZephyrSPI::transfer(void *buf, size_t count) { } int 
arduino::ZephyrSPI::transfer(void *buf, size_t len, const struct spi_config *config) { - int ret; - const struct spi_buf tx_buf = {.buf = buf, .len = len}; const struct spi_buf_set tx_buf_set = { .buffers = &tx_buf, @@ -51,9 +49,11 @@ int arduino::ZephyrSPI::transfer(void *buf, size_t len, const struct spi_config } void arduino::ZephyrSPI::usingInterrupt(int interruptNumber) { + ARG_UNUSED(interruptNumber); } void arduino::ZephyrSPI::notUsingInterrupt(int interruptNumber) { + ARG_UNUSED(interruptNumber); } void arduino::ZephyrSPI::beginTransaction(SPISettings settings) { @@ -96,12 +96,12 @@ void arduino::ZephyrSPI::beginTransaction(SPISettings settings) { // Set SPI configuration structure for 8-bit transfers memset(&config, 0, sizeof(struct spi_config)); config.operation = mode | SPI_WORD_SET(8); - config.frequency = max(SPI_MIN_CLOCK_FREQUENCY, settings.getClockFreq()); + config.frequency = max((uint32_t)SPI_MIN_CLOCK_FREQUENCY, settings.getClockFreq()); // Set SPI configuration structure for 16-bit transfers memset(&config16, 0, sizeof(struct spi_config)); config16.operation = mode | SPI_WORD_SET(16); - config16.frequency = max(SPI_MIN_CLOCK_FREQUENCY, settings.getClockFreq()); + config16.frequency = max((uint32_t)SPI_MIN_CLOCK_FREQUENCY, settings.getClockFreq()); } void arduino::ZephyrSPI::endTransaction(void) { From d44ed9cfc8f52b24059b755d7c4e4b1cbcdd7643 Mon Sep 17 00:00:00 2001 From: Luca Burelli Date: Fri, 9 Jan 2026 13:53:59 +0100 Subject: [PATCH 03/22] loader: add spdx support Add full SPDX support to the build system to generate SPDX documents alongside the build artifacts. 
Signed-off-by: Luca Burelli --- extra/build.sh | 1 + loader/prj.conf | 1 + 2 files changed, 2 insertions(+) diff --git a/extra/build.sh b/extra/build.sh index 2b2bac7d5..22700dc81 100755 --- a/extra/build.sh +++ b/extra/build.sh @@ -73,6 +73,7 @@ fi BUILD_DIR=build/${variant} VARIANT_DIR=variants/${variant} rm -rf ${BUILD_DIR} +west spdx --init -d ${BUILD_DIR} west build -d ${BUILD_DIR} -b ${target} loader -t llext-edk ${args} # Extract the generated EDK tarball and copy it to the variant directory diff --git a/loader/prj.conf b/loader/prj.conf index dc40e9533..567b081e3 100644 --- a/loader/prj.conf +++ b/loader/prj.conf @@ -4,6 +4,7 @@ CONFIG_USERSPACE=n CONFIG_ARM_MPU=n +CONFIG_BUILD_OUTPUT_META=y CONFIG_LOG=y CONFIG_LOG_MODE_IMMEDIATE=y From e89ad81e01abfece4a152e13fb3868294808ddb1 Mon Sep 17 00:00:00 2001 From: Luca Burelli Date: Fri, 9 Jan 2026 13:56:31 +0100 Subject: [PATCH 04/22] package_core: limit libraries per artifact Properly implement artifact-specific inclusion and exclusion of libraries in the core packaging script. 
Signed-off-by: Luca Burelli --- extra/artifacts/zephyr_contrib.exc | 3 +++ extra/artifacts/zephyr_unoq.exc | 5 +++++ extra/artifacts/zephyr_unoq.only | 3 +++ extra/package_core.sh | 8 ++++++-- 4 files changed, 17 insertions(+), 2 deletions(-) create mode 100644 extra/artifacts/zephyr_contrib.exc create mode 100644 extra/artifacts/zephyr_unoq.exc create mode 100644 extra/artifacts/zephyr_unoq.only diff --git a/extra/artifacts/zephyr_contrib.exc b/extra/artifacts/zephyr_contrib.exc new file mode 100644 index 000000000..794d52fc4 --- /dev/null +++ b/extra/artifacts/zephyr_contrib.exc @@ -0,0 +1,3 @@ +libraries/Camera/ +libraries/Storage/ +libraries/Zephyr_SDRAM/ diff --git a/extra/artifacts/zephyr_unoq.exc b/extra/artifacts/zephyr_unoq.exc new file mode 100644 index 000000000..eb3870852 --- /dev/null +++ b/extra/artifacts/zephyr_unoq.exc @@ -0,0 +1,5 @@ +libraries/Camera/ +libraries/Ethernet/ +libraries/Storage/ +libraries/WiFi/ +libraries/Zephyr_SDRAM/ diff --git a/extra/artifacts/zephyr_unoq.only b/extra/artifacts/zephyr_unoq.only new file mode 100644 index 000000000..af7b603ee --- /dev/null +++ b/extra/artifacts/zephyr_unoq.only @@ -0,0 +1,3 @@ +libraries/Arduino_LED_Matrix/ +libraries/Arduino_RouterBridge/ +libraries/Arduino_RPClite/ diff --git a/extra/package_core.sh b/extra/package_core.sh index 191678bc3..02a24ddee 100755 --- a/extra/package_core.sh +++ b/extra/package_core.sh @@ -59,9 +59,9 @@ cat platform.txt > ${TEMP_PLATFORM} sed -ie "s/^version=.*/version=$(extra/get_core_version.sh)/" ${TEMP_PLATFORM} declutter_file() { - # remove comments and empty lines + # remove comments, whitespace at EOL, '/' dir terminators and empty lines [ -f "$1" ] || return 0 - cat "$1" | sed -e 's/\s*#.*//' | grep -v '^\s*$' + cat "$1" | sed -e 's/\s*#.*//' -e 's/\s*$//' -e 's/\/$//' | grep -v '^\s*$' } # create the list of files and directories to include @@ -70,6 +70,7 @@ echo ${TEMP_BOARDS} >> ${TEMP_INC} echo ${TEMP_PLATFORM} >> ${TEMP_INC} declutter_file 
extra/artifacts/_common.inc >> ${TEMP_INC} declutter_file extra/artifacts/$ARTIFACT.inc >> ${TEMP_INC} +declutter_file extra/artifacts/$ARTIFACT.only >> ${TEMP_INC} for variant in $INCLUDED_VARIANTS ; do echo "- ${variant}" echo "variants/${variant}/" >> ${TEMP_INC} @@ -84,6 +85,9 @@ done TEMP_EXC=$(mktemp -p . | sed 's/\.\///') declutter_file extra/artifacts/_common.exc >> ${TEMP_EXC} declutter_file extra/artifacts/$ARTIFACT.exc >> ${TEMP_EXC} +for f in $(ls extra/artifacts/*.only | grep -v "$ARTIFACT.only") ; do + declutter_file $f >> ${TEMP_EXC} +done mkdir -p $(dirname ${OUTPUT_FILE}) tar -cjhf ${OUTPUT_FILE} -X ${TEMP_EXC} -T ${TEMP_INC} \ From 3fb3897bbd3846b395fd41f6649774c04568a90f Mon Sep 17 00:00:00 2001 From: Luca Burelli Date: Fri, 9 Jan 2026 13:59:21 +0100 Subject: [PATCH 05/22] nicla_sense: disable SPI to avoid conflict with UART SPI and UART cannot be used together on the Nicla Sense ME as they share the same controller and only one can currently be active. For the moment, prefer to keep UART enabled and disable SPI in the device tree. Signed-off-by: Luca Burelli --- .../arduino_nicla_sense_me_nrf52832.overlay | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/variants/arduino_nicla_sense_me_nrf52832/arduino_nicla_sense_me_nrf52832.overlay b/variants/arduino_nicla_sense_me_nrf52832/arduino_nicla_sense_me_nrf52832.overlay index b9cd0dd2b..c7443b678 100644 --- a/variants/arduino_nicla_sense_me_nrf52832/arduino_nicla_sense_me_nrf52832.overlay +++ b/variants/arduino_nicla_sense_me_nrf52832/arduino_nicla_sense_me_nrf52832.overlay @@ -38,6 +38,6 @@ serials = <&uart0>; i2cs = <&i2c1>; - spis = <&spi1>; + spis = <>; /* spi0, conflicts with uart0 */ }; }; From 9a8d57b8567d76f69921528a52a2db21cf4f8865 Mon Sep 17 00:00:00 2001 From: Luca Burelli Date: Fri, 9 Jan 2026 14:02:44 +0100 Subject: [PATCH 06/22] workflows: properly trigger on main branch Avoid multiple runs when pushing to branches in your own fork. 
Only run on pushes to main or pull requests. Signed-off-by: Luca Burelli --- .github/workflows/build.yml | 6 +++++- .github/workflows/package_core.yml | 6 ++++-- 2 files changed, 9 insertions(+), 3 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 7ea17b70f..0076045b6 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -1,6 +1,10 @@ name: Build native Zephyr samples -on: [push, pull_request] +on: + push: + branches: + - main + pull_request: jobs: build: diff --git a/.github/workflows/package_core.yml b/.github/workflows/package_core.yml index e869d6137..5374cdbc8 100644 --- a/.github/workflows/package_core.yml +++ b/.github/workflows/package_core.yml @@ -1,8 +1,10 @@ name: Package, test and upload core on: - - push - - pull_request + push: + branches: + - main + pull_request: jobs: From 4c9ce5bebdbc15f7c3175f54f90105d18ddf8cc7 Mon Sep 17 00:00:00 2001 From: Luca Burelli Date: Fri, 9 Jan 2026 14:35:18 +0100 Subject: [PATCH 07/22] get_core_version: override branch name in CI In GitHub Actions, checked out code in a pull_request workflow does check out the temporary merge commit by default, resulting in different SHAs and missed tags with 'git describe'. Using BRANCH_NAME when set ensures that git describe operates on the correct branch or tag ref. 
Signed-off-by: Luca Burelli --- .github/workflows/package_core.yml | 3 +++ extra/get_core_version.sh | 9 +++++++-- 2 files changed, 10 insertions(+), 2 deletions(-) diff --git a/.github/workflows/package_core.yml b/.github/workflows/package_core.yml index 5374cdbc8..37e5eb3f8 100644 --- a/.github/workflows/package_core.yml +++ b/.github/workflows/package_core.yml @@ -6,6 +6,9 @@ on: - main pull_request: +env: + BRANCH_NAME: ${{ github.head_ref || github.ref_name }} + jobs: build-env: diff --git a/extra/get_core_version.sh b/extra/get_core_version.sh index 5cfdf6b7c..18687f542 100755 --- a/extra/get_core_version.sh +++ b/extra/get_core_version.sh @@ -26,9 +26,14 @@ # If there are no tags at all (for example when run in a fork etc), it defaults # to "9.9.9-+". -VERSION=$(git describe --tags --exact-match 2>/dev/null) +# In CI, BRANCH_NAME points to the branch or tag ref that triggered the +# workflow run (e.g., "refs/heads/main" or "refs/tags/v1.2.3"). Without this, +# git describe would use HEAD, which is a temporary detached commit. +# If BRANCH_NAME is not set, the command falls back to using HEAD. + +VERSION=$(git describe --tags --exact-match ${BRANCH_NAME} 2>/dev/null) if [ -z "$VERSION" ]; then - VERSION=$(git describe --tags --dirty 2>/dev/null | + VERSION=$(git describe --tags --dirty ${BRANCH_NAME} 2>/dev/null | sed 's/\.\([[:digit:]]\+\)\(-.*\)*-[[:digit:]]\+-g/ \1 \2 /' | awk '{ if (NF==3) { print $1 "." ($2+1) "-0.dev+" $3 } else { print $1 "." $2 $3 "-0.dev+" $4 }}') if [ -z "$VERSION" ]; then From 01d66ba91991a2916bccea4da5323794579b101a Mon Sep 17 00:00:00 2001 From: Luca Burelli Date: Fri, 9 Jan 2026 16:09:05 +0100 Subject: [PATCH 08/22] package_core: refact 1/x: per-variant test lists, compile Instead of compiling a fixed Blink sketch for each board, generate a list of all examples and libraries to compile based on the variant (and core). This allows skipping examples that are not applicable for a given variant (e.g. 
using hardware features not present on the board), and to add additional libraries and examples to the test suite as required. Signed-off-by: Luca Burelli --- .github/workflows/package_core.yml | 31 ++++-- extra/artifacts/_common.test_setup.sh | 28 +++++ extra/artifacts/zephyr_main.test_setup.sh | 20 ++++ extra/artifacts/zephyr_unoq.test_setup.sh | 13 +++ extra/ci_test_list.sh | 105 ++++++++++++++++++ .../skip_these_examples.txt | 8 ++ .../skip_these_examples.txt | 13 +++ .../skip_these_examples.txt | 16 +++ .../skip_these_examples.txt | 16 +++ .../skip_these_examples.txt | 9 ++ .../skip_these_examples.txt | 8 ++ .../skip_these_examples.txt | 7 ++ .../skip_these_examples.txt | 9 ++ .../skip_these_examples.txt | 8 ++ .../skip_these_examples.txt | 8 ++ .../frdm_rw612_rw612/skip_these_examples.txt | 8 ++ 16 files changed, 297 insertions(+), 10 deletions(-) create mode 100644 extra/artifacts/_common.test_setup.sh create mode 100644 extra/artifacts/zephyr_main.test_setup.sh create mode 100644 extra/artifacts/zephyr_unoq.test_setup.sh create mode 100755 extra/ci_test_list.sh create mode 100644 variants/arduino_giga_r1_stm32h747xx_m7/skip_these_examples.txt create mode 100644 variants/arduino_nano_33_ble_nrf52840_sense/skip_these_examples.txt create mode 100644 variants/arduino_nano_matter_mgm240sd22vna/skip_these_examples.txt create mode 100644 variants/arduino_nicla_sense_me_nrf52832/skip_these_examples.txt create mode 100644 variants/arduino_opta_stm32h747xx_m7/skip_these_examples.txt create mode 100644 variants/arduino_portenta_c33_r7fa6m5bh3cfc/skip_these_examples.txt create mode 100644 variants/arduino_portenta_h7_stm32h747xx_m7/skip_these_examples.txt create mode 100644 variants/arduino_uno_q_stm32u585xx/skip_these_examples.txt create mode 100644 variants/ek_ra8d1_r7fa8d1bhecbd/skip_these_examples.txt create mode 100644 variants/frdm_mcxn947_mcxn947_cpu0/skip_these_examples.txt create mode 100644 variants/frdm_rw612_rw612/skip_these_examples.txt diff --git 
a/.github/workflows/package_core.yml b/.github/workflows/package_core.yml index 37e5eb3f8..a6c621ee3 100644 --- a/.github/workflows/package_core.yml +++ b/.github/workflows/package_core.yml @@ -131,6 +131,7 @@ jobs: - build-env - build-board env: + ALL_BOARD_DATA: ${{ needs.build-env.outputs.ALL_BOARD_DATA }} CORE_ARTIFACT: ArduinoCore-${{ matrix.artifact }}-${{ needs.build-env.outputs.CORE_HASH }} CORE_TAG: ${{ needs.build-env.outputs.CORE_TAG }} strategy: @@ -177,7 +178,6 @@ jobs: runs-on: ubuntu-latest needs: - package-core - if: always() steps: - uses: geekyeggo/delete-artifact@v5.1.0 with: @@ -202,8 +202,17 @@ jobs: PLAT: arduino:${{ matrix.subarch }} FQBN: arduino:${{ matrix.subarch }}:${{ matrix.board }} CORE_ARTIFACT: ArduinoCore-${{ matrix.artifact }}-${{ needs.build-env.outputs.CORE_HASH }} + ARTIFACT_TAG: ${{ needs.build-env.outputs.CORE_HASH }}-${{ matrix.board }} if: ${{ !cancelled() && needs.build-env.result == 'success' }} steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + persist-credentials: false + sparse-checkout: | + extra/ci_test_list.sh + extra/artifacts/ + - uses: actions/download-artifact@v4 with: name: ${{ env.CORE_ARTIFACT }} @@ -213,22 +222,24 @@ jobs: tar xf ${CORE_ARTIFACT}.tar.bz2 # will create ArduinoCore-zephyr/ echo "REPORT_FILE=$(echo ${FQBN} | tr ':' '-').json" >> $GITHUB_ENV - - name: Create Blink sketch + - name: Get test sketches run: | - mkdir Blink/ - wget -nv https://raw.githubusercontent.com/arduino/arduino-examples/refs/heads/main/examples/01.Basics/Blink/Blink.ino -P Blink/ + # sets ALL_TESTS and ALL_LIBRARIES env vars + extra/ci_test_list.sh ${{ matrix.artifact }} ${{ matrix.variant }} - - name: Compile Blink for ${{ matrix.board }} + - name: Compile tests for ${{ matrix.board }} uses: pillo79/compile-sketches@main with: fqbn: ${{ env.FQBN }} platforms: | - # Use Board Manager to install the latest release of Arduino Zephyr Boards to get the toolchain - - name: "arduino:zephyr" - source-url: 
"https://downloads.arduino.cc/packages/package_zephyr_index.json" + # Use Board Manager version first, to install the toolchain + - name: ${{ env.PLAT }} - name: ${{ env.PLAT }} source-path: "ArduinoCore-zephyr" - sketch-paths: Blink + sketch-paths: | + ${{ env.ALL_TESTS }} + libraries: | + ${{ env.ALL_LIBRARIES }} cli-compile-flags: | - '--build-property' - 'compiler.c.extra_flags=-Wno-type-limits -Wno-missing-field-initializers' @@ -262,7 +273,7 @@ jobs: - uses: actions/upload-artifact@v4 if: ${{ success() || failure() }} with: - name: test-report-${{ needs.build-env.outputs.CORE_TAG }}-${{ matrix.board }} + name: test-report-${{ env.ARTIFACT_TAG }} path: sketches-reports/* collect-logs: diff --git a/extra/artifacts/_common.test_setup.sh b/extra/artifacts/_common.test_setup.sh new file mode 100644 index 000000000..79fa43f21 --- /dev/null +++ b/extra/artifacts/_common.test_setup.sh @@ -0,0 +1,28 @@ +# This script is sourced from extra/ci_test_list.sh to provide +# artifact-specific tests for Zephyr CI tests. +# +# Two helper functions are provided for easy GitHub queries: +# - get_branch_tip [ ...] +# - get_latest_release [ ...] +# +# By default, the whole project will be added to the test suite. +# When given additional path arguments, the functions will only +# copy artifacts under the provided paths. 
+ +if [ "$ARTIFACT" == "zephyr_contrib" ] ; then + # Minimal safety test for Zephyr contrib boards + get_branch_tip examples arduino/arduino-examples main \ + examples/01.Basics/Blink +else + # Get a few core Arduino examples + get_branch_tip examples arduino/arduino-examples main \ + examples/01.Basics/Blink \ + examples/01.Basics/AnalogReadSerial \ + examples/04.Communication/SerialPassthrough \ + + # Smoke test for C++ features + get_latest_release libraries arduino-libraries/Arduino_JSON \ + + # Smoke test for SPI API compatibility + # get_branch_tip libraries PaulStoffregen/SerialFlash master +fi diff --git a/extra/artifacts/zephyr_main.test_setup.sh b/extra/artifacts/zephyr_main.test_setup.sh new file mode 100644 index 000000000..5f2424927 --- /dev/null +++ b/extra/artifacts/zephyr_main.test_setup.sh @@ -0,0 +1,20 @@ +# This script is sourced from extra/ci_test_list.sh to provide +# artifact-specific tests for Zephyr CI tests. +# +# Two helper functions are provided for easy GitHub queries: +# - get_branch_tip <folder> <repo> <branch> [<path> ...] +# - get_latest_release <folder> <repo> [<path> ...] +# +# By default, the whole project will be added to the test suite. +# When given additional path arguments, the functions will only +# copy artifacts under the provided paths. + +# ArduinoBLE +get_branch_tip libraries arduino-libraries/ArduinoBLE master \ + examples/Central/LedControl \ + examples/Central/Scan \ + examples/Peripheral/Advertising/EnhancedAdvertising \ + examples/Peripheral/ButtonLED \ + +# Arduino_SecureElement +get_latest_release libraries arduino-libraries/Arduino_SecureElement diff --git a/extra/artifacts/zephyr_unoq.test_setup.sh b/extra/artifacts/zephyr_unoq.test_setup.sh new file mode 100644 index 000000000..5af16b32e --- /dev/null +++ b/extra/artifacts/zephyr_unoq.test_setup.sh @@ -0,0 +1,13 @@ +# This script is sourced from extra/ci_test_list.sh to provide +# artifact-specific tests for Zephyr CI tests. 
+# +# Two helper functions are provided for easy GitHub queries: +# - get_branch_tip [ ...] +# - get_latest_release [ ...] +# +# By default, the whole project will be added to the test suite. +# When given additional path arguments, the functions will only +# copy artifacts under the provided paths. + +# ArduinoBLE +get_branch_tip libraries arduino-libraries/ArduinoBLE master diff --git a/extra/ci_test_list.sh b/extra/ci_test_list.sh new file mode 100755 index 000000000..d8fc1f797 --- /dev/null +++ b/extra/ci_test_list.sh @@ -0,0 +1,105 @@ +#!/bin/bash +# +# This script generates a list of all libraries and their dependencies for use +# in GitHub Actions environment variables. It also generates a list of all example +# .ino files, excluding those specified in a variant's skip list. +# +# The core under test should be extracted in the 'ArduinoCore-zephyr' subdirectory. + +if [ "$#" -lt 2 ] ; then + echo "Usage: $0 [...]" + exit 1 +fi + +if [ -z "$GITHUB_ENV" ] || [ ! -d ArduinoCore-zephyr/ ]; then + echo "Not in a Github CI run, cannot proceed." 
+ exit 1 +fi + +ARTIFACT=$1 +VARIANT_DIR="ArduinoCore-zephyr/variants/$2" +shift 2 + +search_for_sketches_in() { + local folder="$1" + find "$folder" -name *.ino 2>/dev/null | sed -e 's/^\.\///' +} + +fetch_and_extract() { + local temp_file=$(mktemp).tar.gz + local temp_dir=$(mktemp -d) + local link="$1" + local inner_folder="${temp_dir}/$2" + local output_folder="$3" + shift 3 + + wget -nv "$link" -O "$temp_file" + tar -xzf "$temp_file" -C "$temp_dir" + + mkdir -p "$(dirname $output_folder)" + mv $inner_folder "$output_folder" + if [ $# -eq 0 ] ; then + # Search entire project for tests + search_for_sketches_in "$output_folder" >> $ALL_TESTS + else + # Search only specified paths for tests + for item in "$@" ; do + search_for_sketches_in "$output_folder/${item}" >> $ALL_TESTS + done + fi + rm -rf $tmpdir +} + +get_latest_release() { + local folder="$1" + local repo="$2" + local project="${repo##*/}" + local url=$(curl -s "https://api.github.com/repos/${repo}/releases/latest" | jq -r '.tarball_url') + shift 2 + + echo "Getting latest release for ${repo}" + + fetch_and_extract "$url" "*-${project}-*" "ArduinoCore-zephyr/${folder}/${project}" "$@" +} + +get_branch_tip() { + local folder="$1" + local repo="$2" + local branch="$3" + local project="${repo##*/}" + local url="https://github.com/${repo}/archive/refs/heads/${branch}.tar.gz" + shift 3 + + echo "Getting branch ${branch} of ${repo}" + + fetch_and_extract "$url" "${project}-${branch}" "ArduinoCore-zephyr/${folder}/${project}" "$@" +} + +ALL_TESTS=$(mktemp) +search_for_sketches_in ArduinoCore-zephyr/libraries/ >> $ALL_TESTS +search_for_sketches_in ArduinoCore-zephyr/examples/ >> $ALL_TESTS + +# Source common and artifact-specific scripts to get additional libraries in +[ -f extra/artifacts/_common.test_setup.sh ] && . extra/artifacts/_common.test_setup.sh +[ -f extra/artifacts/${ARTIFACT}.test_setup.sh ] && . 
extra/artifacts/${ARTIFACT}.test_setup.sh + +echo "ALL_LIBRARIES<> $GITHUB_ENV +find ArduinoCore-zephyr/libraries/ -name library.properties | while read -r propfile; do + # Version constraints are ignored as they are not supported by compile-sketches + grep '^depends=' "$propfile" | cut -d= -f2- | tr ',' '\n' | sed -e 's/\s*(.*)\s*$//' | while read -r dep; do + [ -z "$dep" ] || printf " - name: \"%s\"\n" "$dep" >> $GITHUB_ENV + done +done +printf " - source-path: \"%s\"\n" $(find ArduinoCore-zephyr/libraries/ -maxdepth 1 -mindepth 1 -type d) >> $GITHUB_ENV +echo "EOF" >> $GITHUB_ENV + +if [ -f $VARIANT_DIR/skip_these_examples.txt ] ; then + cat $VARIANT_DIR/skip_these_examples.txt | sed -e 's/\s*#.*//' -e '/^\s*$/d' | while read -r pattern; do + sed -i -e "\\|^\\(ArduinoCore-zephyr/\\)\\?${pattern}|d" $ALL_TESTS + done +fi +echo "ALL_TESTS<> $GITHUB_ENV +cat $ALL_TESTS | while read -r infile; do + printf " - \"%s\"\n" "$(dirname "$infile")" >> $GITHUB_ENV +done +echo "EOF" >> $GITHUB_ENV diff --git a/variants/arduino_giga_r1_stm32h747xx_m7/skip_these_examples.txt b/variants/arduino_giga_r1_stm32h747xx_m7/skip_these_examples.txt new file mode 100644 index 000000000..e49284a81 --- /dev/null +++ b/variants/arduino_giga_r1_stm32h747xx_m7/skip_these_examples.txt @@ -0,0 +1,8 @@ +# This file contains a list of examples that are shipped with the core (or used +# by continuous integration tests), but are not applicable for this variant, +# for example because it uses hardware features not present on the CPU or board. +# +# Each line in this file should contain the path to an example to exclude, +# relative to the root of the repository. 
+ +libraries/Ethernet diff --git a/variants/arduino_nano_33_ble_nrf52840_sense/skip_these_examples.txt b/variants/arduino_nano_33_ble_nrf52840_sense/skip_these_examples.txt new file mode 100644 index 000000000..8b64f3484 --- /dev/null +++ b/variants/arduino_nano_33_ble_nrf52840_sense/skip_these_examples.txt @@ -0,0 +1,13 @@ +# This file contains a list of examples that are shipped with the core (or used +# by continuous integration tests), but are not applicable for this variant, +# for example because it uses hardware features not present on the CPU or board. +# +# Each line in this file should contain the path to an example to exclude, +# relative to the root of the repository. + +libraries/Arduino_SecureElement +libraries/Camera +libraries/Ethernet +libraries/Storage +libraries/WiFi +libraries/Zephyr_SDRAM diff --git a/variants/arduino_nano_matter_mgm240sd22vna/skip_these_examples.txt b/variants/arduino_nano_matter_mgm240sd22vna/skip_these_examples.txt new file mode 100644 index 000000000..2b4d23cad --- /dev/null +++ b/variants/arduino_nano_matter_mgm240sd22vna/skip_these_examples.txt @@ -0,0 +1,16 @@ +# This file contains a list of examples that are shipped with the core (or used +# by continuous integration tests), but are not applicable for this variant, +# for example because it uses hardware features not present on the CPU or board. +# +# Each line in this file should contain the path to an example to exclude, +# relative to the root of the repository. 
+ +# no Serial1 object +examples/arduino-examples/examples/04.Communication/SerialPassthrough + +libraries/Arduino_SecureElement +libraries/Camera +libraries/Ethernet +libraries/Storage +libraries/WiFi +libraries/Zephyr_SDRAM diff --git a/variants/arduino_nicla_sense_me_nrf52832/skip_these_examples.txt b/variants/arduino_nicla_sense_me_nrf52832/skip_these_examples.txt new file mode 100644 index 000000000..2b4d23cad --- /dev/null +++ b/variants/arduino_nicla_sense_me_nrf52832/skip_these_examples.txt @@ -0,0 +1,16 @@ +# This file contains a list of examples that are shipped with the core (or used +# by continuous integration tests), but are not applicable for this variant, +# for example because it uses hardware features not present on the CPU or board. +# +# Each line in this file should contain the path to an example to exclude, +# relative to the root of the repository. + +# no Serial1 object +examples/arduino-examples/examples/04.Communication/SerialPassthrough + +libraries/Arduino_SecureElement +libraries/Camera +libraries/Ethernet +libraries/Storage +libraries/WiFi +libraries/Zephyr_SDRAM diff --git a/variants/arduino_opta_stm32h747xx_m7/skip_these_examples.txt b/variants/arduino_opta_stm32h747xx_m7/skip_these_examples.txt new file mode 100644 index 000000000..8394dcf0b --- /dev/null +++ b/variants/arduino_opta_stm32h747xx_m7/skip_these_examples.txt @@ -0,0 +1,9 @@ +# This file contains a list of examples that are shipped with the core (or used +# by continuous integration tests), but are not applicable for this variant, +# for example because it uses hardware features not present on the CPU or board. +# +# Each line in this file should contain the path to an example to exclude, +# relative to the root of the repository. 
+ +libraries/Camera +libraries/Zephyr_SDRAM diff --git a/variants/arduino_portenta_c33_r7fa6m5bh3cfc/skip_these_examples.txt b/variants/arduino_portenta_c33_r7fa6m5bh3cfc/skip_these_examples.txt new file mode 100644 index 000000000..a0e556dde --- /dev/null +++ b/variants/arduino_portenta_c33_r7fa6m5bh3cfc/skip_these_examples.txt @@ -0,0 +1,8 @@ +# This file contains a list of examples that are shipped with the core (or used +# by continuous integration tests), but are not applicable for this variant, +# for example because it uses hardware features not present on the CPU or board. +# +# Each line in this file should contain the path to an example to exclude, +# relative to the root of the repository. + +libraries/Camera diff --git a/variants/arduino_portenta_h7_stm32h747xx_m7/skip_these_examples.txt b/variants/arduino_portenta_h7_stm32h747xx_m7/skip_these_examples.txt new file mode 100644 index 000000000..50059711b --- /dev/null +++ b/variants/arduino_portenta_h7_stm32h747xx_m7/skip_these_examples.txt @@ -0,0 +1,7 @@ +# This file contains a list of examples that are shipped with the core (or used +# by continuous integration tests), but are not applicable for this variant, +# for example because it uses hardware features not present on the CPU or board. +# +# Each line in this file should contain the path to an example to exclude, +# relative to the root of the repository. + diff --git a/variants/arduino_uno_q_stm32u585xx/skip_these_examples.txt b/variants/arduino_uno_q_stm32u585xx/skip_these_examples.txt new file mode 100644 index 000000000..078ff2e8d --- /dev/null +++ b/variants/arduino_uno_q_stm32u585xx/skip_these_examples.txt @@ -0,0 +1,9 @@ +# This file contains a list of examples that are shipped with the core (or used +# by continuous integration tests), but are not applicable for this variant, +# for example because it uses hardware features not present on the CPU or board. 
+# +# Each line in this file should contain the path to an example to exclude, +# relative to the root of the repository. + +libraries/ArduinoBLE +libraries/Arduino_RPClite/extras/integration_test diff --git a/variants/ek_ra8d1_r7fa8d1bhecbd/skip_these_examples.txt b/variants/ek_ra8d1_r7fa8d1bhecbd/skip_these_examples.txt new file mode 100644 index 000000000..6ea1139b4 --- /dev/null +++ b/variants/ek_ra8d1_r7fa8d1bhecbd/skip_these_examples.txt @@ -0,0 +1,8 @@ +# This file contains a list of examples that are shipped with the core (or used +# by continuous integration tests), but are not applicable for this variant, +# for example because it uses hardware features not present on the CPU or board. +# +# Each line in this file should contain the path to an example to exclude, +# relative to the root of the repository. + +libraries/ diff --git a/variants/frdm_mcxn947_mcxn947_cpu0/skip_these_examples.txt b/variants/frdm_mcxn947_mcxn947_cpu0/skip_these_examples.txt new file mode 100644 index 000000000..6ea1139b4 --- /dev/null +++ b/variants/frdm_mcxn947_mcxn947_cpu0/skip_these_examples.txt @@ -0,0 +1,8 @@ +# This file contains a list of examples that are shipped with the core (or used +# by continuous integration tests), but are not applicable for this variant, +# for example because it uses hardware features not present on the CPU or board. +# +# Each line in this file should contain the path to an example to exclude, +# relative to the root of the repository. + +libraries/ diff --git a/variants/frdm_rw612_rw612/skip_these_examples.txt b/variants/frdm_rw612_rw612/skip_these_examples.txt new file mode 100644 index 000000000..6ea1139b4 --- /dev/null +++ b/variants/frdm_rw612_rw612/skip_these_examples.txt @@ -0,0 +1,8 @@ +# This file contains a list of examples that are shipped with the core (or used +# by continuous integration tests), but are not applicable for this variant, +# for example because it uses hardware features not present on the CPU or board. 
+# +# Each line in this file should contain the path to an example to exclude, +# relative to the root of the repository. + +libraries/ From b1eca064c4e3e141e73df1c53b2013b381b1e421 Mon Sep 17 00:00:00 2001 From: Luca Burelli Date: Fri, 9 Jan 2026 17:14:07 +0100 Subject: [PATCH 09/22] package_core: refact 2/x: process logs with Python script This commit replaces the previous shell script for log inspection with a more comprehensive processing with Python script. The new script provides better organization of test results, including a summary table, detailed per-artifact and per-sketch breakdowns, and improved formatting for readability in GitHub Actions summaries. Job database IDs are now obtained once before log generation instead of once per job, solving API rate limit issues. This also requires an update to compile-sketches for 2 new features: - 'issues-report' generates an 'issues' array in the report with warning/error lines extracted from the compilation stderr; - 'always-succeed' makes the action always exit 0, allowing CI to continue even when some compilations fail. 
Signed-off-by: Luca Burelli --- .github/workflows/package_core.yml | 142 +++++----- extra/ci_inspect_logs.py | 429 +++++++++++++++++++++++++++++ 2 files changed, 497 insertions(+), 74 deletions(-) create mode 100755 extra/ci_inspect_logs.py diff --git a/.github/workflows/package_core.yml b/.github/workflows/package_core.yml index a6c621ee3..33f6c9485 100644 --- a/.github/workflows/package_core.yml +++ b/.github/workflows/package_core.yml @@ -65,13 +65,15 @@ jobs: path: arduino-api.tar.zstd build-board: - name: Build loader for ${{ matrix.board }} + # NOTE: this name is hardcoded in ci_inspect_logs.py + name: Build for ${{ matrix.board }} runs-on: ubuntu-latest needs: - build-env env: CCACHE_IGNOREOPTIONS: -specs=* - OUTPUT_ARTIFACT: binaries-${{ matrix.board }}-${{ needs.build-env.outputs.CORE_HASH }} + ARTIFACT_TAG: ${{ needs.build-env.outputs.CORE_HASH }}-${{ matrix.board }} + REPORT: reports/zephyr-${{ matrix.variant }} strategy: matrix: include: @@ -99,30 +101,53 @@ jobs: - name: Build loader shell: bash run: | - if ! ./extra/build.sh ${{ matrix.board }} 2> >(tee error.log) ; then + mkdir -p reports + if ! ./extra/build.sh ${{ matrix.board }} 1> >(tee $REPORT.stdout) 2> >(tee $REPORT.stderr) ; then echo "### :x: ${{ matrix.board }} (\`${{ matrix.variant }}\`) build errors" > $GITHUB_STEP_SUMMARY echo >> $GITHUB_STEP_SUMMARY echo "\`\`\`" >> $GITHUB_STEP_SUMMARY - cat error.log >> $GITHUB_STEP_SUMMARY + cat $REPORT.stderr >> $GITHUB_STEP_SUMMARY echo "\`\`\`" >> $GITHUB_STEP_SUMMARY exit 1 fi - - name: Package board binaries + # look for warnings (errors are a happy path!) 
+ grep -i "warning:" $REPORT.stdout > $REPORT.warnings || true + + # extract the memory usage table (from the header to the first non-% line) + cat $REPORT.stdout | sed -n '/^Memory region/,/^[^%]*$/p' | head -n -1 \ + | awk 'BEGIN {split("B KB MB GB", u); for(i in u) m[u[i]]=1024^(i-1)} /:/ {print "[\"" $1 "\"," $2*m[$3] "," $4*m[$5] "]"}' \ + | sort | jq -s > $REPORT.meminfo + + - name: Package board artifacts if: ${{ !cancelled() }} run: | + cp firmwares/zephyr-${{ matrix.variant }}.config $REPORT.config + echo "REPORT_FILES<> $GITHUB_ENV + ls reports/* >> $GITHUB_ENV + echo "EOF" >> $GITHUB_ENV + + # Archive built binaries (and build dir on failure) + [ "${{ job.status }}" == "failure" ] && FULL_BUILD_DIR="build/${{ matrix.variant }}/" tar chf - \ firmwares/*${{ matrix.variant }}* \ variants/${{ matrix.variant }}/ \ - ${{ (job.status == 'failure') && format('build/{0}/', matrix.variant) }} \ - | zstd > ${OUTPUT_ARTIFACT}.tar.zstd + ${FULL_BUILD_DIR} \ + | zstd > binaries-${ARTIFACT_TAG}.tar.zstd - name: Archive board binaries if: ${{ !cancelled() }} uses: actions/upload-artifact@v4 with: - name: ${{ format('{0}{1}', (job.status == 'failure') && 'failed-' || '', env.OUTPUT_ARTIFACT) }} - path: ${{ env.OUTPUT_ARTIFACT }}.tar.zstd + name: ${{ format('{0}binaries-{1}', (job.status == 'failure') && 'failed-' || '', env.ARTIFACT_TAG) }} + path: binaries-${{ env.ARTIFACT_TAG }}.tar.zstd + + - name: Archive build reports + if: ${{ !cancelled() }} + uses: actions/upload-artifact@v4 + with: + name: build-report-${{ env.ARTIFACT_TAG }} + path: ${{ env.REPORT_FILES }} package-core: name: Package ${{ matrix.artifact }} @@ -188,7 +213,8 @@ jobs: failOnError: false test-core: - name: Test ${{ matrix.subarch }}:${{ matrix.board }} + # NOTE: this name is hardcoded in ci_inspect_logs.py + name: Test ${{ matrix.board }} runs-on: ubuntu-latest needs: - build-env @@ -228,7 +254,7 @@ jobs: extra/ci_test_list.sh ${{ matrix.artifact }} ${{ matrix.variant }} - name: Compile tests for 
${{ matrix.board }} - uses: pillo79/compile-sketches@main + uses: pillo79/compile-sketches@f4cb4ece9506b38c953a7da543b9ec12d0082e23 # next with: fqbn: ${{ env.FQBN }} platforms: | @@ -247,28 +273,14 @@ jobs: - 'compiler.cpp.extra_flags=-Wno-type-limits -Wno-missing-field-initializers' verbose: 'false' enable-deltas-report: 'false' - enable-warnings-report: 'true' - enable-warnings-log: 'true' - - - name: Get job ID - id: job_id - if: ${{ success() || failure() }} - uses: actions/github-script@main - with: - script: | - const { data: workflow_run } = await github.rest.actions.listJobsForWorkflowRun({ - owner: context.repo.owner, - repo: context.repo.repo, - run_id: context.runId - }); - const job_name = `Test ${{ matrix.subarch }}:${{ matrix.board }}` - return workflow_run.jobs.find((job) => job.name === job_name).id; + enable-issues-report: 'true' + always-succeed: 'true' - name: Prepare log if: ${{ success() || failure() }} run: | + [ ! -f sketches-reports/${REPORT_FILE} ] && mkdir -p sketches-reports && echo "{}" > sketches-reports/${REPORT_FILE} sed -i -e 's!/home/runner/.arduino15/packages/arduino/hardware/zephyr/[^/]*/!!g' sketches-reports/${REPORT_FILE} - cat sketches-reports/${REPORT_FILE} | jq -cr ".boards[0].sketches[0] += { job_id: ${{ steps.job_id.outputs.result }} }" > ${REPORT_FILE} && mv ${REPORT_FILE} sketches-reports/ - uses: actions/upload-artifact@v4 if: ${{ success() || failure() }} @@ -276,8 +288,8 @@ jobs: name: test-report-${{ env.ARTIFACT_TAG }} path: sketches-reports/* - collect-logs: - name: Collect logs + inspect-logs: + name: Analyze logs runs-on: ubuntu-latest needs: - build-env @@ -286,67 +298,49 @@ jobs: if: ${{ !cancelled() && needs.build-env.result == 'success' }} env: ALL_BOARD_DATA: ${{ needs.build-env.outputs.ALL_BOARD_DATA }} + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + GH_REPO: ${{ github.repository }} steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + persist-credentials: false + fetch-tags: true + - uses: 
actions/download-artifact@v4 with: path: . - pattern: test-report-* + pattern: "*-report-*" merge-multiple: true - run: | - ARTIFACTS=$(jq -cr 'map(.artifact) | unique | .[]' <<< ${ALL_BOARD_DATA}) # this avoids the 'zephyr' artifact - for artifact in $ARTIFACTS ; do - echo "### \`$artifact\` test results:" >> "$GITHUB_STEP_SUMMARY" - jq -c "map(select(.artifact == \"$artifact\")) | .[]" <<< ${ALL_BOARD_DATA} | while read -r BOARD_DATA; do - BOARD=$(echo $BOARD_DATA | jq -cr '.board') - VARIANT=$(echo $BOARD_DATA | jq -cr '.variant') - SUBARCH=$(echo $BOARD_DATA | jq -cr '.subarch') - FQBN="arduino:$SUBARCH:$BOARD" - REPORT_FILE="$(echo $FQBN | tr ':' '-').json" - if [ ! -f $REPORT_FILE ]; then - echo "* :x: $BOARD (\`$VARIANT\`) - No report found?" >> "$GITHUB_STEP_SUMMARY" - else - REPORT=$(jq -cr '.boards[0].sketches[0]' $REPORT_FILE) - JOB_ID=$(echo $REPORT | jq -cr '.job_id') - JOB_URL="https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}/job/${JOB_ID}#step:5:2" - if ! $(echo $REPORT | jq -cr '.compilation_success') ; then - echo "* :x: [$BOARD]($JOB_URL) (\`$VARIANT\`) - Build failed" >> "$GITHUB_STEP_SUMMARY" - else - WARNINGS=$(echo $REPORT | jq -cr '.warnings.current.absolute // 0') - if [ $WARNINGS -eq 0 ]; then - echo "* :white_check_mark: $BOARD (\`$VARIANT\`) - Build successful" >> "$GITHUB_STEP_SUMMARY" - else - echo >> "$GITHUB_STEP_SUMMARY" - echo "
   :warning: $BOARD ($VARIANT) - $WARNINGS warnings:" >> "$GITHUB_STEP_SUMMARY" - echo >> "$GITHUB_STEP_SUMMARY" - echo "\`\`\`" >> "$GITHUB_STEP_SUMMARY" - echo $REPORT | jq -cr '.warnings_log[]' >> "$GITHUB_STEP_SUMMARY" - echo "\`\`\`" >> "$GITHUB_STEP_SUMMARY" - echo >> "$GITHUB_STEP_SUMMARY" - echo "
" >> "$GITHUB_STEP_SUMMARY" - echo >> "$GITHUB_STEP_SUMMARY" - fi - fi - fi - done - done + export WORKFLOW_JOBS=$(gh run view ${{ github.run_id }} --attempt ${{ github.run_attempt }} --json jobs --jq '.jobs') + extra/ci_inspect_logs.py result summary full_log + + cat summary >> $GITHUB_STEP_SUMMARY + cat full_log >> $GITHUB_STEP_SUMMARY + echo "CI_RESULT=$(cat result)" > $GITHUB_OUTPUT - name: Clean up intermediate artifacts uses: geekyeggo/delete-artifact@v5.1.0 with: - name: test-report-* + name: | + build-report-* + test-report-* failOnError: false verify-core: runs-on: ubuntu-latest - if: cancelled() || contains(needs.*.result, 'failure') needs: - build-env - package-core - - test-core + - inspect-logs + if: ${{ !cancelled() }} steps: - - name: Notify failure - run: exit 1 + - name: CI run result + run: | + echo ${{ needs.inspect-logs.outputs.CI_RESULT }} + exit ${{ ((needs.inspect-logs.outputs.CI_RESULT == 'PASSED') && !contains(needs.*.result, 'failure')) && '0' || '1' }} publish-core: name: Publish core @@ -355,7 +349,7 @@ jobs: needs: - build-env - package-core - - test-core + - inspect-logs environment: production permissions: id-token: write @@ -385,7 +379,7 @@ jobs: needs: - build-env - package-core - - test-core + - inspect-logs env: CORE_TAG: ${{ needs.build-env.outputs.CORE_TAG }} CORE_HASH: ${{ needs.build-env.outputs.CORE_HASH }} diff --git a/extra/ci_inspect_logs.py b/extra/ci_inspect_logs.py new file mode 100755 index 000000000..ae93dea4f --- /dev/null +++ b/extra/ci_inspect_logs.py @@ -0,0 +1,429 @@ +#!/usr/bin/env python3 + +from collections import defaultdict +import json +import os +import re +import sys + +SKIP = -1 # Test was not performed +PASS = 0 # (PASS) Compiled successfully +WARNING = 1 # (PASS) Compiled with warnings +ERROR = 2 # (FAIL) Compilation failed with errors +FAILURE = 3 # Test run failed to complete + +TEST_LEGEND = [ + "Test passed successfully, with no warnings or errors.", + "Test completed with some warnings; no errors 
detected.", + "Test completed with unexpected errors.", + "Test run failed to complete.", + "Test was skipped." # -1 +] + +TEST_STATUS = [ + ":green_circle:", + ":yellow_circle:", + ":red_circle:", + ":fire:", + ":new_moon:" # -1 +] + +BOARD_STATUS = [ + ":white_check_mark:", + ":white_check_mark:*", + ":x:", + ":fire:", + ":new_moon:" # -1 +] + +# Loader build status data structure +class LoaderEntry: + def __init__(self, artifact, board, variant, job_link): + self.artifact = artifact + self.board = board + self.variant = variant + self.job_link = job_link + + self.status = PASS + +# Single test data structure +class TestEntry: + def __init__(self, artifact, board, sketch, status, issues, job_link): + self.artifact = artifact + self.board = board + self.sketch = sketch + self.status = status + self.issues = issues + self.job_link = job_link + +# Summary data structure +class TestGroup: + def __init__(self): + # Sets to track unique board and sketch names + self.boards = set() + self.sketches = set() + # Counts of test results by status + self.counts = { status : 0 for status in [PASS, WARNING, ERROR, FAILURE] } + # Overall status of the group + self.status = SKIP + # List of individual TestEntry objects (all, only with issues) + self.tests = [] + self.tests_with_issues = [] + + def track(self, test_entry): + """ + Update this group with a new test entry + """ + self.tests.append(test_entry) + if test_entry.issues: + self.tests_with_issues.append(test_entry) + + self.counts[test_entry.status] += 1 + self.status = max(self.status, test_entry.status) + self.boards.add(test_entry.board) + self.sketches.add(test_entry.sketch) + +# Global Data Structures +# ---------------------- + +# Loader build results, one per board +BOARD_LOADERS = {} # { board: LoaderEntry() } + +# Test results grouped by artifact, board, and artifact/sketch +# (grouping sketch results across different artifacts is really confusing) +ARTIFACT_TESTS = defaultdict(TestGroup) # { artifact: 
TestGroup() } +BOARD_TESTS = defaultdict(TestGroup) # { board: TestGroup() } +SKETCH_TESTS = defaultdict(lambda: defaultdict(TestGroup)) # { artifact: { sketch: TestGroup() } } + +def log_test(artifact, board, sketch, status, issues, job_link=None): + """ + Logs individual test results into the global test tracking structures. + """ + + # Ensure issues is a list + if isinstance(issues, str): + issues = [ issues ] + + # Create the test entry + test_entry = TestEntry(artifact, board, sketch, status, issues, job_link) + + # Track in global structures + ARTIFACT_TESTS[artifact].track(test_entry) + BOARD_TESTS[board].track(test_entry) + SKETCH_TESTS[artifact][sketch].track(test_entry) + +def print_summary(): + """ + Prints the summary section of the report, including overall status and a recap table. + """ + + if ci_run_passed: + title = f"# [CI run]({JOB_LINK_STEM}#user-content-summary) PASSED :green_circle:\n" + else: + failed_boards = [ f"{BOARD_STATUS[res.status]} `{board}`" for board, res in BOARD_TESTS.items() if res.status in (ERROR, FAILURE) ] + title = f"# [CI run]({JOB_LINK_STEM}#user-content-summary) FAILED: {', '.join(failed_boards)}\n" + f_print("\n") + f_print(title) + + # Print the recap table, one line per board. 
+ # 8 columns: + # - Artifact name (multi-row for boards under the same artifact) + # - Board name + # - Core compilation status (ok, number of warnings) + # - Overall sketch compilation status for the core + # - Used RAM percent + # - Sketches tested + # - Sketches with warnings + # - Failed sketches + f_print("\n") + + for artifact in ARTIFACTS: + artifact_boards = sorted(ARTIFACT_TESTS[artifact].boards) + artifact_status = ARTIFACT_TESTS[artifact].status + + first_line = True + for board in artifact_boards: + # Artifact name + if first_line: + f_print(f"", f"") + first_line = False + + # Board name + res = BOARD_LOADERS[board] + if res.job_link: + f_print(f"") + continue + + pin = f"{len(res.warnings)} :label:" if res.status == WARNING else ":green_book:" + f_print(f"") + + # Sketch build status + message on failure + res = BOARD_TESTS[board] + f_print(f"") + if res.status == FAILURE: + f_print(f"") + continue + + # Test count summary + tests_str = len(res.tests) or "-" + warnings_str = res.counts[WARNING] or "-" + errors_str = f"{res.counts[ERROR]}" if res.counts[ERROR] else "-" + f_print(f"") + f_print("
ArtifactBoardCoreTestsRAMSketchesWarningsErrors
{BOARD_STATUS[artifact_status]} {artifact}{board}") + else: + f_print(f"{board}") + + # Core build status + message on failure + if res.status == FAILURE: + f_print(f"{BOARD_STATUS[FAILURE]}Core build failed!
{pin}{BOARD_STATUS[res.status]}") + f_print("
".join(f"{test.issues[0]} (full log)" for test in res.tests)) + f_print("
{tests_str}{warnings_str}{errors_str}
\n") + + # Print the legend + f_print("
Legend") + f_print("

") + for status in FAILURE, ERROR, WARNING, PASS, SKIP: + f_print(f"") + f_print(f"") + f_print(f"") + f_print("
BoardTestStatus description
{BOARD_STATUS[status]}{TEST_STATUS[status]}{TEST_LEGEND[status]}
\n") + + # Print artifact error warnings + for artifact in ARTIFACTS: + artifact_boards = sorted(ARTIFACT_TESTS[artifact].boards) + failed_boards = [ f"`{board}`" for board in artifact_boards if BOARD_TESTS[board].status in (ERROR, FAILURE) ] + + if failed_boards: + f_print("> [!CAUTION]") + f_print(f"> `{artifact}` is blocked due to failures on {', '.join(failed_boards)}!\n") + +def print_test_matrix(artifact, artifact_boards, title, sketch_filter=lambda x: True): + """ + Prints a matrix of test results for a given artifact and its boards. The + sketch_filter function determines which sketches to include. No table is printed + if no sketches pass the filter. + """ + + # Build the header row, which includes board names and board statuses. + # Headers have a link to the board's CI job, if available. + header_row = f"{artifact} {title}" + for board in artifact_boards: + res = BOARD_TESTS[board] + header_col = f"{board}
{BOARD_STATUS[res.status]}" + header_row += f"{header_col}" + header_row += "" + + # Group sketches by library + sketch_groups = {} + for sketch in ARTIFACT_TESTS[artifact].sketches: + res = SKETCH_TESTS[artifact][sketch] + if not sketch_filter(res): + continue + + # (1.................) (2....) (3................) (4.) + match = re.search(r'(libraries|examples)/([^/]+)/(examples/|extras/)?(.*)', sketch) + if match: + group = match.group(2) + sample = match.group(4) + else: + group = "" + sample = sketch + + if group not in sketch_groups: + sketch_groups[group] = [] + sketch_groups[group].append((sample, res)) + + # Build the data rows, grouping libraries together. Each row corresponds to + # a sketch, each cell to the test result icon of that sketch on that board. + data_rows = [] + for group in sorted(sketch_groups.keys()): + if group: + data_rows.append(f"{group}{group}") + #data_rows.append(f"{group}{''.join('---' for x in artifact_boards)}") + for sample, res in sorted(sketch_groups[group]): + row_data = f"{TEST_STATUS[res.status]}" + # If there are issues, make the sketch name a link to the detailed logs below + name_link = f"{sample}" + if res.tests_with_issues: + sketch = next(iter(res.sketches)) # only one + sketch_id = sketch.replace('/', '_').replace(' ', '_').replace('-', '_') + name_link = f"{name_link}" + row_data += f"{name_link}" + + for board in artifact_boards: + test = next((test for test in res.tests if test.board == board), None) + status = test.status if test else SKIP + issues = test.issues if test else "" + + row_data += f"{TEST_STATUS[status]}" + + row_data += "" + data_rows.append(row_data) + + if not data_rows: + return + + # Print the table + f_print("") + f_print(header_row) + for row in data_rows: + f_print(row) + f_print("
\n") + + # Print detailed logs for sketches with issues + for group in sorted(sketch_groups.keys()): + for sample, res in sorted(sketch_groups[group]): + if not res.tests_with_issues: + continue + + # Header and anchor for the detailed logs + sketch = next(iter(res.sketches)) # only one + sketch_id = sketch.replace('/', '_').replace(' ', '_').replace('-', '_') + f_print(f"") + f_print(f"
{artifact} logs for {TEST_STATUS[res.status]} {group} {sample}") + f_print("

") + + # Test logs by board + for test in sorted(res.tests_with_issues, key=lambda x: x.status, reverse=True): + test_text = f"{test.board}" + if test.job_link: + test_text += f" (full log)" + f_print(f"") + + f_print("
{TEST_STATUS[test.status]} {test_text}
\n\n```") + for line in test.issues: + f_print(line) + f_print("```\n
\n") + + +# Main Logic +# ---------- + +# Environment Variable Checks +ALL_BOARD_DATA_STR = os.environ.get('ALL_BOARD_DATA') +WORKFLOW_JOBS_STR = os.environ.get('WORKFLOW_JOBS') +GITHUB_REPOSITORY = os.environ.get('GITHUB_REPOSITORY') +GITHUB_RUN_ID = os.environ.get('GITHUB_RUN_ID') +JOB_LINK_STEM = f"https://github.com/{GITHUB_REPOSITORY}/actions/runs/{GITHUB_RUN_ID}" + +if not ALL_BOARD_DATA_STR or not GITHUB_REPOSITORY or not GITHUB_RUN_ID: + print("Not in a Github CI run, cannot proceed.") + sys.exit(0) + +if not len(sys.argv) in (1, 4): + print("Usage: ci_inspect_logs.py [ ]") + sys.exit(1) + +if len(sys.argv) == 4: + results_file = sys.argv[1] + summary_file = sys.argv[2] + full_report_file = sys.argv[3] +else: + results_file = "/dev/null" + summary_file = full_report_file = "/dev/stdout" + +ALL_BOARD_DATA = json.loads(ALL_BOARD_DATA_STR) +ALL_BOARD_DATA = { b['board']: b for b in ALL_BOARD_DATA } + +JOB_URLS = json.loads(WORKFLOW_JOBS_STR) +JOB_URLS = { j['name']: j['url'] for j in JOB_URLS } + +for board_data in ALL_BOARD_DATA.values(): + # Extract common fields + artifact = board_data['artifact'] + board = board_data['board'] + variant = board_data['variant'] + subarch = board_data['subarch'] + + # Get job link for this build + job_link = JOB_URLS.get(f"Build for {board}") + if job_link: + job_link += "#step:5:1" + + BOARD_LOADERS[board] = LoaderEntry(artifact, board, variant, job_link) + if BOARD_LOADERS[board].status == FAILURE: + log_test(artifact, board, 'CI test', '', [], FAILURE, "Core data could not be read.") + continue + + # Get job link for this test + job_link = JOB_URLS.get(f"Test {board}") + if job_link: + job_link += "#step:6:1" + + # Extract data from the report file + report_file = f"arduino-{subarch}-{board}.json" + if not os.path.exists(report_file): + log_test(artifact, board, 'CI test', '', [], FAILURE, f"Report file not found.", job_link) + continue # Skip to the next board + + try: + with open(report_file, 'r') as f: + report_data = 
json.load(f) + except Exception as e: + log_test(artifact, board, 'CI test', '', [], FAILURE, f"Error reading report file: {e}", job_link) + continue # Skip to the next board + + reports = report_data.get('boards', [{}])[0].get('sketches', []) + if not reports: + log_test(artifact, board, 'CI test', '', [], FAILURE, "Test report is empty, check CI log.", job_link) + continue # Skip to the next board + + # Iterate through individual sketch reports + for report in reports: + sketch = report.get('name', 'unknown_sketch') + success = report.get('compilation_success', False) + issues = report.get('issues', []) + + # Replace long absolute paths with '...' for brevity. + sketch_issues = [ re.sub(r'(/.+?)((/[^/]+){3}):', r'...\2:', issue) for issue in issues ] + + if not success: + status = ERROR + elif len(sketch_issues): # Implies warnings/non-critical issues + status = WARNING + else: + status = PASS + + log_test(artifact, board, sketch, status, sketch_issues, job_link) + +ARTIFACTS = sorted(ARTIFACT_TESTS.keys()) + +# Begin output of the report +# -------------------------- + +ci_run_status = max(res.status for res in ARTIFACT_TESTS.values()) +ci_run_passed = ci_run_status in (PASS, WARNING) + +with open(summary_file, 'w') as f: + f_print = lambda *args, **kwargs: print(*args, file=f, **kwargs) + + print_summary() + +with open(full_report_file, 'w') as f: + f_print = lambda *args, **kwargs: print(*args, file=f, **kwargs) + + # Print the test matrix sections per artifact + for artifact in ARTIFACTS: + artifact_boards = sorted([ board for board in ARTIFACT_TESTS[artifact].boards if BOARD_TESTS[board].status != FAILURE ]) + + if not artifact_boards: + continue + + f_print(f"") + f_print("\n---\n") + print_test_matrix(artifact, artifact_boards, "issues", sketch_filter=lambda res: res.status == ERROR) + + successful_tests = ARTIFACT_TESTS[artifact].counts[PASS] + ARTIFACT_TESTS[artifact].counts[WARNING] + warning_tests = ARTIFACT_TESTS[artifact].counts[WARNING] + if 
successful_tests: + summary = f"{successful_tests} successful {artifact} tests hidden" + if warning_tests: + summary += f" ({warning_tests} with warnings)" + + f_print(f"
{summary}

\n") + print_test_matrix(artifact, artifact_boards, "tests", sketch_filter=lambda res: res.status in (PASS, WARNING)) + f_print("
\n") + +with open(results_file, 'w') as f: + if ci_run_passed: + f.write('PASSED\n') From 56c2506d284bf078eb6432a4e18f4caefa8fcc3d Mon Sep 17 00:00:00 2001 From: Luca Burelli Date: Fri, 9 Jan 2026 16:16:17 +0100 Subject: [PATCH 10/22] ci_inspect_logs [refactor 3/x]: handle expected errors Some examples are known to fail on specific variants, and CI should not be permanently marked as failed because of them. This patch adds support for a per-variant list of known failing examples, and marks tests that fail due to these examples as "expected errors", which are logged but do not cause the overall CI run to be marked as failed. Signed-off-by: Luca Burelli --- extra/ci_inspect_logs.py | 54 ++++++++++++++++--- .../known_example_issues.txt | 8 +++ .../known_example_issues.txt | 8 +++ .../known_example_issues.txt | 10 ++++ .../known_example_issues.txt | 8 +++ .../known_example_issues.txt | 12 +++++ .../known_example_issues.txt | 14 +++++ .../known_example_issues.txt | 12 +++++ .../known_example_issues.txt | 8 +++ .../known_example_issues.txt | 8 +++ .../known_example_issues.txt | 8 +++ .../frdm_rw612_rw612/known_example_issues.txt | 8 +++ 12 files changed, 150 insertions(+), 8 deletions(-) create mode 100644 variants/arduino_giga_r1_stm32h747xx_m7/known_example_issues.txt create mode 100644 variants/arduino_nano_33_ble_nrf52840_sense/known_example_issues.txt create mode 100644 variants/arduino_nano_matter_mgm240sd22vna/known_example_issues.txt create mode 100644 variants/arduino_nicla_sense_me_nrf52832/known_example_issues.txt create mode 100644 variants/arduino_opta_stm32h747xx_m7/known_example_issues.txt create mode 100644 variants/arduino_portenta_c33_r7fa6m5bh3cfc/known_example_issues.txt create mode 100644 variants/arduino_portenta_h7_stm32h747xx_m7/known_example_issues.txt create mode 100644 variants/arduino_uno_q_stm32u585xx/known_example_issues.txt create mode 100644 variants/ek_ra8d1_r7fa8d1bhecbd/known_example_issues.txt create mode 100644 
variants/frdm_mcxn947_mcxn947_cpu0/known_example_issues.txt create mode 100644 variants/frdm_rw612_rw612/known_example_issues.txt diff --git a/extra/ci_inspect_logs.py b/extra/ci_inspect_logs.py index ae93dea4f..47859ffa3 100755 --- a/extra/ci_inspect_logs.py +++ b/extra/ci_inspect_logs.py @@ -9,12 +9,14 @@ SKIP = -1 # Test was not performed PASS = 0 # (PASS) Compiled successfully WARNING = 1 # (PASS) Compiled with warnings -ERROR = 2 # (FAIL) Compilation failed with errors -FAILURE = 3 # Test run failed to complete +EXPECTED_ERROR = 2 # (PASS*) Compilation failed with expected errors +ERROR = 3 # (FAIL) Compilation failed with errors +FAILURE = 4 # Test run failed to complete TEST_LEGEND = [ "Test passed successfully, with no warnings or errors.", "Test completed with some warnings; no errors detected.", + "Test completed with errors, but all are known/expected.", "Test completed with unexpected errors.", "Test run failed to complete.", "Test was skipped." # -1 @@ -23,6 +25,7 @@ TEST_STATUS = [ ":green_circle:", ":yellow_circle:", + ":no_entry_sign:", ":red_circle:", ":fire:", ":new_moon:" # -1 @@ -31,6 +34,7 @@ BOARD_STATUS = [ ":white_check_mark:", ":white_check_mark:*", + ":heavy_check_mark:*", ":x:", ":fire:", ":new_moon:" # -1 @@ -63,7 +67,7 @@ def __init__(self): self.boards = set() self.sketches = set() # Counts of test results by status - self.counts = { status : 0 for status in [PASS, WARNING, ERROR, FAILURE] } + self.counts = { status : 0 for status in [PASS, WARNING, EXPECTED_ERROR, ERROR, FAILURE] } # Overall status of the group self.status = SKIP # List of individual TestEntry objects (all, only with issues) @@ -95,7 +99,7 @@ def track(self, test_entry): BOARD_TESTS = defaultdict(TestGroup) # { board: TestGroup() } SKETCH_TESTS = defaultdict(lambda: defaultdict(TestGroup)) # { artifact: { sketch: TestGroup() } } -def log_test(artifact, board, sketch, status, issues, job_link=None): +def log_test(artifact, board, sketch, exceptions, status, issues, 
job_link=None): """ Logs individual test results into the global test tracking structures. """ @@ -104,6 +108,10 @@ def log_test(artifact, board, sketch, status, issues, job_link=None): if isinstance(issues, str): issues = [ issues ] + # Adjust the status for expected errors + if status == ERROR and any(pattern.match(sketch) for pattern in exceptions): + status = EXPECTED_ERROR + # Create the test entry test_entry = TestEntry(artifact, board, sketch, status, issues, job_link) @@ -176,13 +184,18 @@ def print_summary(): tests_str = len(res.tests) or "-" warnings_str = res.counts[WARNING] or "-" errors_str = f"{res.counts[ERROR]}" if res.counts[ERROR] else "-" + if res.counts[EXPECTED_ERROR]: + if errors_str == "-": # only expected errors + errors_str = f"({res.counts[EXPECTED_ERROR]}*)" + else: # both actual and expected errors + errors_str += f" (+{res.counts[EXPECTED_ERROR]}*)" f_print(f"{tests_str}{warnings_str}{errors_str}") f_print("\n") # Print the legend f_print("
Legend") f_print("

") - for status in FAILURE, ERROR, WARNING, PASS, SKIP: + for status in FAILURE, ERROR, EXPECTED_ERROR, WARNING, PASS, SKIP: f_print(f"") f_print(f"") f_print(f"") @@ -345,6 +358,15 @@ def print_test_matrix(artifact, artifact_boards, title, sketch_filter=lambda x: log_test(artifact, board, 'CI test', '', [], FAILURE, "Core data could not be read.") continue + # Get list of expected errors for this board/variant + exceptions = [] + if os.path.exists(f"variants/{variant}/known_example_issues.txt"): + with open(f"variants/{variant}/known_example_issues.txt", 'r') as f: + for line in f: + sketch_pattern = line.split('#')[0].strip() + if sketch_pattern: + exceptions.append(re.compile(f"^(ArduinoCore-zephyr/)?{sketch_pattern}")) + # Get job link for this test job_link = JOB_URLS.get(f"Test {board}") if job_link: @@ -384,7 +406,7 @@ def print_test_matrix(artifact, artifact_boards, title, sketch_filter=lambda x: else: status = PASS - log_test(artifact, board, sketch, status, sketch_issues, job_link) + log_test(artifact, board, sketch, exceptions, status, sketch_issues, job_link) ARTIFACTS = sorted(ARTIFACT_TESTS.keys()) @@ -392,7 +414,7 @@ def print_test_matrix(artifact, artifact_boards, title, sketch_filter=lambda x: # -------------------------- ci_run_status = max(res.status for res in ARTIFACT_TESTS.values()) -ci_run_passed = ci_run_status in (PASS, WARNING) +ci_run_passed = ci_run_status in (PASS, WARNING, EXPECTED_ERROR) with open(summary_file, 'w') as f: f_print = lambda *args, **kwargs: print(*args, file=f, **kwargs) @@ -411,7 +433,23 @@ def print_test_matrix(artifact, artifact_boards, title, sketch_filter=lambda x: f_print(f"") f_print("\n---\n") - print_test_matrix(artifact, artifact_boards, "issues", sketch_filter=lambda res: res.status == ERROR) + + if any(BOARD_LOADERS[board].status != PASS for board in artifact_boards): + summary = f"{artifact} loader build warnings" + f_print(f"
{summary}

\n") + f_print("
BoardTestStatus description
{BOARD_STATUS[status]}{TEST_STATUS[status]}{TEST_LEGEND[status]}
") + f_print("") + for board in artifact_boards: + if BOARD_LOADERS[board].status == PASS: + continue + f_print(f"") + f_print("
BoardWarnings
{board}
")
+                for warning in BOARD_LOADERS[board].warnings:
+                    f_print(warning)
+                f_print("
\n") + f_print("
\n") + + print_test_matrix(artifact, artifact_boards, "issues", sketch_filter=lambda res: res.status in (ERROR, EXPECTED_ERROR)) successful_tests = ARTIFACT_TESTS[artifact].counts[PASS] + ARTIFACT_TESTS[artifact].counts[WARNING] warning_tests = ARTIFACT_TESTS[artifact].counts[WARNING] diff --git a/variants/arduino_giga_r1_stm32h747xx_m7/known_example_issues.txt b/variants/arduino_giga_r1_stm32h747xx_m7/known_example_issues.txt new file mode 100644 index 000000000..f80a141a5 --- /dev/null +++ b/variants/arduino_giga_r1_stm32h747xx_m7/known_example_issues.txt @@ -0,0 +1,8 @@ +# This file contains a list of examples that are shipped with the core (or used +# by continuous integration tests), but are known to be currently failing for +# this specific variant. +# +# Each line in this file is treated as a regular expression and will be matched +# against the path of each sketch found in this repo. If a match is found, the +# sketch compilation result will be ignored. + diff --git a/variants/arduino_nano_33_ble_nrf52840_sense/known_example_issues.txt b/variants/arduino_nano_33_ble_nrf52840_sense/known_example_issues.txt new file mode 100644 index 000000000..f80a141a5 --- /dev/null +++ b/variants/arduino_nano_33_ble_nrf52840_sense/known_example_issues.txt @@ -0,0 +1,8 @@ +# This file contains a list of examples that are shipped with the core (or used +# by continuous integration tests), but are known to be currently failing for +# this specific variant. +# +# Each line in this file is treated as a regular expression and will be matched +# against the path of each sketch found in this repo. If a match is found, the +# sketch compilation result will be ignored. 
+ diff --git a/variants/arduino_nano_matter_mgm240sd22vna/known_example_issues.txt b/variants/arduino_nano_matter_mgm240sd22vna/known_example_issues.txt new file mode 100644 index 000000000..b25687e11 --- /dev/null +++ b/variants/arduino_nano_matter_mgm240sd22vna/known_example_issues.txt @@ -0,0 +1,10 @@ +# This file contains a list of examples that are shipped with the core (or used +# by continuous integration tests), but are known to be currently failing for +# this specific variant. +# +# Each line in this file is treated as a regular expression and will be matched +# against the path of each sketch found in this repo. If a match is found, the +# sketch compilation result will be ignored. + +# no A0 analog pin support +examples/arduino-examples/examples/01.Basics/AnalogReadSerial diff --git a/variants/arduino_nicla_sense_me_nrf52832/known_example_issues.txt b/variants/arduino_nicla_sense_me_nrf52832/known_example_issues.txt new file mode 100644 index 000000000..f80a141a5 --- /dev/null +++ b/variants/arduino_nicla_sense_me_nrf52832/known_example_issues.txt @@ -0,0 +1,8 @@ +# This file contains a list of examples that are shipped with the core (or used +# by continuous integration tests), but are known to be currently failing for +# this specific variant. +# +# Each line in this file is treated as a regular expression and will be matched +# against the path of each sketch found in this repo. If a match is found, the +# sketch compilation result will be ignored. + diff --git a/variants/arduino_opta_stm32h747xx_m7/known_example_issues.txt b/variants/arduino_opta_stm32h747xx_m7/known_example_issues.txt new file mode 100644 index 000000000..8b392234b --- /dev/null +++ b/variants/arduino_opta_stm32h747xx_m7/known_example_issues.txt @@ -0,0 +1,12 @@ +# This file contains a list of examples that are shipped with the core (or used +# by continuous integration tests), but are known to be currently failing for +# this specific variant. 
+# +# Each line in this file is treated as a regular expression and will be matched +# against the path of each sketch found in this repo. If a match is found, the +# sketch compilation result will be ignored. + +# bug at core link time +libraries/Ethernet/examples/UDPSendReceiveString +libraries/Ethernet/examples/UdpNtpClient +libraries/Storage/examples/FlashFormat diff --git a/variants/arduino_portenta_c33_r7fa6m5bh3cfc/known_example_issues.txt b/variants/arduino_portenta_c33_r7fa6m5bh3cfc/known_example_issues.txt new file mode 100644 index 000000000..9b7ef5387 --- /dev/null +++ b/variants/arduino_portenta_c33_r7fa6m5bh3cfc/known_example_issues.txt @@ -0,0 +1,14 @@ +# This file contains a list of examples that are shipped with the core (or used +# by continuous integration tests), but are known to be currently failing for +# this specific variant. +# +# Each line in this file is treated as a regular expression and will be matched +# against the path of each sketch found in this repo. If a match is found, the +# sketch compilation result will be ignored. + +# needs porting the SE05X library from mbed +libraries/Arduino_SecureElement/ + +# bug at core link time +libraries/Ethernet/examples/UDPSendReceiveString +libraries/Ethernet/examples/UdpNtpClient diff --git a/variants/arduino_portenta_h7_stm32h747xx_m7/known_example_issues.txt b/variants/arduino_portenta_h7_stm32h747xx_m7/known_example_issues.txt new file mode 100644 index 000000000..8b392234b --- /dev/null +++ b/variants/arduino_portenta_h7_stm32h747xx_m7/known_example_issues.txt @@ -0,0 +1,12 @@ +# This file contains a list of examples that are shipped with the core (or used +# by continuous integration tests), but are known to be currently failing for +# this specific variant. +# +# Each line in this file is treated as a regular expression and will be matched +# against the path of each sketch found in this repo. If a match is found, the +# sketch compilation result will be ignored. 
+ +# bug at core link time +libraries/Ethernet/examples/UDPSendReceiveString +libraries/Ethernet/examples/UdpNtpClient +libraries/Storage/examples/FlashFormat diff --git a/variants/arduino_uno_q_stm32u585xx/known_example_issues.txt b/variants/arduino_uno_q_stm32u585xx/known_example_issues.txt new file mode 100644 index 000000000..f80a141a5 --- /dev/null +++ b/variants/arduino_uno_q_stm32u585xx/known_example_issues.txt @@ -0,0 +1,8 @@ +# This file contains a list of examples that are shipped with the core (or used +# by continuous integration tests), but are known to be currently failing for +# this specific variant. +# +# Each line in this file is treated as a regular expression and will be matched +# against the path of each sketch found in this repo. If a match is found, the +# sketch compilation result will be ignored. + diff --git a/variants/ek_ra8d1_r7fa8d1bhecbd/known_example_issues.txt b/variants/ek_ra8d1_r7fa8d1bhecbd/known_example_issues.txt new file mode 100644 index 000000000..f80a141a5 --- /dev/null +++ b/variants/ek_ra8d1_r7fa8d1bhecbd/known_example_issues.txt @@ -0,0 +1,8 @@ +# This file contains a list of examples that are shipped with the core (or used +# by continuous integration tests), but are known to be currently failing for +# this specific variant. +# +# Each line in this file is treated as a regular expression and will be matched +# against the path of each sketch found in this repo. If a match is found, the +# sketch compilation result will be ignored. + diff --git a/variants/frdm_mcxn947_mcxn947_cpu0/known_example_issues.txt b/variants/frdm_mcxn947_mcxn947_cpu0/known_example_issues.txt new file mode 100644 index 000000000..f80a141a5 --- /dev/null +++ b/variants/frdm_mcxn947_mcxn947_cpu0/known_example_issues.txt @@ -0,0 +1,8 @@ +# This file contains a list of examples that are shipped with the core (or used +# by continuous integration tests), but are known to be currently failing for +# this specific variant. 
+# +# Each line in this file is treated as a regular expression and will be matched +# against the path of each sketch found in this repo. If a match is found, the +# sketch compilation result will be ignored. + diff --git a/variants/frdm_rw612_rw612/known_example_issues.txt b/variants/frdm_rw612_rw612/known_example_issues.txt new file mode 100644 index 000000000..f80a141a5 --- /dev/null +++ b/variants/frdm_rw612_rw612/known_example_issues.txt @@ -0,0 +1,8 @@ +# This file contains a list of examples that are shipped with the core (or used +# by continuous integration tests), but are known to be currently failing for +# this specific variant. +# +# Each line in this file is treated as a regular expression and will be matched +# against the path of each sketch found in this repo. If a match is found, the +# sketch compilation result will be ignored. + From 7dfa6ad77883057ab13dba2e5149cad62a5ee88b Mon Sep 17 00:00:00 2001 From: Luca Burelli Date: Fri, 9 Jan 2026 15:30:32 +0100 Subject: [PATCH 11/22] testing: [refactor 4/x] extract memory information reports --- extra/ci_inspect_logs.py | 160 ++++++++++++++++++++++++++++++++++++++- 1 file changed, 157 insertions(+), 3 deletions(-) diff --git a/extra/ci_inspect_logs.py b/extra/ci_inspect_logs.py index 47859ffa3..308882b98 100755 --- a/extra/ci_inspect_logs.py +++ b/extra/ci_inspect_logs.py @@ -48,7 +48,57 @@ def __init__(self, artifact, board, variant, job_link): self.variant = variant self.job_link = job_link - self.status = PASS + self.warnings = self.read_warnings() # list of warning messages + self.config = self.read_config() # set of Kconfig settings + self.meminfo = self.read_meminfo() # memory usage report + + if not (self.config and self.meminfo): + self.status = FAILURE + elif self.warnings: + self.status = WARNING + else: + self.status = PASS + + def read_config(self): + # get board's config settings + report_file = f"zephyr-{self.variant}.config" + try: + with open(report_file, 'r') as f: + configs = {} + for 
line in f: + if line.startswith('#') or '=' not in line: + continue + sym, val = line.strip().split('=', 1) + if val.startswith('"'): + configs[sym] = val.strip('"') + elif val=='y': + configs[sym] = 1 + else: + configs[sym] = eval(val) + return configs + except Exception as e: + return {} + + def read_warnings(self): + # get list of board warnings (may be empty) + report_file = f"zephyr-{self.variant}.warnings" + try: + with open(report_file, 'r') as f: + return [ line.strip() for line in f if line.strip() ] + except Exception as e: + return [] + + def read_meminfo(self): + # get board's memory report + report_file = f"zephyr-{self.variant}.meminfo" + try: + with open(report_file, 'r') as f: + report_data = json.load(f) + meminfo = { region.replace(':',''): [used, total] for region, used, total in report_data } + meminfo.pop('IDT_LIST', None) + return meminfo + except Exception as e: + return {} # Single test data structure class TestEntry: @@ -165,7 +215,7 @@ def print_summary(): # Core build status + message on failure if res.status == FAILURE: - f_print(f"{BOARD_STATUS[FAILURE]}Core build failed!") + f_print(f"{BOARD_STATUS[FAILURE]}Core build failed!") continue pin = f"{len(res.warnings)} :label:" if res.status == WARNING else ":green_book:" @@ -175,11 +225,14 @@ def print_summary(): res = BOARD_TESTS[board] f_print(f"{BOARD_STATUS[res.status]}") if res.status == FAILURE: - f_print(f"") + f_print(f"") f_print("
".join(f"{test.issues[0]} (full log)" for test in res.tests)) f_print("") continue + # Memory usage + f_print(f"\n\n{color_entry(BOARD_LOADERS[board].meminfo['RAM'], False)}\n\n") + # Test count summary tests_str = len(res.tests) or "-" warnings_str = res.counts[WARNING] or "-" @@ -308,6 +361,99 @@ def print_test_matrix(artifact, artifact_boards, title, sketch_filter=lambda x: f_print("\n") +REGIONS_BY_SOC = defaultdict(set) # { soc: set(regions) } + +BASE_COLOR = 0x20 +DELTA_COLOR = 0xff-2*BASE_COLOR + +# percent is in range [0, 1] where 0 is good, 1 is bad +def get_percent(values): + if not values: + return 0.0 + return values[0] / values[1] + +def color_cmd(percent): + color_amt = int(DELTA_COLOR * percent) + return f"\\color{{#{BASE_COLOR + color_amt:02x}{0xff - color_amt:02x}{BASE_COLOR:02x}}}" + +def color_entry(values, full=True): + if not values: + return "" + + percent = get_percent(values) + if full: + return f"${{{color_cmd(percent)}\\frac{{{values[0]}}}{{{values[1]}}}\\space({percent*100:0.1f}\\\\%)}}$" + else: + return f"{'' if percent < 0.85 else ':warning:'} ${{{color_cmd(percent)}{percent*100:0.1f}\\\\%}}$" + +def print_mem_report(artifact, artifact_boards): + + f_print("", end='') + f_print("", end='') + f_print("", end='') + f_print("", end='') + f_print("", end='') + f_print("", end='') + f_print("", end='') + f_print("") + f_print("") + + for soc, board in sorted((ALL_BOARD_DATA[board]['soc'], board) for board in artifact_boards): + max_pct = max([ get_percent(BOARD_LOADERS[board].meminfo[r]) for r in ('FLASH', 'RAM') ]) + icon = ':warning:' if max_pct > 0.85 else '' + board_str = board.replace('_', '\\\\_') + + row = [ + icon, + f"${{{color_cmd(max_pct)}\\texttt{{{board_str}}}}}$", + f"{soc}", + color_entry(BOARD_LOADERS[board].meminfo['FLASH']), + color_entry(BOARD_LOADERS[board].meminfo['RAM']), + f"${{{ BOARD_LOADERS[board].config.get('CONFIG_HEAP_MEM_POOL_SIZE', 0) }}}$", + f"${{{ BOARD_LOADERS[board].config['CONFIG_SRAM_SIZE']*1024 - 
BOARD_LOADERS[board].meminfo['RAM'][0] }}}$", + f"${{{ BOARD_LOADERS[board].config['CONFIG_LLEXT_HEAP_SIZE']*1024 }}}$", + f"${{{ BOARD_LOADERS[board].config.get('CONFIG_MBEDTLS_HEAP_SIZE', '-') }}}$" + ] + + f_print("") + col_aligns = ['center', 'left', 'center', 'right', 'right', 'right', 'right', 'right', 'right'] + for index, cell in enumerate(row): + f_print(f"") + f_print("") + f_print("
BoardSoCFLASHRAMUser heapsOS heaps
SYSLIBCLLEXTMBEDTLS
\n\n{cell}\n\n
") + + extra_data_present = False + for soc in sorted(list(set([ ALL_BOARD_DATA[board]['soc'] for board in artifact_boards ]))): + soc_boards = [ board for board in artifact_boards if ALL_BOARD_DATA[board]['soc'] == soc ] + sorted_regions = sorted(r for r in REGIONS_BY_SOC[soc] if r not in ('FLASH', 'RAM')) + if not sorted_regions: + continue + + if not extra_data_present: + f_print("
SoC-specific data

\n") + extra_data_present = True + + f_print(f"") + for r in sorted_regions: + f_print(f"") + f_print("") + for board in sorted(soc_boards): + f_print(f"") + for r in sorted_regions: + if r in BOARD_LOADERS[board].meminfo: + f_print(f"") + else: + f_print(f"") + f_print("") + f_print("
{soc}Board{r}
{board}\n\n{color_entry(BOARD_LOADERS[board].meminfo[r])}\n\n
\n") + # f_print() + # for c in ('CONFIG_HEAP_MEM_POOL_SIZE', 'CONFIG_LLEXT_HEAP_SIZE', 'CONFIG_MBEDTLS_HEAP_SIZE'): + # if c in BOARD_LOADERS[board].config: + # f_print(f"{c:>25} {BOARD_LOADERS[board].config[c]:8}") + # f_print("") + + if extra_data_present: + f_print("
") # Main Logic # ---------- @@ -358,6 +504,10 @@ def print_test_matrix(artifact, artifact_boards, title, sketch_filter=lambda x: log_test(artifact, board, 'CI test', '', [], FAILURE, "Core data could not be read.") continue + soc = BOARD_LOADERS[board].config['CONFIG_SOC'] + board_data['soc'] = soc + REGIONS_BY_SOC[soc].update(BOARD_LOADERS[board].meminfo.keys()) + # Get list of expected errors for this board/variant exceptions = [] if os.path.exists(f"variants/{variant}/known_example_issues.txt"): @@ -462,6 +612,10 @@ def print_test_matrix(artifact, artifact_boards, title, sketch_filter=lambda x: print_test_matrix(artifact, artifact_boards, "tests", sketch_filter=lambda res: res.status in (PASS, WARNING)) f_print("\n") + f_print(f"
Memory usage report for {artifact}

") + print_mem_report(artifact, artifact_boards) + f_print("
") + with open(results_file, 'w') as f: if ci_run_passed: f.write('PASSED\n') From 9d27900123c6baa6636e06c1cc69e393ec355ac1 Mon Sep 17 00:00:00 2001 From: Luca Burelli Date: Fri, 9 Jan 2026 15:34:38 +0100 Subject: [PATCH 12/22] testing: test both static and dynamic linking --- .github/workflows/package_core.yml | 10 ++-- extra/ci_inspect_logs.py | 89 +++++++++++++++--------------- 2 files changed, 52 insertions(+), 47 deletions(-) diff --git a/.github/workflows/package_core.yml b/.github/workflows/package_core.yml index 33f6c9485..7a58873b5 100644 --- a/.github/workflows/package_core.yml +++ b/.github/workflows/package_core.yml @@ -18,6 +18,7 @@ jobs: CORE_TAG: ${{ env.CORE_TAG }} CORE_HASH: ${{ env.CORE_HASH }} ALL_BOARD_DATA: ${{ env.ALL_BOARD_DATA }} + ALL_BOARD_FQBNS: ${{ env.ALL_BOARD_FQBNS }} ARTIFACTS: ${{ env.ARTIFACTS }} SUB_ARCHES: ${{ env.SUB_ARCHES }} steps: @@ -46,6 +47,7 @@ jobs: # needs the above env vars to be usable run: | echo "CORE_TAG=$(git describe --tags --exact-match 2>/dev/null || echo $CORE_HASH)" >> "$GITHUB_ENV" + echo "ALL_BOARD_FQBNS=$(jq -c 'map((. + {link_mode: "static"}), (. 
+ {link_mode: "dynamic"}))' <<< ${ALL_BOARD_DATA})" >> "$GITHUB_ENV" echo "ARTIFACTS=$(jq -c '["zephyr"] + (map(.artifact) | unique)' <<< ${ALL_BOARD_DATA})" >> "$GITHUB_ENV" echo "SUB_ARCHES=$(jq -c 'map(.subarch) | unique' <<< ${ALL_BOARD_DATA})" >> "$GITHUB_ENV" @@ -214,7 +216,7 @@ jobs: test-core: # NOTE: this name is hardcoded in ci_inspect_logs.py - name: Test ${{ matrix.board }} + name: Test ${{ matrix.board }}:${{ matrix.link_mode }} runs-on: ubuntu-latest needs: - build-env @@ -222,13 +224,13 @@ jobs: strategy: matrix: include: - ${{ fromJSON( needs.build-env.outputs.ALL_BOARD_DATA ) }} + ${{ fromJSON( needs.build-env.outputs.ALL_BOARD_FQBNS ) }} fail-fast: false env: PLAT: arduino:${{ matrix.subarch }} - FQBN: arduino:${{ matrix.subarch }}:${{ matrix.board }} + FQBN: arduino:${{ matrix.subarch }}:${{ matrix.board }}:link_mode=${{ matrix.link_mode }} CORE_ARTIFACT: ArduinoCore-${{ matrix.artifact }}-${{ needs.build-env.outputs.CORE_HASH }} - ARTIFACT_TAG: ${{ needs.build-env.outputs.CORE_HASH }}-${{ matrix.board }} + ARTIFACT_TAG: ${{ needs.build-env.outputs.CORE_HASH }}-${{ matrix.board }}-${{ matrix.link_mode }} if: ${{ !cancelled() && needs.build-env.result == 'success' }} steps: - uses: actions/checkout@v4 diff --git a/extra/ci_inspect_logs.py b/extra/ci_inspect_logs.py index 308882b98..3affadd03 100755 --- a/extra/ci_inspect_logs.py +++ b/extra/ci_inspect_logs.py @@ -102,10 +102,11 @@ def read_meminfo(self): # Single test data structure class TestEntry: - def __init__(self, artifact, board, sketch, status, issues, job_link): + def __init__(self, artifact, board, sketch, link_mode, status, issues, job_link): self.artifact = artifact self.board = board self.sketch = sketch + self.link_mode = link_mode self.status = status self.issues = issues self.job_link = job_link @@ -149,7 +150,7 @@ def track(self, test_entry): BOARD_TESTS = defaultdict(TestGroup) # { board: TestGroup() } SKETCH_TESTS = defaultdict(lambda: defaultdict(TestGroup)) # { artifact: { 
sketch: TestGroup() } } -def log_test(artifact, board, sketch, exceptions, status, issues, job_link=None): +def log_test(artifact, board, sketch, link_mode, exceptions, status, issues, job_link=None): """ Logs individual test results into the global test tracking structures. """ @@ -163,7 +164,7 @@ def log_test(artifact, board, sketch, exceptions, status, issues, job_link=None) status = EXPECTED_ERROR # Create the test entry - test_entry = TestEntry(artifact, board, sketch, status, issues, job_link) + test_entry = TestEntry(artifact, board, sketch, link_mode, status, issues, job_link) # Track in global structures ARTIFACT_TESTS[artifact].track(test_entry) @@ -351,7 +352,7 @@ def print_test_matrix(artifact, artifact_boards, title, sketch_filter=lambda x: # Test logs by board for test in sorted(res.tests_with_issues, key=lambda x: x.status, reverse=True): - test_text = f"{test.board}" + test_text = f"{test.board}:{test.link_mode}" if test.job_link: test_text += f" (
full log)" f_print(f"{TEST_STATUS[test.status]} {test_text}
\n\n```") @@ -517,46 +518,48 @@ def print_mem_report(artifact, artifact_boards): if sketch_pattern: exceptions.append(re.compile(f"^(ArduinoCore-zephyr/)?{sketch_pattern}")) - # Get job link for this test - job_link = JOB_URLS.get(f"Test {board}") - if job_link: - job_link += "#step:6:1" - - # Extract data from the report file - report_file = f"arduino-{subarch}-{board}.json" - if not os.path.exists(report_file): - log_test(artifact, board, 'CI test', '', [], FAILURE, f"Report file not found.", job_link) - continue # Skip to the next board - - try: - with open(report_file, 'r') as f: - report_data = json.load(f) - except Exception as e: - log_test(artifact, board, 'CI test', '', [], FAILURE, f"Error reading report file: {e}", job_link) - continue # Skip to the next board - - reports = report_data.get('boards', [{}])[0].get('sketches', []) - if not reports: - log_test(artifact, board, 'CI test', '', [], FAILURE, "Test report is empty, check CI log.", job_link) - continue # Skip to the next board - - # Iterate through individual sketch reports - for report in reports: - sketch = report.get('name', 'unknown_sketch') - success = report.get('compilation_success', False) - issues = report.get('issues', []) - - # Replace long absolute paths with '...' for brevity. 
- sketch_issues = [ re.sub(r'(/.+?)((/[^/]+){3}):', r'...\2:', issue) for issue in issues ] - - if not success: - status = ERROR - elif len(sketch_issues): # Implies warnings/non-critical issues - status = WARNING - else: - status = PASS + # Get raw data from report file + for link_mode in ("static", "dynamic"): + # Get job link for this test + job_link = JOB_URLS.get(f"Test {board}:{link_mode}") + if job_link: + job_link += "#step:6:1" + + # Extract data from the report file + report_file = f"arduino-{subarch}-{board}-link_mode={link_mode}.json" + if not os.path.exists(report_file): + log_test(artifact, board, 'CI test', '', [], FAILURE, f"Report file for {link_mode} not found.", job_link) + continue # Skip to the next board + + try: + with open(report_file, 'r') as f: + report_data = json.load(f) + except Exception as e: + log_test(artifact, board, 'CI test', '', [], FAILURE, f"Error reading {link_mode} report file: {e}", job_link) + continue # Skip to the next board + + reports = report_data.get('boards', [{}])[0].get('sketches', []) + if not reports: + log_test(artifact, board, 'CI test', '', [], FAILURE, f"Test report for {link_mode} is empty, check CI log.", job_link) + continue # Skip to the next board + + # Iterate through individual sketch reports + for report in reports: + sketch = report.get('name', 'unknown_sketch') + success = report.get('compilation_success', False) + issues = report.get('issues', []) + + # Replace long absolute paths with '...' for brevity.
+ sketch_issues = [ re.sub(r'(/.+?)((/[^/]+){3}):', r'...\2:', issue) for issue in issues ] + + if not success: + status = ERROR + elif len(sketch_issues): # Implies warnings/non-critical issues + status = WARNING + else: + status = PASS - log_test(artifact, board, sketch, exceptions, status, sketch_issues, job_link) + log_test(artifact, board, sketch, link_mode, exceptions, status, sketch_issues, job_link) ARTIFACTS = sorted(ARTIFACT_TESTS.keys()) From a7aac3f9f98dfe2aa5d2db869b0aa4b0ac3d0d95 Mon Sep 17 00:00:00 2001 From: Luca Burelli Date: Fri, 9 Jan 2026 15:35:11 +0100 Subject: [PATCH 13/22] package_core: write PR comment with status --- .github/workflows/leave_pr_comment.yml | 60 ++++++++++++++++++++++++++ .github/workflows/package_core.yml | 16 +++++++ 2 files changed, 76 insertions(+) create mode 100644 .github/workflows/leave_pr_comment.yml diff --git a/.github/workflows/leave_pr_comment.yml b/.github/workflows/leave_pr_comment.yml new file mode 100644 index 000000000..8469627a3 --- /dev/null +++ b/.github/workflows/leave_pr_comment.yml @@ -0,0 +1,60 @@ +# SPDX-License-Identifier: Apache-2.0 + +name: Leave PR comment + +on: + workflow_run: + workflows: ["Package, test and upload core"] + types: + - completed + +permissions: + contents: read + pull-requests: write + +jobs: + comment-on-pr: + runs-on: ubuntu-latest + steps: + - name: Download artifact + uses: dawidd6/action-download-artifact@ac66b43f0e6a346234dd65d4d0c8fbb31cb316e5 # v11 + with: + workflow: package-core.yml + run_id: ${{ github.event.workflow_run.id }} + name: comment-request + if_no_artifact_found: ignore + + - name: Load PR number + run: | + echo "PR_NUM=$(cat pr_number || true)" >> $GITHUB_ENV + + - name: Check PR number + id: check-pr + uses: carpentries/actions/check-valid-pr@2e20fd5ee53b691e27455ce7ca3b16ea885140e8 # v0.15.0 + with: + pr: ${{ env.PR_NUM }} + sha: ${{ github.event.workflow_run.head_sha }} + + - name: Validate PR number + if: ${{ steps.check-pr.outputs.VALID != 'true' }} 
+ run: | + echo "ABORT: PR number validation failed!" + exit 1 + + - name: Update PR comment + if: ${{ steps.check-pr.outputs.VALID == 'true' }} + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + GH_REPO: ${{ github.repository }} + run: | + BODY="$(cat comment_body || echo '# :fire: CI failure processing logs!')" + + # delete existing comment, if present, then add a new one + gh pr comment ${PR_NUM} --delete-last --yes || true + gh pr comment ${PR_NUM} --body "${BODY}" + + - name: Clean up intermediate artifacts + if: ${{ steps.check-pr.outputs.VALID == 'true' }} + uses: geekyeggo/delete-artifact@v5.1.0 + with: + name: comment-request diff --git a/.github/workflows/package_core.yml b/.github/workflows/package_core.yml index 7a58873b5..b3eb177df 100644 --- a/.github/workflows/package_core.yml +++ b/.github/workflows/package_core.yml @@ -323,6 +323,22 @@ jobs: cat full_log >> $GITHUB_STEP_SUMMARY echo "CI_RESULT=$(cat result)" > $GITHUB_OUTPUT + if [ "${{ github.event_name }}" == "pull_request" ]; then + mkdir -p comment-request + echo "${{ github.event.pull_request.number }}" > comment-request/pr_number + echo -e "## Built \`$(extra/get_core_version.sh)\`\n" > comment-request/comment_body + echo >> comment-request/comment_body + cat summary | sed -e 's!\${\\color{\S*}\(.*\)}\$!\1!g' -e 's!\\%!%!g' >> comment-request/comment_body + fi + + - name: Archive comment information + uses: actions/upload-artifact@v4 + if: ${{ github.event_name == 'pull_request' }} + with: + name: comment-request + path: comment-request/ + retention-days: 1 + - name: Clean up intermediate artifacts uses: geekyeggo/delete-artifact@v5.1.0 with: From 38d04a50add9442c92ef6fdbdfd5fa95cc13016d Mon Sep 17 00:00:00 2001 From: Luca Burelli Date: Mon, 12 Jan 2026 12:07:02 +0100 Subject: [PATCH 14/22] fix: report proper sketch and loader sizes in CI packaging --- .github/workflows/package_core.yml | 8 +++++++- extra/build.sh | 1 + extra/ci_inspect_logs.py | 6 ++++-- extra/package_core.sh | 12 
+++++++++++- loader/main.c | 15 +++++++++++++++ 5 files changed, 38 insertions(+), 4 deletions(-) diff --git a/.github/workflows/package_core.yml b/.github/workflows/package_core.yml index b3eb177df..def653430 100644 --- a/.github/workflows/package_core.yml +++ b/.github/workflows/package_core.yml @@ -117,9 +117,14 @@ jobs: grep -i "warning:" $REPORT.stdout > $REPORT.warnings || true # extract the memory usage table (from the header to the first non-% line) + # override the size of the Flash with the size of the loader partition + # and add the size of the sketch partition (not reported by Zephyr) + LOADER_SIZE=$(( $(cat variants/${{ matrix.variant }}/syms-static.ld | grep '_loader_max_size' | cut -d '=' -f 2 | tr -d ') ;') )) + SKETCH_SIZE=$(( $(cat variants/${{ matrix.variant }}/syms-static.ld | grep '_sketch_max_size' | cut -d '=' -f 2 | tr -d ') ;') )) cat $REPORT.stdout | sed -n '/^Memory region/,/^[^%]*$/p' | head -n -1 \ | awk 'BEGIN {split("B KB MB GB", u); for(i in u) m[u[i]]=1024^(i-1)} /:/ {print "[\"" $1 "\"," $2*m[$3] "," $4*m[$5] "]"}' \ - | sort | jq -s > $REPORT.meminfo + | sort \ + | jq -s "map((select(.[0] == \"FLASH:\") | .[2]) |= ${LOADER_SIZE}) + [ [ \"SKETCH:\", 0, ${SKETCH_SIZE} ] ]" > $REPORT.meminfo - name: Package board artifacts if: ${{ !cancelled() }} diff --git a/extra/build.sh b/extra/build.sh index 22700dc81..6c35f1982 100755 --- a/extra/build.sh +++ b/extra/build.sh @@ -106,6 +106,7 @@ extra/gen_provides.py "${BUILD_DIR}/zephyr/zephyr.elf" -LF \ "+kheap__system_heap" \ "*sketch_base_addr=_sketch_start" \ "*sketch_max_size=_sketch_max_size" \ + "*loader_max_size=_loader_max_size" \ "malloc=__wrap_malloc" \ "free=__wrap_free" \ "realloc=__wrap_realloc" \ diff --git a/extra/ci_inspect_logs.py b/extra/ci_inspect_logs.py index 3affadd03..a8097dbb3 100755 --- a/extra/ci_inspect_logs.py +++ b/extra/ci_inspect_logs.py @@ -394,6 +394,7 @@ def print_mem_report(artifact, artifact_boards): f_print("SoC", end='') f_print("FLASH", end='')
f_print("RAM", end='') + f_print("User
sketch", end='') f_print("User heaps", end='') f_print("OS heaps", end='') f_print("") @@ -410,6 +411,7 @@ def print_mem_report(artifact, artifact_boards): f"{soc}", color_entry(BOARD_LOADERS[board].meminfo['FLASH']), color_entry(BOARD_LOADERS[board].meminfo['RAM']), + f"${{{ BOARD_LOADERS[board].meminfo['SKETCH'][1] }}}$", f"${{{ BOARD_LOADERS[board].config.get('CONFIG_HEAP_MEM_POOL_SIZE', 0) }}}$", f"${{{ BOARD_LOADERS[board].config['CONFIG_SRAM_SIZE']*1024 - BOARD_LOADERS[board].meminfo['RAM'][0] }}}$", f"${{{ BOARD_LOADERS[board].config['CONFIG_LLEXT_HEAP_SIZE']*1024 }}}$", @@ -417,7 +419,7 @@ def print_mem_report(artifact, artifact_boards): ] f_print("") - col_aligns = ['center', 'left', 'center', 'right', 'right', 'right', 'right', 'right', 'right'] + col_aligns = ['center', 'left', 'center', 'right', 'right', 'right', 'right', 'right', 'right', 'right'] for index, cell in enumerate(row): f_print(f"\n\n{cell}\n\n") f_print("") @@ -426,7 +428,7 @@ def print_mem_report(artifact, artifact_boards): extra_data_present = False for soc in sorted(list(set([ ALL_BOARD_DATA[board]['soc'] for board in artifact_boards ]))): soc_boards = [ board for board in artifact_boards if ALL_BOARD_DATA[board]['soc'] == soc ] - sorted_regions = sorted(r for r in REGIONS_BY_SOC[soc] if r not in ('FLASH', 'RAM')) + sorted_regions = sorted(r for r in REGIONS_BY_SOC[soc] if r not in ('FLASH', 'RAM', 'SKETCH')) if not sorted_regions: continue diff --git a/extra/package_core.sh b/extra/package_core.sh index 02a24ddee..245b7e23e 100755 --- a/extra/package_core.sh +++ b/extra/package_core.sh @@ -49,6 +49,16 @@ for board in $EXCLUDED_BOARDS ; do # remove (even commented) lines for excluded boards sed -i "/^\(\s*#\s*\)\?${board}\./d" $TEMP_BOARDS done +# set proper maximum sizes for included variants +for variant in $INCLUDED_VARIANTS ; do + board=$(echo ${BOARD_DETAILS} | jq -cr "map(select(.variant == \"${variant}\")) | .[0].board") + # maximum sketch size: size of sketch partition (exact 
limit) + # maximum data size: configured LLEXT heap size (larger bound, real limit is smaller) + CODE_SIZE=$(( $(cat variants/${variant}/syms-static.ld | grep '_sketch_max_size' | cut -d '=' -f 2 | tr -d ');') )) + DATA_SIZE=$(( 1024*$(cat firmwares/zephyr-${variant}.config | grep 'LLEXT_HEAP_SIZE' | cut -d '=' -f 2) )) + sed -i -e "s/^${board}\.upload\.maximum_size=.*/${board}.upload.maximum_size=${CODE_SIZE}/" $TEMP_BOARDS + sed -i -e "s/^${board}\.upload\.maximum_data_size=.*/${board}.upload.maximum_data_size=${DATA_SIZE}/" $TEMP_BOARDS +done # remove multiple empty lines sed -i '/^$/N;/^\n$/D' $TEMP_BOARDS @@ -56,7 +66,7 @@ sed -i '/^$/N;/^\n$/D' $TEMP_BOARDS TEMP_PLATFORM=$(mktemp -p . | sed 's/\.\///') cat platform.txt > ${TEMP_PLATFORM} [ -z "$ARTIFACT_NAME" ] || sed -ie "s/^name=.*/name=${ARTIFACT_NAME}/" ${TEMP_PLATFORM} -sed -ie "s/^version=.*/version=$(extra/get_core_version.sh)/" ${TEMP_PLATFORM} +sed -i -e "s/^version=.*/version=$(extra/get_core_version.sh)/" ${TEMP_PLATFORM} declutter_file() { # remove comments, whitespace at EOL, '/' dir terminators and empty lines diff --git a/loader/main.c b/loader/main.c index 7276f303e..cac8669ac 100644 --- a/loader/main.c +++ b/loader/main.c @@ -23,6 +23,8 @@ LOG_MODULE_REGISTER(sketch); #include #include +#include + #define HEADER_LEN 16 struct sketch_header_v1 { @@ -89,10 +91,23 @@ void llext_entry(void *arg0, void *arg1, void *arg2) { } #endif /* CONFIG_USERSPACE */ +/* Export Flash parameters for use by core building scripts */ __attribute__((retain)) const uintptr_t sketch_base_addr = DT_REG_ADDR(DT_GPARENT(DT_NODELABEL(user_sketch))) + DT_REG_ADDR(DT_NODELABEL(user_sketch)); __attribute__((retain)) const uintptr_t sketch_max_size = DT_REG_SIZE(DT_NODELABEL(user_sketch)); +#if DT_HAS_FIXED_PARTITION_LABEL(image_0) + #define LOADER_MAX_SIZE DT_REG_SIZE(DT_NODE_BY_FIXED_PARTITION_LABEL(image_0)) +#elif CONFIG_FLASH_LOAD_SIZE > 0 + #define LOADER_MAX_SIZE CONFIG_FLASH_LOAD_SIZE +#else + #ifndef 
CONFIG_FLASH_LOAD_OFFSET + #define CONFIG_FLASH_LOAD_OFFSET 0 + #endif + #define LOADER_MAX_SIZE (DT_REG_SIZE(DT_NODELABEL(flash0)) - CONFIG_FLASH_LOAD_OFFSET) +#endif +__attribute__((retain)) const uintptr_t loader_max_size = LOADER_MAX_SIZE; + static int loader(const struct shell *sh) { const struct flash_area *fa; int rc; From 97763be27a88c7a786d9c755d3ecb7627293ba3c Mon Sep 17 00:00:00 2001 From: Luca Burelli Date: Mon, 12 Jan 2026 18:15:46 +0100 Subject: [PATCH 15/22] fix partitions --- .../arduino_nano_matter_mgm240sd22vna.overlay | 15 ++---------- .../arduino_nicla_sense_me_nrf52832.overlay | 10 ++++++++ .../arduino_uno_q_stm32u585xx.overlay | 23 +++++++++++++++---- 3 files changed, 31 insertions(+), 17 deletions(-) diff --git a/variants/arduino_nano_matter_mgm240sd22vna/arduino_nano_matter_mgm240sd22vna.overlay b/variants/arduino_nano_matter_mgm240sd22vna/arduino_nano_matter_mgm240sd22vna.overlay index 1830d3617..59136b0fa 100644 --- a/variants/arduino_nano_matter_mgm240sd22vna/arduino_nano_matter_mgm240sd22vna.overlay +++ b/variants/arduino_nano_matter_mgm240sd22vna/arduino_nano_matter_mgm240sd22vna.overlay @@ -1,16 +1,5 @@ -&flash0 { - partitions { - compatible = "fixed-partitions"; - #address-cells = <1>; - #size-cells = <1>; - - /* Arduino user sketch partition */ - user_sketch: partition@C4000 { - reg = <0x000C4000 0x000B8000>; - label = "user"; - }; - }; -}; +/* Arduino user sketch partition */ +user_sketch: &slot1_partition {}; / { zephyr,user { diff --git a/variants/arduino_nicla_sense_me_nrf52832/arduino_nicla_sense_me_nrf52832.overlay b/variants/arduino_nicla_sense_me_nrf52832/arduino_nicla_sense_me_nrf52832.overlay index c7443b678..6927a3376 100644 --- a/variants/arduino_nicla_sense_me_nrf52832/arduino_nicla_sense_me_nrf52832.overlay +++ b/variants/arduino_nicla_sense_me_nrf52832/arduino_nicla_sense_me_nrf52832.overlay @@ -1,5 +1,15 @@ &flash0 { partitions { + /delete-node/ slot0_partition; + /delete-node/ slot1_partition; + /delete-node/ 
scratch_partition; + /delete-node/ storage_partition; + + slot0_partition: partition@10000 { + label = "image-0"; /* in zephyr/boards/arduino/nicla_sense_me/arduino_nicla_sense_me.dts:154 */ + reg = < 0x10000 0x60000 >; /* in zephyr/boards/arduino/nicla_sense_me/arduino_nicla_sense_me.dts:155 */ + }; + user_sketch: partition@70000 { label = "user"; reg = <0x070000 0x10000>; diff --git a/variants/arduino_uno_q_stm32u585xx/arduino_uno_q_stm32u585xx.overlay b/variants/arduino_uno_q_stm32u585xx/arduino_uno_q_stm32u585xx.overlay index b37c40282..528abf38a 100644 --- a/variants/arduino_uno_q_stm32u585xx/arduino_uno_q_stm32u585xx.overlay +++ b/variants/arduino_uno_q_stm32u585xx/arduino_uno_q_stm32u585xx.overlay @@ -56,11 +56,26 @@ &flash0 { partitions { - bootanimation: partition@90000 { - reg = <0x090000 DT_SIZE_K(64)>; + /delete-node/ slot0_partition; + /delete-node/ slot1_partition; + /delete-node/ scratch_partition; + /delete-node/ storage_partition; + + slot0_partition: partition@10000 { + label = "image-0"; + reg = < 0x10000 DT_SIZE_K(768) >; }; - user_sketch: partition@f0000 { - reg = <0x0F0000 DT_SIZE_K(64)>; + + bootanimation: partition@d0000 { + reg = < 0xd0000 DT_SIZE_K(192) >; + }; + + user_sketch: partition@100000 { + reg = < 0x100000 DT_SIZE_K(768) >; + }; + + storage_partition: partition@1c0000 { + reg = < 0x1c0000 DT_SIZE_K(256) >; }; }; }; From 72fb7e12c6dcd4251cab22491c6487f338de5aee Mon Sep 17 00:00:00 2001 From: Luca Burelli Date: Tue, 13 Jan 2026 15:52:03 +0100 Subject: [PATCH 16/22] step --- extra/ci_inspect_logs.py | 51 +++++++++++++++++++++++----------------- 1 file changed, 30 insertions(+), 21 deletions(-) diff --git a/extra/ci_inspect_logs.py b/extra/ci_inspect_logs.py index a8097dbb3..8868fb3e2 100755 --- a/extra/ci_inspect_logs.py +++ b/extra/ci_inspect_logs.py @@ -102,28 +102,42 @@ def read_meminfo(self): # Single test data structure class TestEntry: - def __init__(self, artifact, board, sketch, link_mode, status, issues, job_link): + def 
__init__(self, artifact, board, sketch, link_mode, excepted, status, issues, job_link): self.artifact = artifact self.board = board self.sketch = sketch self.link_mode = link_mode - self.status = status + self.excepted = excepted + self.status = EXPECTED_ERROR if excepted and status == ERROR else status self.issues = issues self.job_link = job_link + self.invalid_exception = excepted and status in (PASS, WARNING) + + # (1.................) (2....) (3................) (4.) + match = re.search(r'(libraries|examples)/([^/]+)/(examples/|extras/)?(.*)', sketch) + if match: + self.group = match.group(2) + self.name = match.group(4) + else: + self.group = "" + self.name = sketch # Summary data structure class TestGroup: def __init__(self): - # Sets to track unique board and sketch names + # Sets to track unique board, sketch and group names self.boards = set() self.sketches = set() # Counts of test results by status self.counts = { status : 0 for status in [PASS, WARNING, EXPECTED_ERROR, ERROR, FAILURE] } # Overall status of the group self.status = SKIP - # List of individual TestEntry objects (all, only with issues) + # List of individual TestEntry objects by feature self.tests = [] self.tests_with_issues = [] + self.tests_with_invalid_exceptions = [] + # Tests grouped by result + self.tests_by_group = defaultdict(set) # { group: set(name, res) } def track(self, test_entry): """ @@ -132,11 +146,14 @@ def track(self, test_entry): self.tests.append(test_entry) if test_entry.issues: self.tests_with_issues.append(test_entry) + if test_entry.invalid_exception: + self.tests_with_invalid_exceptions.append(test_entry) self.counts[test_entry.status] += 1 self.status = max(self.status, test_entry.status) self.boards.add(test_entry.board) self.sketches.add(test_entry.sketch) + self.tests_by_group[test_entry.group].add((test_entry.name, test_entry)) # Global Data Structures # ---------------------- @@ -159,12 +176,9 @@ def log_test(artifact, board, sketch, link_mode, exceptions, 
status, issues, job if isinstance(issues, str): issues = [ issues ] - # Adjust the status for expected errors - if status == ERROR and any(pattern.match(sketch) for pattern in exceptions): - status = EXPECTED_ERROR - # Create the test entry - test_entry = TestEntry(artifact, board, sketch, link_mode, status, issues, job_link) + excepted = any(pattern.match(sketch) for pattern in exceptions) + test_entry = TestEntry(artifact, board, sketch, link_mode, excepted, status, issues, job_link) # Track in global structures ARTIFACT_TESTS[artifact].track(test_entry) @@ -287,18 +301,10 @@ def print_test_matrix(artifact, artifact_boards, title, sketch_filter=lambda x: if not sketch_filter(res): continue - # (1.................) (2....) (3................) (4.) - match = re.search(r'(libraries|examples)/([^/]+)/(examples/|extras/)?(.*)', sketch) - if match: - group = match.group(2) - sample = match.group(4) - else: - group = "" - sample = sketch - - if group not in sketch_groups: - sketch_groups[group] = [] - sketch_groups[group].append((sample, res)) + for group in res.groups: + if group not in sketch_groups: + sketch_groups[group] = [] + sketch_groups[group].append((sample, res)) # Build the data rows, grouping libraries together. Each row corresponds to # a sketch, each cell to the test result icon of that sketch on that board. 
@@ -606,6 +612,9 @@ def print_mem_report(artifact, artifact_boards): print_test_matrix(artifact, artifact_boards, "issues", sketch_filter=lambda res: res.status in (ERROR, EXPECTED_ERROR)) + for test in ARTIFACT_TESTS[artifact].tests_with_invalid_exceptions: + f_print(":warning: Invalid exception for {test.sketch} on {test.board}") + successful_tests = ARTIFACT_TESTS[artifact].counts[PASS] + ARTIFACT_TESTS[artifact].counts[WARNING] warning_tests = ARTIFACT_TESTS[artifact].counts[WARNING] if successful_tests: From 7c31bbfbd4174a081c2067e6ce42b3885eec9639 Mon Sep 17 00:00:00 2001 From: Luca Burelli Date: Wed, 14 Jan 2026 09:32:45 +0100 Subject: [PATCH 17/22] step --- extra/ci_inspect_logs.py | 16 ++++++++++------ 1 file changed, 10 insertions(+), 6 deletions(-) diff --git a/extra/ci_inspect_logs.py b/extra/ci_inspect_logs.py index 8868fb3e2..e813705d3 100755 --- a/extra/ci_inspect_logs.py +++ b/extra/ci_inspect_logs.py @@ -324,11 +324,15 @@ def print_test_matrix(artifact, artifact_boards, title, sketch_filter=lambda x: row_data += f"{name_link}" for board in artifact_boards: - test = next((test for test in res.tests if test.board == board), None) - status = test.status if test else SKIP - issues = test.issues if test else "" - - row_data += f"{TEST_STATUS[status]}" + # there are multiple tests per sketch&board due to FQBN variations + # display the worst status, and flag invalid exceptions + status = max( [ t.status for t in res.tests if test.board == board ], SKIP) + invalid = any( [ t.invalid_exception for t in res.tests if test.board == board ] ) + if invalid and status in (PASS, WARNING): + status_icon = ":interrobang:" + else: + status_icon = TEST_STATUS[status] + row_data += f"{status_icon}" row_data += "" data_rows.append(row_data) @@ -613,7 +617,7 @@ def print_mem_report(artifact, artifact_boards): print_test_matrix(artifact, artifact_boards, "issues", sketch_filter=lambda res: res.status in (ERROR, EXPECTED_ERROR)) for test in 
ARTIFACT_TESTS[artifact].tests_with_invalid_exceptions: - f_print(":warning: Invalid exception for {test.sketch} on {test.board}") + f_print(":interrobang: Unnecessary exception for {test.group} {test.name} on {test.board}") successful_tests = ARTIFACT_TESTS[artifact].counts[PASS] + ARTIFACT_TESTS[artifact].counts[WARNING] warning_tests = ARTIFACT_TESTS[artifact].counts[WARNING] From e6929e8fd4e1308895bb412586f461c960837be1 Mon Sep 17 00:00:00 2001 From: Luca Burelli Date: Wed, 14 Jan 2026 09:53:02 +0100 Subject: [PATCH 18/22] step --- .../arduino_giga_r1_stm32h747xx_m7.overlay | 2 +- .../arduino_nicla_sense_me_nrf52832.overlay | 8 ++++---- .../arduino_uno_q_stm32u585xx.overlay | 8 ++++---- 3 files changed, 9 insertions(+), 9 deletions(-) diff --git a/variants/arduino_giga_r1_stm32h747xx_m7/arduino_giga_r1_stm32h747xx_m7.overlay b/variants/arduino_giga_r1_stm32h747xx_m7/arduino_giga_r1_stm32h747xx_m7.overlay index 6aba6b4b0..9402e2256 100644 --- a/variants/arduino_giga_r1_stm32h747xx_m7/arduino_giga_r1_stm32h747xx_m7.overlay +++ b/variants/arduino_giga_r1_stm32h747xx_m7/arduino_giga_r1_stm32h747xx_m7.overlay @@ -342,7 +342,7 @@ &flash0 { partitions { - /delete-node/ slot0_partition; + /delete-node/ &slot0_partition; slot0_partition: partition@40000 { label = "image-0"; diff --git a/variants/arduino_nicla_sense_me_nrf52832/arduino_nicla_sense_me_nrf52832.overlay b/variants/arduino_nicla_sense_me_nrf52832/arduino_nicla_sense_me_nrf52832.overlay index 6927a3376..79c1de24e 100644 --- a/variants/arduino_nicla_sense_me_nrf52832/arduino_nicla_sense_me_nrf52832.overlay +++ b/variants/arduino_nicla_sense_me_nrf52832/arduino_nicla_sense_me_nrf52832.overlay @@ -1,9 +1,9 @@ &flash0 { partitions { - /delete-node/ slot0_partition; - /delete-node/ slot1_partition; - /delete-node/ scratch_partition; - /delete-node/ storage_partition; + /delete-node/ &slot0_partition; + /delete-node/ &slot1_partition; + /delete-node/ &scratch_partition; + /delete-node/ &storage_partition; 
slot0_partition: partition@10000 { label = "image-0"; /* in zephyr/boards/arduino/nicla_sense_me/arduino_nicla_sense_me.dts:154 */ diff --git a/variants/arduino_uno_q_stm32u585xx/arduino_uno_q_stm32u585xx.overlay b/variants/arduino_uno_q_stm32u585xx/arduino_uno_q_stm32u585xx.overlay index 528abf38a..7ca54f9d9 100644 --- a/variants/arduino_uno_q_stm32u585xx/arduino_uno_q_stm32u585xx.overlay +++ b/variants/arduino_uno_q_stm32u585xx/arduino_uno_q_stm32u585xx.overlay @@ -56,10 +56,10 @@ &flash0 { partitions { - /delete-node/ slot0_partition; - /delete-node/ slot1_partition; - /delete-node/ scratch_partition; - /delete-node/ storage_partition; + /delete-node/ &slot0_partition; + /delete-node/ &slot1_partition; + /delete-node/ &scratch_partition; + /delete-node/ &storage_partition; slot0_partition: partition@10000 { label = "image-0"; From 28b2bcc2ff3e5635d5ebfff794ee112ab25abad7 Mon Sep 17 00:00:00 2001 From: Luca Burelli Date: Wed, 14 Jan 2026 10:10:03 +0100 Subject: [PATCH 19/22] step --- extra/ci_inspect_logs.py | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/extra/ci_inspect_logs.py b/extra/ci_inspect_logs.py index e813705d3..7f05daf9b 100755 --- a/extra/ci_inspect_logs.py +++ b/extra/ci_inspect_logs.py @@ -125,9 +125,10 @@ def __init__(self, artifact, board, sketch, link_mode, excepted, status, issues, # Summary data structure class TestGroup: def __init__(self): - # Sets to track unique board, sketch and group names + # Sets to track unique board, sketch and (group,name) tuples self.boards = set() self.sketches = set() + self.group_names = set() # Counts of test results by status self.counts = { status : 0 for status in [PASS, WARNING, EXPECTED_ERROR, ERROR, FAILURE] } # Overall status of the group @@ -136,8 +137,6 @@ def __init__(self): self.tests = [] self.tests_with_issues = [] self.tests_with_invalid_exceptions = [] - # Tests grouped by result - self.tests_by_group = defaultdict(set) # { group: set(name, res) } def 
track(self, test_entry): """ @@ -153,7 +152,7 @@ def track(self, test_entry): self.status = max(self.status, test_entry.status) self.boards.add(test_entry.board) self.sketches.add(test_entry.sketch) - self.tests_by_group[test_entry.group].add((test_entry.name, test_entry)) + self.group_names.add((test_entry.group, test_entry.name)) # Global Data Structures # ---------------------- @@ -295,16 +294,14 @@ def print_test_matrix(artifact, artifact_boards, title, sketch_filter=lambda x: header_row += "" # Group sketches by library - sketch_groups = {} + sketch_groups = defaultdict(list) for sketch in ARTIFACT_TESTS[artifact].sketches: res = SKETCH_TESTS[artifact][sketch] if not sketch_filter(res): continue - for group in res.groups: - if group not in sketch_groups: - sketch_groups[group] = [] - sketch_groups[group].append((sample, res)) + for group, name in res.group_names: + sketch_groups[group].append((name, res)) # Build the data rows, grouping libraries together. Each row corresponds to # a sketch, each cell to the test result icon of that sketch on that board. 
@@ -616,8 +613,11 @@ def print_mem_report(artifact, artifact_boards): print_test_matrix(artifact, artifact_boards, "issues", sketch_filter=lambda res: res.status in (ERROR, EXPECTED_ERROR)) - for test in ARTIFACT_TESTS[artifact].tests_with_invalid_exceptions: - f_print(":interrobang: Unnecessary exception for {test.group} {test.name} on {test.board}") + for sketch in SKETCH_TESTS[artifact]: + res = SKETCH_TESTS[artifact][sketch] + if res.status in (PASS, WARNING): + for test in res.tests_with_invalid_exceptions: + f_print(f":interrobang: Unnecessary exception for {test.group} {test.name} on {test.board}") successful_tests = ARTIFACT_TESTS[artifact].counts[PASS] + ARTIFACT_TESTS[artifact].counts[WARNING] warning_tests = ARTIFACT_TESTS[artifact].counts[WARNING] From 0009546097dc348cd7803c9e540313deaef3ae2a Mon Sep 17 00:00:00 2001 From: Luca Burelli Date: Wed, 14 Jan 2026 10:11:29 +0100 Subject: [PATCH 20/22] step --- .../arduino_giga_r1_stm32h747xx_m7.overlay | 4 ++-- .../arduino_nicla_sense_me_nrf52832.overlay | 10 +++++----- .../arduino_uno_q_stm32u585xx.overlay | 10 +++++----- 3 files changed, 12 insertions(+), 12 deletions(-) diff --git a/variants/arduino_giga_r1_stm32h747xx_m7/arduino_giga_r1_stm32h747xx_m7.overlay b/variants/arduino_giga_r1_stm32h747xx_m7/arduino_giga_r1_stm32h747xx_m7.overlay index 9402e2256..109a4ed32 100644 --- a/variants/arduino_giga_r1_stm32h747xx_m7/arduino_giga_r1_stm32h747xx_m7.overlay +++ b/variants/arduino_giga_r1_stm32h747xx_m7/arduino_giga_r1_stm32h747xx_m7.overlay @@ -340,10 +340,10 @@ }; +/delete-node/ &slot0_partition; + &flash0 { partitions { - /delete-node/ &slot0_partition; - slot0_partition: partition@40000 { label = "image-0"; reg = <0x040000 0xa0000>; diff --git a/variants/arduino_nicla_sense_me_nrf52832/arduino_nicla_sense_me_nrf52832.overlay b/variants/arduino_nicla_sense_me_nrf52832/arduino_nicla_sense_me_nrf52832.overlay index 79c1de24e..da560f03b 100644 ---
a/variants/arduino_nicla_sense_me_nrf52832/arduino_nicla_sense_me_nrf52832.overlay +++ b/variants/arduino_nicla_sense_me_nrf52832/arduino_nicla_sense_me_nrf52832.overlay @@ -1,10 +1,10 @@ +/delete-node/ &slot0_partition; +/delete-node/ &slot1_partition; +/delete-node/ &scratch_partition; +/delete-node/ &storage_partition; + &flash0 { partitions { - /delete-node/ &slot0_partition; - /delete-node/ &slot1_partition; - /delete-node/ &scratch_partition; - /delete-node/ &storage_partition; - slot0_partition: partition@10000 { label = "image-0"; /* in zephyr/boards/arduino/nicla_sense_me/arduino_nicla_sense_me.dts:154 */ reg = < 0x10000 0x60000 >; /* in zephyr/boards/arduino/nicla_sense_me/arduino_nicla_sense_me.dts:155 */ diff --git a/variants/arduino_uno_q_stm32u585xx/arduino_uno_q_stm32u585xx.overlay b/variants/arduino_uno_q_stm32u585xx/arduino_uno_q_stm32u585xx.overlay index 7ca54f9d9..98d9552b9 100644 --- a/variants/arduino_uno_q_stm32u585xx/arduino_uno_q_stm32u585xx.overlay +++ b/variants/arduino_uno_q_stm32u585xx/arduino_uno_q_stm32u585xx.overlay @@ -54,13 +54,13 @@ }; */ +/delete-node/ &slot0_partition; +/delete-node/ &slot1_partition; +/delete-node/ &scratch_partition; +/delete-node/ &storage_partition; + &flash0 { partitions { - /delete-node/ &slot0_partition; - /delete-node/ &slot1_partition; - /delete-node/ &scratch_partition; - /delete-node/ &storage_partition; - slot0_partition: partition@10000 { label = "image-0"; reg = < 0x10000 DT_SIZE_K(768) >; From 97534d1799af404b02a5e1db7e342ba4140dc86b Mon Sep 17 00:00:00 2001 From: Luca Burelli Date: Wed, 14 Jan 2026 10:39:04 +0100 Subject: [PATCH 21/22] step --- extra/ci_inspect_logs.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/extra/ci_inspect_logs.py b/extra/ci_inspect_logs.py index 7f05daf9b..83dc2402e 100755 --- a/extra/ci_inspect_logs.py +++ b/extra/ci_inspect_logs.py @@ -323,8 +323,8 @@ def print_test_matrix(artifact, artifact_boards, title, sketch_filter=lambda x: for board in 
artifact_boards: # there are multiple tests per sketch&board due to FQBN variations # display the worst status, and flag invalid exceptions - status = max( [ t.status for t in res.tests if test.board == board ], SKIP) - invalid = any( [ t.invalid_exception for t in res.tests if test.board == board ] ) + status = max( [ t.status for t in res.tests if t.board == board ], SKIP) + invalid = any( [ t.invalid_exception for t in res.tests if t.board == board ] ) if invalid and status in (PASS, WARNING): status_icon = ":interrobang:" else: From 442d583274c96929f6c86349cdec1970f6f46e4f Mon Sep 17 00:00:00 2001 From: Luca Burelli Date: Wed, 14 Jan 2026 10:59:21 +0100 Subject: [PATCH 22/22] ci_inspect_logs: pass SKIP via default= to max() --- extra/ci_inspect_logs.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/extra/ci_inspect_logs.py b/extra/ci_inspect_logs.py index 83dc2402e..0036d70ee 100755 --- a/extra/ci_inspect_logs.py +++ b/extra/ci_inspect_logs.py @@ -323,7 +323,7 @@ def print_test_matrix(artifact, artifact_boards, title, sketch_filter=lambda x: for board in artifact_boards: # there are multiple tests per sketch&board due to FQBN variations # display the worst status, and flag invalid exceptions - status = max( [ t.status for t in res.tests if t.board == board ], SKIP) + status = max( [ t.status for t in res.tests if t.board == board ], default=SKIP) invalid = any( [ t.invalid_exception for t in res.tests if t.board == board ] ) if invalid and status in (PASS, WARNING): status_icon = ":interrobang:"