From e205c1c8f1dae0df2bac0efe17993abf9cb71288 Mon Sep 17 00:00:00 2001 From: Jakub Kocka Date: Mon, 23 Mar 2026 09:06:30 +0100 Subject: [PATCH 1/6] fix: Added idf-component-manager to exclude_list idf-component-manager v3.0.0 does not support Python <= 3.10 https://pypi.org/project/idf-component-manager/3.0.0/ --- exclude_list.yaml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/exclude_list.yaml b/exclude_list.yaml index f513fdb..a696df7 100644 --- a/exclude_list.yaml +++ b/exclude_list.yaml @@ -140,3 +140,9 @@ # https://pypi.org/project/mcp/ - package_name: 'mcp' python: ['==3.8', '==3.9'] + +# idf-component-manager v3.0.0 is not supported by Python <= 3.10 +# https://pypi.org/project/idf-component-manager/3.0.0/ +- package_name: 'idf-component-manager' + version: '==3.0.0' + python: ['<3.10'] From 5a16b671ab7fe8db1be66b052739e55eeb2d7c4f Mon Sep 17 00:00:00 2001 From: Jakub Kocka Date: Mon, 23 Mar 2026 15:57:01 +0100 Subject: [PATCH 2/6] fix: Fixed the BadZipFile wheel issues - PEP 427 --- repair_wheels.py | 19 +++++++++++++++++-- test_wheels_install.py | 38 ++++++++++++++++++++++++++++++++++++++ 2 files changed, 55 insertions(+), 2 deletions(-) diff --git a/repair_wheels.py b/repair_wheels.py index 8afab4f..42cd492 100644 --- a/repair_wheels.py +++ b/repair_wheels.py @@ -14,6 +14,7 @@ import platform import subprocess +import zipfile from pathlib import Path from typing import Union @@ -186,6 +187,13 @@ def main() -> None: skipped_count += 1 continue + # PEP 427: wheels are zip files; invalid magic usually means truncated/corrupt CI artifact + if not zipfile.is_zipfile(wheel): + print_color(" -> Deleting file (not a valid zip / wheel archive)", Fore.RED) + wheel.unlink() + deleted_count += 1 + continue + # Clean temp directory for old_wheel in temp_dir.glob("*.whl"): old_wheel.unlink() @@ -295,14 +303,21 @@ def main() -> None: # A repaired wheel was created successfully if repaired.name != wheel.name: wheel.unlink() # Remove original - 
repaired.rename(wheel.parent / repaired.name) + final_path = wheel.parent / repaired.name + repaired.rename(final_path) print_color(f" -> Replaced with repaired wheel: {repaired.name}", Fore.GREEN) else: # Name unchanged wheel.unlink() repaired.rename(wheel) + final_path = wheel print_color(f" -> Repaired successfully: {repaired.name}", Fore.GREEN) - repaired_count += 1 + if not zipfile.is_zipfile(final_path): + print_color(" -> Deleting repaired output (not a valid zip archive)", Fore.RED) + final_path.unlink() + deleted_count += 1 + else: + repaired_count += 1 elif result.returncode == 0: # No repaired wheel created, but command succeeded (already compatible) print_color(" -> Keeping original wheel (already compatible)", Fore.GREEN) diff --git a/test_wheels_install.py b/test_wheels_install.py index 6531d1f..3023384 100644 --- a/test_wheels_install.py +++ b/test_wheels_install.py @@ -8,6 +8,10 @@ This script finds and installs wheels compatible with the current Python version, verifying that wheel files are valid and platform-compatible. It also checks wheels against exclude_list.yaml and removes incompatible ones. + +Wheels are ZIP archives (PEP 427). pip opens them with the zipfile module; a +BadZipFile / "Bad magic number" error means the bytes on disk are not a valid +ZIP (truncated, corrupted, or not a wheel), not that ".whl" was mistaken for ".zip". 
""" from __future__ import annotations @@ -15,6 +19,7 @@ import re import subprocess import sys +import zipfile from pathlib import Path @@ -141,6 +146,23 @@ def is_compatibility_error(error_message: str) -> bool: return any(err in error_message for err in compatibility_errors) +def is_corrupt_wheel_archive_error(error_message: str) -> bool: + """True if pip failed because the file is not a readable ZIP / wheel archive.""" + markers = ( + "BadZipFile", + "Bad magic number for file header", + "has an invalid wheel", + "zipfile.BadZipFile", + ) + return any(m in error_message for m in markers) + + +def discard_corrupt_wheel(wheel_path: Path, note: str) -> None: + """Remove wheel from the test tree and print a single-line warning.""" + wheel_path.unlink(missing_ok=True) + print_color(f"-- {wheel_path.name} ({note})", Fore.YELLOW) + + def main() -> int: python_version_tag = get_python_version_tag() python_version = f"{sys.version_info.major}.{sys.version_info.minor}" @@ -187,12 +209,18 @@ def main() -> int: installed = 0 failed = 0 deleted = 0 + discarded_corrupt = 0 failed_wheels = [] deleted_wheels = [] print_color("---------- INSTALL WHEELS ----------") for wheel_path in wheels_to_install: + if not zipfile.is_zipfile(wheel_path): + discarded_corrupt += 1 + discard_corrupt_wheel(wheel_path, "invalid zip — not a valid wheel file (PEP 427)") + continue + success, error_message = install_wheel(wheel_path) if success: @@ -204,6 +232,11 @@ def main() -> int: deleted_wheels.append(wheel_path.name) wheel_path.unlink() print_color(f"-- {wheel_path.name} (compatibility constraint)", Fore.YELLOW) + elif is_corrupt_wheel_archive_error(error_message): + # Truncated/corrupt artifact or bad repair output; same handling as incompatible: + # drop from this test artifact so CI can continue (see module docstring). 
+ discarded_corrupt += 1 + discard_corrupt_wheel(wheel_path, "invalid / corrupt zip (pip could not read wheel)") else: failed += 1 failed_wheels.append((wheel_path.name, error_message)) @@ -221,6 +254,11 @@ def main() -> int: print_color(f"Excluded {excluded} wheels (exclude_list.yaml)", Fore.YELLOW) if deleted > 0: print_color(f"Deleted {deleted} wheels (compatibility constraint)", Fore.YELLOW) + if discarded_corrupt > 0: + print_color( + f"Discarded {discarded_corrupt} wheels (invalid or corrupt zip archive)", + Fore.YELLOW, + ) if failed > 0: print_color(f"Failed {failed} wheels", Fore.RED) From 7164a482c83934875051a446ecfd826b558c5f2c Mon Sep 17 00:00:00 2001 From: Jakub Kocka Date: Tue, 24 Mar 2026 12:17:33 +0100 Subject: [PATCH 3/6] feat: Added esptool dependencies fetch for assembling requirements --- .github/workflows/unit-tests.yml | 2 +- build_requirements.txt | 1 + build_wheels.py | 16 ++++++++++++++++ 3 files changed, 18 insertions(+), 1 deletion(-) diff --git a/.github/workflows/unit-tests.yml b/.github/workflows/unit-tests.yml index aee50aa..e79b428 100644 --- a/.github/workflows/unit-tests.yml +++ b/.github/workflows/unit-tests.yml @@ -32,7 +32,7 @@ jobs: - name: Install dependencies run: | python -m pip install --upgrade pip - pip install packaging pyyaml colorama requests + python -m pip install -r build_requirements.txt - name: Run unit tests run: python -m unittest discover -s . 
-v diff --git a/build_requirements.txt b/build_requirements.txt index 08e4834..0f2849f 100644 --- a/build_requirements.txt +++ b/build_requirements.txt @@ -4,6 +4,7 @@ requests~=2.31.0 packaging~=23.2 PyYAML~=6.0.1 colorama~=0.4.6 +tomli; python_version < "3.11" # ----- build process ----- boto3~=1.34.4 diff --git a/build_wheels.py b/build_wheels.py index 861537a..e75ede4 100644 --- a/build_wheels.py +++ b/build_wheels.py @@ -16,6 +16,11 @@ import requests +try: + import tomllib +except ImportError: # Python < 3.11 does not have tomllib built-in module + import tomli as tomllib + from colorama import Fore from packaging.requirements import InvalidRequirement from packaging.requirements import Requirement @@ -35,6 +40,8 @@ IDF_RESOURCES_URL = "https://raw.githubusercontent.com/espressif/esp-idf/" # URL for IDF master CMAKE version file IDF_MASTER_VERSION_URL = f"{IDF_RESOURCES_URL}master/tools/cmake/version.cmake" +# URL for esptool pyproject.toml file +ESPTOOL_PYPROJECT_URL = "https://raw.githubusercontent.com/espressif/esptool/master/pyproject.toml" # Minimal IDF release version to take requirements from (v{MAJOR}.{MINOR}) # Requirements from all release branches and master equal or above this will be considered @@ -151,6 +158,15 @@ def _download_branch_requirements(branch: str, idf_requirements_json: dict) -> L if check_response(res, f"Failed to download feature (requirement group) '{feature['name']}'"): requirements_txt += res.text.splitlines() print(f"Added ESP-IDF {feature['name']} requirements") + + # Download esptool requirements from pyproject.toml file + res = requests.get(ESPTOOL_PYPROJECT_URL, headers=AUTH_HEADER, timeout=10) + if check_response(res, "Failed to download esptool pyproject.toml file"): + pyproject_content = tomllib.loads(res.text) + esptool_deps = pyproject_content.get("project", {}).get("dependencies", []) + requirements_txt += [dep for dep in esptool_deps if dep not in requirements_txt] + print("Added esptool requirements") + return 
requirements_txt From d7611dd9a1e7e5978f3daed3625032f556a306aa Mon Sep 17 00:00:00 2001 From: Jakub Kocka Date: Wed, 25 Mar 2026 12:18:40 +0100 Subject: [PATCH 4/6] feat: Extending cPython builds to match all the interpreter envs --- .../build-wheels-python-dependent.yml | 10 ++- _helper_functions.py | 14 +++- build_wheels_from_file.py | 80 ++++++++++++++++++- exclude_list.yaml | 5 ++ 4 files changed, 101 insertions(+), 8 deletions(-) diff --git a/.github/workflows/build-wheels-python-dependent.yml b/.github/workflows/build-wheels-python-dependent.yml index 7764707..db6744a 100644 --- a/.github/workflows/build-wheels-python-dependent.yml +++ b/.github/workflows/build-wheels-python-dependent.yml @@ -18,6 +18,8 @@ jobs: runs-on: ${{ matrix.runner }} env: GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + # PyO3 (cryptography, etc.): allow building against CPython newer than PyO3's declared max when using stable ABI + PYO3_USE_ABI3_FORWARD_COMPATIBILITY: "1" strategy: fail-fast: false matrix: @@ -148,7 +150,7 @@ jobs: bash os_dependencies/linux_arm.sh # Source Rust environment after installation . \$HOME/.cargo/env - python build_wheels_from_file.py dependent_requirements_${{ matrix.arch }} + python build_wheels_from_file.py --force-interpreter-binary dependent_requirements_${{ matrix.arch }} " - name: Build Python dependent wheels - ARMv7 Legacy (in Docker) @@ -171,7 +173,7 @@ jobs: bash os_dependencies/linux_arm.sh # Source Rust environment after installation . 
\$HOME/.cargo/env - python build_wheels_from_file.py dependent_requirements_${{ matrix.arch }} + python build_wheels_from_file.py --force-interpreter-binary dependent_requirements_${{ matrix.arch }} " - name: Build Python dependent wheels - Linux/macOS @@ -184,11 +186,11 @@ jobs: export ARCHFLAGS="-arch x86_64" fi - python build_wheels_from_file.py dependent_requirements_${{ matrix.arch }} + python build_wheels_from_file.py --force-interpreter-binary dependent_requirements_${{ matrix.arch }} - name: Build Python dependent wheels for ${{ matrix.python-version }} - Windows if: matrix.os == 'Windows' - run: python build_wheels_from_file.py dependent_requirements_${{ matrix.arch }} + run: python build_wheels_from_file.py --force-interpreter-binary dependent_requirements_${{ matrix.arch }} - name: Upload artifacts diff --git a/_helper_functions.py b/_helper_functions.py index c858feb..ea752e1 100644 --- a/_helper_functions.py +++ b/_helper_functions.py @@ -107,11 +107,23 @@ def get_no_binary_args(requirement_name: str) -> list: return [] +def _safe_text_for_stdout(text: str) -> str: + """Avoid UnicodeEncodeError when printing pip/tool output on Windows (e.g. 
cp1252 console).""" + encoding = getattr(sys.stdout, "encoding", None) or "utf-8" + if encoding.lower() in ("utf-8", "utf8"): + return text + try: + text.encode(encoding) + return text + except UnicodeEncodeError: + return text.encode(encoding, errors="replace").decode(encoding, errors="replace") + + def print_color(text: str, color: str = Fore.BLUE): """Print colored text specified by color argument based on colorama - default color BLUE """ - print(f"{color}", f"{text}", Style.RESET_ALL) + print(f"{color}", f"{_safe_text_for_stdout(text)}", Style.RESET_ALL) def merge_requirements(requirement: Requirement, another_req: Requirement) -> Requirement: diff --git a/build_wheels_from_file.py b/build_wheels_from_file.py index c4fcb7c..8b23210 100644 --- a/build_wheels_from_file.py +++ b/build_wheels_from_file.py @@ -3,16 +3,65 @@ # # SPDX-License-Identifier: Apache-2.0 # +from __future__ import annotations + import argparse import os +import platform import subprocess import sys from colorama import Fore +from packaging.requirements import InvalidRequirement +from packaging.requirements import Requirement +from packaging.utils import canonicalize_name from _helper_functions import get_no_binary_args from _helper_functions import print_color +# Do not pass --no-binary for these in --force-interpreter-binary mode: +# - sdists whose legacy setup breaks under PEP 517 isolation (pkg_resources in isolated env). +# - sdists that fail to compile on CI when a usable wheel exists (e.g. ruamel.yaml.clib + clang). +# - PyObjC: all pyobjc / pyobjc-framework-* use pyobjc_setup.py + pkg_resources (macOS). +# - cryptography: abi3 wheels; avoid PyO3 max-Python / heavy Rust rebuilds in dependent jobs. +# - pydantic-core: maturin + jiter + PyO3 can fail from sdist on some CI combos (e.g. ARM64 3.9: +# jiter vs pyo3-ffi PyUnicode_* / extract API). Prefer compatible wheels from find-links or PyPI. 
+_FORCE_INTERPRETER_BINARY_SKIP_EXACT = frozenset( + { + canonicalize_name("cryptography"), + canonicalize_name("pydantic-core"), + canonicalize_name("protobuf"), + canonicalize_name("ruamel.yaml.clib"), + } +) + + +def _force_interpreter_skip_package(canonical_dist_name: str) -> bool: + if canonical_dist_name in _FORCE_INTERPRETER_BINARY_SKIP_EXACT: + return True + # PyObjC meta and framework bindings (pyobjc-framework-corebluetooth, etc.) + return canonical_dist_name == "pyobjc" or canonical_dist_name.startswith("pyobjc-") + + +def _force_interpreter_no_binary_args(requirement_line: str) -> list[str]: + """Return pip --no-binary for this package so pip cannot reuse e.g. cp311-abi3 wheels on 3.13.""" + line = requirement_line.strip() + if not line: + return [] + try: + req = Requirement(line) + except InvalidRequirement: + return [] + if _force_interpreter_skip_package(canonicalize_name(req.name)): + return [] + return ["--no-binary", req.name] + + +def _apply_force_interpreter_binary(cli_flag: bool) -> bool: + """Linux/macOS only: forcing sdist builds for cryptography etc. is unreliable on Windows CI.""" + return cli_flag and platform.system() != "Windows" + + parser = argparse.ArgumentParser(description="Process build arguments.") parser.add_argument( "requirements_path", @@ -36,6 +85,16 @@ action="store_true", help="CI exclude-tests mode: fail if all wheels succeed (expect some to fail, e.g. excluded packages)", ) +parser.add_argument( + "--force-interpreter-binary", + action="store_true", + help=( + "For each requirement, pass --no-binary so pip builds a wheel for the current " + "interpreter instead of reusing a compatible abi3 / older cpXY wheel from --find-links. " + "Ignored on Windows (source builds for e.g. cryptography are not used in CI there). " + "Some packages are always skipped (e.g. cryptography, pydantic-core, protobuf, PyObjC, ruamel.yaml.clib)." 
+ ), +) args = parser.parse_args() @@ -55,8 +114,16 @@ raise SystemExit(f"Python version dependent requirements directory or file not found ({e})") for requirement in requirements: + requirement = requirement.strip() + if not requirement or requirement.startswith("#"): + continue # Get no-binary args for packages that should be built from source no_binary_args = get_no_binary_args(requirement) + force_interpreter_args = ( + _force_interpreter_no_binary_args(requirement) + if _apply_force_interpreter_binary(args.force_interpreter_binary) + else [] + ) out = subprocess.run( [ @@ -64,13 +131,14 @@ "-m", "pip", "wheel", - f"{requirement}", + requirement, "--find-links", "downloaded_wheels", "--wheel-dir", "downloaded_wheels", ] - + no_binary_args, + + no_binary_args + + force_interpreter_args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, ) @@ -100,6 +168,11 @@ for requirement in in_requirements: # Get no-binary args for packages that should be built from source no_binary_args = get_no_binary_args(requirement) + force_interpreter_args = ( + _force_interpreter_no_binary_args(requirement) + if _apply_force_interpreter_binary(args.force_interpreter_binary) + else [] + ) out = subprocess.run( [ @@ -113,7 +186,8 @@ "--wheel-dir", "downloaded_wheels", ] - + no_binary_args, + + no_binary_args + + force_interpreter_args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, ) diff --git a/exclude_list.yaml b/exclude_list.yaml index a696df7..3278a6a 100644 --- a/exclude_list.yaml +++ b/exclude_list.yaml @@ -113,6 +113,11 @@ version: '<2.35.0' python: '>=3.14' +# pydantic_core: maturin sdist on CPython 3.14 + macOS/Windows fails in CI (PyO3 limited API); Linux 3.14 still built +- package_name: 'pydantic_core' + platform: ['darwin', 'win32'] + python: '==3.14' + # rpds_py supports Python 3.14 from version >= 0.26.0 (pyo3 compatibility) # https://pypi.org/project/rpds-py/#history - package_name: 'rpds_py' From 9b91cdf4c83f092c37e20edc217f082664599aa1 Mon Sep 17 00:00:00 2001 From: 
Jakub Kocka Date: Mon, 30 Mar 2026 09:46:41 +0200 Subject: [PATCH 5/6] fix: Fixed permissions to resolve non-root/root runners compatibility --- .github/workflows/build-wheels-platforms.yml | 4 ++++ .github/workflows/build-wheels-python-dependent.yml | 3 +++ 2 files changed, 7 insertions(+) diff --git a/.github/workflows/build-wheels-platforms.yml b/.github/workflows/build-wheels-platforms.yml index 6a48a5d..50965b1 100644 --- a/.github/workflows/build-wheels-platforms.yml +++ b/.github/workflows/build-wheels-platforms.yml @@ -181,6 +181,10 @@ jobs: if: matrix.os == 'Windows' run: python build_wheels.py + - name: Fix permissions on downloaded_wheels (ARMv7 Docker builds) + if: matrix.os == 'Linux ARMv7' || matrix.os == 'Linux ARMv7 Legacy' + run: sudo chown -R $USER:$USER ./downloaded_wheels + - name: Upload artifacts of downloaded_wheels directory uses: actions/upload-artifact@v4 with: diff --git a/.github/workflows/build-wheels-python-dependent.yml b/.github/workflows/build-wheels-python-dependent.yml index db6744a..89e0973 100644 --- a/.github/workflows/build-wheels-python-dependent.yml +++ b/.github/workflows/build-wheels-python-dependent.yml @@ -192,6 +192,9 @@ jobs: if: matrix.os == 'Windows' run: python build_wheels_from_file.py --force-interpreter-binary dependent_requirements_${{ matrix.arch }} + - name: Fix permissions on downloaded_wheels (ARMv7 Docker builds) + if: matrix.os == 'Linux ARMv7' || matrix.os == 'Linux ARMv7 Legacy' + run: sudo chown -R $USER:$USER ./downloaded_wheels - name: Upload artifacts uses: actions/upload-artifact@v4 From c08dd38c835bc3bae5429d507410d979367f638b Mon Sep 17 00:00:00 2001 From: Jakub Kocka Date: Mon, 30 Mar 2026 14:10:40 +0200 Subject: [PATCH 6/6] fix(exclude): correct platform+python intersection markers in YAMLListAdapter exclude_list rows with platform and python but no package version were turned into an AND of inverted markers, which incorrectly dropped the dependency on other OSes for the same Python (e.g. 
Linux 3.14 when excluding Windows/macOS 3.14). Emit De Morgan form (sys_platform != p or <inverted python marker>) per platform instead. Added pydantic_core for win32/darwin on Python 3.14 to avoid broken maturin sdists; remove the temporary skip from build_wheels_from_file. Factor python_version marker building into _python_version_marker_fragment_no_package_version and add unit tests. Update the architecture skill note accordingly. --- exclude_list.yaml | 4 ++-- test_build_wheels.py | 16 +++++++++++++ yaml_list_adapter.py | 49 ++++++++++++++++++++++++++------------------ 3 files changed, 47 insertions(+), 22 deletions(-) diff --git a/exclude_list.yaml b/exclude_list.yaml index 3278a6a..badb878 100644 --- a/exclude_list.yaml +++ b/exclude_list.yaml @@ -113,9 +113,9 @@ version: '<2.35.0' python: '>=3.14' -# pydantic_core: maturin sdist on CPython 3.14 + macOS/Windows fails in CI (PyO3 limited API); Linux 3.14 still built +# pydantic_core on CPython 3.14 + Windows/macOS: maturin sdist fails (PyO3 limited API / abi3). Linux 3.14 OK. 
- package_name: 'pydantic_core' - platform: ['darwin', 'win32'] + platform: ['win32', 'darwin'] python: '==3.14' # rpds_py supports Python 3.14 from version >= 0.26.0 (pyo3 compatibility) diff --git a/test_build_wheels.py b/test_build_wheels.py index be1f04d..23a934f 100644 --- a/test_build_wheels.py +++ b/test_build_wheels.py @@ -122,6 +122,22 @@ def test_exclude_version(self): result = self.adapter._yaml_to_requirement(yaml_list, exclude=True) self.assertEqual(result, {Requirement("numpy>=1.20")}) + def test_exclude_platform_and_python_intersection_single_os(self): + """exclude + platform + python (no package version) = drop only on that OS ∩ Python.""" + yaml_list = [{"package_name": "pydantic_core", "platform": "win32", "python": "==3.14"}] + result = self.adapter._yaml_to_requirement(yaml_list, exclude=True) + expected = Requirement('pydantic_core; (sys_platform != "win32" or (python_version != "3.14"))') + self.assertEqual(result, {expected}) + + def test_exclude_platform_and_python_intersection_two_os(self): + yaml_list = [{"package_name": "pydantic_core", "platform": ["win32", "darwin"], "python": "==3.14"}] + result = self.adapter._yaml_to_requirement(yaml_list, exclude=True) + expected = Requirement( + 'pydantic_core; (sys_platform != "win32" or (python_version != "3.14")) and ' + '(sys_platform != "darwin" or (python_version != "3.14"))' + ) + self.assertEqual(result, {expected}) + class TestYAMLListAdapterIntegration(unittest.TestCase): """Integration tests using actual YAML files.""" diff --git a/yaml_list_adapter.py b/yaml_list_adapter.py index 5cc3ee8..ebf3360 100644 --- a/yaml_list_adapter.py +++ b/yaml_list_adapter.py @@ -138,11 +138,28 @@ def _change_specifier_logic(self, spec_with_text: str) -> tuple: break return (new_ver_spec, text, ver_specifier) + def _python_version_marker_fragment_no_package_version(self, package_python, exclude: bool) -> str: + """Build ``python_version ...`` marker fragment from YAML ``python`` when there is no package 
``version``.""" + if not isinstance(package_python, list): + new_spec, text_after, old_spec = self._change_specifier_logic(package_python) + spec = new_spec if exclude else old_spec + return f"python_version {spec} '{text_after}'" + parts = [] + for elem in package_python: + new_spec, text_after, old_spec = self._change_specifier_logic(elem) + spec = new_spec if exclude else old_spec + parts.append(f"python_version {spec} '{text_after}'") + return " and ".join(parts) + def _yaml_to_requirement(self, yaml: list, exclude: bool = False) -> set: """Converts YAML defined requirement into packaging.requirements Requirement which can be directly used with pip. Markers (platform and python) are ANDed between and multiple values of the marker are ORed between. + For exclude=True **without** a package ``version``, platform + python mean “exclude on this OS **and** + this Python” (intersection): the keep-marker is ``(sys_platform != p or <inverted python marker>)`` per + platform, ANDed across listed platforms (De Morgan). Rows **with** a package version keep the + split-requirement behaviour documented below. When exclude is set to True, the logic of the Requirement is changed to be excluded by pip. To preserve the logic, another requirement needs to be added @@ -187,6 +204,15 @@ def _yaml_to_requirement(self, yaml: list, exclude: bool = False) -> set: package_platform = "" package_python = package["python"] if "python" in package else "" + # Intersection exclude: "drop on (platform ∈ P) ∧ (python matches)" without a package version. + # Previous AND of inverted markers wrongly dropped e.g. Linux + same Python. 
+ if exclude and package_platform and package_python and not package_version: + py_frag = self._python_version_marker_fragment_no_package_version(package_python, exclude=True) + plfs = list(package_platform) if isinstance(package_platform, list) else [package_platform] + terms = [f"(sys_platform != '{plf}' or ({py_frag}))" for plf in plfs] + requirements_set.add(Requirement(f"{package['package_name']}; " + " and ".join(terms))) + continue + requirement_str_list = [f"{package['package_name']}"] # if package has version specifier, process it and add to the requirement @@ -254,26 +280,9 @@ def _yaml_to_requirement(self, yaml: list, exclude: bool = False) -> set: # if package has python markers defined, add it to the requirement if package_python and not package_version: - if not isinstance(package_python, list): - new_spec, text_after, old_spec = self._change_specifier_logic(package_python) - requirement_str_list.append( - ( - f"python_version {new_spec} '{text_after}'" - if exclude - else f"python_version {old_spec} '{text_after}'" - ) - ) - - else: # list of python versions defined - python_list = [] - for elem in package_python: - new_spec, text_after, old_spec = self._change_specifier_logic(elem) - if exclude: - python_list.append(f"python_version {new_spec} '{text_after}'") - else: - python_list.append(f"python_version {old_spec} '{text_after}'") - - requirement_str_list.append(" and ".join(python_list)) + requirement_str_list.append( + self._python_version_marker_fragment_no_package_version(package_python, exclude) + ) if package_python and package_version: if not isinstance(package_python, list):