6 changes: 6 additions & 0 deletions .github/workflows/build-wheels-defined.yml
@@ -254,10 +254,13 @@ jobs:
-w /work \
-e GH_TOKEN="${GH_TOKEN}" \
-e PIP_NO_CACHE_DIR=1 \
-e PIP_INDEX_URL=https://www.piwheels.org/simple \
-e PIP_EXTRA_INDEX_URL=https://pypi.org/simple \
python:${{ matrix.python-version }}-bookworm \
bash -c "
set -e
python --version
source os_dependencies/linux_armv7_docker_prepare.sh
# Install pip packages without cache to reduce memory usage
python -m pip install --no-cache-dir --upgrade pip
python -m pip install --no-cache-dir -r build_requirements.txt
@@ -339,10 +342,13 @@ jobs:
-w /work \
-e GH_TOKEN="${GH_TOKEN}" \
-e PIP_NO_CACHE_DIR=1 \
-e PIP_INDEX_URL=https://www.piwheels.org/simple \
-e PIP_EXTRA_INDEX_URL=https://pypi.org/simple \
python:${{ matrix.python-version }}-bullseye \
bash -c "
set -e
python --version
source os_dependencies/linux_armv7_docker_prepare.sh
# Install pip packages without cache to reduce memory usage
python -m pip install --no-cache-dir --upgrade pip
python -m pip install --no-cache-dir -r build_requirements.txt
6 changes: 6 additions & 0 deletions .github/workflows/build-wheels-platforms.yml
@@ -127,10 +127,13 @@ jobs:
-e MIN_IDF_MINOR_VERSION=${{ needs.get-supported-versions.outputs.min_idf_minor_version }} \
-e GH_TOKEN="${GH_TOKEN}" \
-e PIP_NO_CACHE_DIR=1 \
-e PIP_INDEX_URL=https://www.piwheels.org/simple \
-e PIP_EXTRA_INDEX_URL=https://pypi.org/simple \
python:${{ matrix.python-version }}-bookworm \
bash -c "
set -e
python --version
source os_dependencies/linux_armv7_docker_prepare.sh
# Install pip packages without cache to reduce memory usage
python -m pip install --no-cache-dir --upgrade pip
python -m pip install --no-cache-dir -r build_requirements.txt
@@ -152,10 +155,13 @@ jobs:
-e MIN_IDF_MINOR_VERSION=${{ needs.get-supported-versions.outputs.min_idf_minor_version }} \
-e GH_TOKEN="${GH_TOKEN}" \
-e PIP_NO_CACHE_DIR=1 \
-e PIP_INDEX_URL=https://www.piwheels.org/simple \
-e PIP_EXTRA_INDEX_URL=https://pypi.org/simple \
python:${{ matrix.python-version }}-bullseye \
bash -c "
set -e
python --version
source os_dependencies/linux_armv7_docker_prepare.sh
# Install pip packages without cache to reduce memory usage
python -m pip install --no-cache-dir --upgrade pip
python -m pip install --no-cache-dir -r build_requirements.txt
6 changes: 6 additions & 0 deletions .github/workflows/build-wheels-python-dependent.yml
@@ -140,10 +140,13 @@ jobs:
-e PYO3_USE_ABI3_FORWARD_COMPATIBILITY=1 \
-e GH_TOKEN="${GH_TOKEN}" \
-e PIP_NO_CACHE_DIR=1 \
-e PIP_INDEX_URL=https://www.piwheels.org/simple \
-e PIP_EXTRA_INDEX_URL=https://pypi.org/simple \
python:${{ matrix.python-version }}-bookworm \
bash -c "
set -e
python --version
source os_dependencies/linux_armv7_docker_prepare.sh
# Install pip packages without cache to reduce memory usage
python -m pip install --no-cache-dir --upgrade pip
python -m pip install --no-cache-dir -r build_requirements.txt
@@ -163,10 +166,13 @@ jobs:
-e PYO3_USE_ABI3_FORWARD_COMPATIBILITY=1 \
-e GH_TOKEN="${GH_TOKEN}" \
-e PIP_NO_CACHE_DIR=1 \
-e PIP_INDEX_URL=https://www.piwheels.org/simple \
-e PIP_EXTRA_INDEX_URL=https://pypi.org/simple \
python:${{ matrix.python-version }}-bullseye \
bash -c "
set -e
python --version
source os_dependencies/linux_armv7_docker_prepare.sh
# Install pip packages without cache to reduce memory usage
python -m pip install --no-cache-dir --upgrade pip
python -m pip install --no-cache-dir -r build_requirements.txt
6 changes: 6 additions & 0 deletions .github/workflows/test-wheels-install.yml
@@ -93,7 +93,9 @@ jobs:
-w /work \
python:${{ matrix.python-version }}-bookworm \
bash -c "
set -e
python --version
source os_dependencies/linux_armv7_docker_prepare.sh
python -m pip install --upgrade pip
pip install -r build_requirements.txt
python test_wheels_install.py
@@ -107,12 +109,16 @@
-w /work \
python:${{ matrix.python-version }}-bullseye \
bash -c "
set -e
python --version
source os_dependencies/linux_armv7_docker_prepare.sh
python -m pip install --upgrade pip
pip install -r build_requirements.txt
python test_wheels_install.py
"

# After test_wheels_install.py, ./downloaded_wheels contains only wheels for this
# matrix Python + platform (see prune step in test_wheels_install.main).
- name: Upload tested wheels
uses: actions/upload-artifact@v4
with:
26 changes: 23 additions & 3 deletions .github/workflows/wheels-repair.yml
@@ -161,6 +161,9 @@ jobs:
run: |
docker run --rm \
--platform ${{ matrix.docker_platform }} \
-e AUDITWHEEL_PLAT=manylinux_2_36_armv7l \
-e AUDITWHEEL_ONLY_PLAT=1 \
-e AUDITWHEEL_ALLOW_LINUX_TAG=1 \
-v $(pwd):/work \
-w /work \
${{ matrix.docker_image }} \
@@ -177,6 +180,9 @@
run: |
docker run --rm \
--platform ${{ matrix.docker_platform }} \
-e AUDITWHEEL_PLAT=manylinux_2_31_armv7l \
-e AUDITWHEEL_ONLY_PLAT=1 \
-e AUDITWHEEL_ALLOW_LINUX_TAG=1 \
-v $(pwd):/work \
-w /work \
${{ matrix.docker_image }} \
@@ -201,12 +207,26 @@
needs: repair-wheels
runs-on: ubuntu-latest
steps:
- name: Download all repaired wheels
- name: Checkout repository
uses: actions/checkout@v4

# Download each wheels-repaired-* artifact into its own subdirectory so
# same-named wheels from ARMv7 vs ARMv7 Legacy are not silently overwritten
# before collision detection or S3 upload (see README: ARMv7 wheel collisions).
- name: Download all repaired wheels (per-artifact subdirectories)
uses: actions/download-artifact@v4
with:
pattern: wheels-repaired-*
path: ./all_wheels
merge-multiple: true
path: ./all_wheels_staging
merge-multiple: false

- name: Check for duplicate wheel basenames across lineages
run: python3 check_wheel_collisions.py ./all_wheels_staging

- name: Flatten merged wheels directory
run: |
mkdir -p ./all_wheels
find ./all_wheels_staging -type f -name '*.whl' -exec cp -f {} ./all_wheels/ \;

- name: List merged wheels
run: |
25 changes: 25 additions & 0 deletions README.md
@@ -146,6 +146,18 @@ The repair tools are used after build to link and bundle all the needed librarie

This logic is done by the [repair workflow](./.github/workflows/wheels-repair.yml) and the [`repair_wheels.py` script](./repair_wheels.py)

### ARMv7 vs ARMv7 Legacy: same wheel filename, different binaries

`Linux ARMv7` and `Linux ARMv7 Legacy` can both produce a wheel whose **filename is identical** (same PEP 425 tags) while the **ELF contents differ** (different glibc/OpenSSL/Rust toolchain lineage). **Note:** `wheels-download-directory-*` CI artifacts are the **pre-repair** build outputs; comparing those can still show identical names until the [repair workflow](./.github/workflows/wheels-repair.yml) runs. Two bad outcomes follow if that is not handled after repair/merge:

1. **Artifact merge / local flatten** — downloading multiple `wheels-repaired-*` artifacts into one directory with `merge-multiple: true` can make the second file **silently overwrite** the first on disk before any upload runs.
2. **S3 upload** — [`upload_wheels.py`](./upload_wheels.py) publishes to `pypi/<package>/<wheel-filename>`. Uploading a second wheel with the **same key** replaces the object; clients then see whichever build ran last, which can surface as import crashes or segfaults.

Mitigations in this repo:

- Repair sets **`AUDITWHEEL_PLAT`** and **`AUDITWHEEL_ONLY_PLAT`** per lineage (`manylinux_2_36_armv7l` vs `manylinux_2_31_armv7l`) so that [`repair_wheels.py`](./repair_wheels.py) runs `auditwheel repair --plat ... --only-plat` (sketched below) and the emitted wheels get **distinct single-tag filenames** when auditwheel supports those tags. When **`AUDITWHEEL_PLAT` is set**, ARMv7 “libc detection failed” outcomes are **not** treated as non-fatal skips, since that would leave identical filenames across lineages.
- The repair workflow merges repaired artifacts using **per-artifact subdirectories**, then runs [`check_wheel_collisions.py`](./check_wheel_collisions.py) to **fail CI** if the same `*.whl` basename appears with **different contents** across lineages, before flattening for tests/upload.

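A minimal sketch of what the first mitigation amounts to inside the repair container; the exact invocation lives in [`repair_wheels.py`](./repair_wheels.py), and the directory and wheel names below are placeholders:

```bash
# ARMv7 (Bookworm) lineage; the ARMv7 Legacy job sets manylinux_2_31_armv7l instead.
export AUDITWHEEL_PLAT=manylinux_2_36_armv7l
export AUDITWHEEL_ONLY_PLAT=1

# Both lineages start from the same pre-repair filename, e.g. (hypothetical package)
#   somepkg-1.0-cp311-cp311-linux_armv7l.whl
# and, once auditwheel applies the per-lineage policy, end up with distinct names:
#   somepkg-1.0-cp311-cp311-manylinux_2_36_armv7l.whl vs ...-manylinux_2_31_armv7l.whl
auditwheel repair --plat "${AUDITWHEEL_PLAT}" --only-plat \
  -w ./repaired_wheels ./downloaded_wheels/somepkg-1.0-cp311-cp311-linux_armv7l.whl
```
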
## Activity Diagram
The main file is `build-wheels-platforms.yml`, which is scheduled to run periodically to build Python wheels for the requirements of all [ESP-IDF]-supported versions.

@@ -167,4 +179,17 @@ Docker files are in its own repository where there are build and published from.
- For older ARMv7 operating systems
- For packages requiring glibc 2.31

### ARMv7: prefer piwheels for resolution

For ARMv7 (and ARMv7 Legacy) environments, you may want to prefer [piwheels](https://www.piwheels.org/) as the primary index and use Espressif's index as a secondary source:

```bash
python -m pip install --index-url https://www.piwheels.org/simple --extra-index-url https://dl.espressif.com/pypi/ <package>
```

This repository's ARMv7 CI workflows also set these as `PIP_INDEX_URL` / `PIP_EXTRA_INDEX_URL` inside the ARMv7 Docker builds.

**Warning:** piwheels wheels may rely on system-provided shared libraries (i.e., they may not bundle `.libs/`). If a target OS is missing those libraries or ships an incompatible version, imports may fail at runtime.
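
One quick way to check whether a particular wheel bundles its libraries (a sketch; the wheel filename is a placeholder):

```bash
# Bundled wheels carry their shared objects under <pkg>.libs/ inside the archive;
# no such entries usually means the wheel expects the libraries from the OS.
python -m zipfile -l pillow-10.3.0-cp311-cp311-linux_armv7l.whl | grep '\.libs/' \
  || echo "no bundled .libs/ found"
```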

[ESP-IDF]: https://github.com/espressif/esp-idf
33 changes: 19 additions & 14 deletions _helper_functions.py
@@ -37,19 +37,10 @@
from packaging.version import Version
from packaging.version import parse as parse_version

# Packages that should be built from source on Linux to ensure correct library linking
# These packages often have pre-built wheels on PyPI that link against different library versions
# NOTE: This only applies to Linux (especially ARM) - Windows and macOS pre-built wheels work fine
# NOTE: Do NOT add packages with Rust components (cryptography, pynacl, bcrypt) here
# as they have complex build requirements and may not support all Python versions
FORCE_SOURCE_BUILD_PACKAGES_LINUX = [
"cffi",
"pillow",
"pyyaml",
"brotli",
"greenlet",
"bitarray",
]
# Linux ``--no-binary`` names: one per line in ``force_no_binary_linux.txt`` (also ``PIP_NO_BINARY`` in ARMv7 Docker).

_REPO_ROOT = Path(__file__).resolve().parent
FORCE_NO_BINARY_LINUX_FILE = "force_no_binary_linux.txt"

EXCLUDE_LIST_PATH = "exclude_list.yaml"

@@ -255,6 +246,20 @@ def exclude_entry_applies_to_platform(entry: dict, current_platform: str) -> boo
return False


def load_force_no_binary_linux_names(repo_root: Path | None = None) -> list[str]:
"""Package names for Linux ``--no-binary`` / ``PIP_NO_BINARY`` (``force_no_binary_linux.txt``)."""
root = repo_root if repo_root is not None else _REPO_ROOT
path = root / FORCE_NO_BINARY_LINUX_FILE
out: list[str] = []
for line in path.read_text(encoding="utf-8").splitlines():
line = line.split("#", 1)[0].strip()
if line:
out.append(line)
if not out:
raise ValueError(f"{path}: need at least one non-comment package name")
return out


def get_no_binary_args(requirement_name: str) -> list:
"""Get --no-binary arguments if this package should be built from source.

@@ -277,7 +282,7 @@ def get_no_binary_args(requirement_name: str) -> list:
return []
pkg_name = match.group(1).lower().replace("-", "_")

for pkg in FORCE_SOURCE_BUILD_PACKAGES_LINUX:
for pkg in load_force_no_binary_linux_names(_REPO_ROOT):
if pkg.lower().replace("-", "_") == pkg_name:
return ["--no-binary", match.group(1)]
return []
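# Illustration (hypothetical requirement string, assuming the function's Linux-only
# check, not shown in this hunk, passes): get_no_binary_args("pillow==10.3.0")
# returns ["--no-binary", "pillow"] because "pillow" is listed in
# force_no_binary_linux.txt; a requirement whose name is not in that file returns [].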
78 changes: 78 additions & 0 deletions check_wheel_collisions.py
@@ -0,0 +1,78 @@
#
# SPDX-FileCopyrightText: 2026 Espressif Systems (Shanghai) CO LTD
#
# SPDX-License-Identifier: Apache-2.0
#
"""Detect duplicate *.whl basenames with different file contents under a tree.

Used after downloading per-arch ``wheels-repaired-*`` artifacts into separate
subdirectories (``merge-multiple: false``) so a filesystem flatten step cannot
hide ARMv7 vs ARMv7 Legacy collisions before upload to S3.
"""

from __future__ import annotations

import hashlib
import sys

from collections import defaultdict
from pathlib import Path


def _sha256_file(path: Path, chunk_size: int = 1024 * 1024) -> str:
h = hashlib.sha256()
with path.open("rb") as f:
while True:
b = f.read(chunk_size)
if not b:
break
h.update(b)
return h.hexdigest()


def collect_collision_errors(root: Path) -> list[str]:
"""Return human-readable error lines; empty if OK."""
wheels: list[Path] = []
for p in sorted(root.rglob("*.whl")):
if p.is_file():
wheels.append(p)

by_name: defaultdict[str, list[Path]] = defaultdict(list)
for p in wheels:
by_name[p.name].append(p)

errors: list[str] = []
for name, paths in sorted(by_name.items()):
if len(paths) < 2:
continue
by_digest: defaultdict[str, list[Path]] = defaultdict(list)
for p in paths:
by_digest[_sha256_file(p)].append(p)
if len(by_digest) == 1:
# Identical content in multiple artifact trees — unusual but safe.
continue
lines = [f"Duplicate wheel basename with different contents: {name}"]
for p in paths:
lines.append(f" - {p} sha256={_sha256_file(p)}")
errors.append("\n".join(lines))
return errors


def main(argv: list[str]) -> int:
root = Path(argv[1] if len(argv) > 1 else ".").resolve()
if not root.is_dir():
print(f"Error: not a directory: {root}", file=sys.stderr)
return 2

errors = collect_collision_errors(root)
if errors:
print("Wheel basename collision check failed:\n", file=sys.stderr)
for block in errors:
print(block, file=sys.stderr)
print(file=sys.stderr)
return 1
return 0


if __name__ == "__main__":
raise SystemExit(main(sys.argv))
9 changes: 9 additions & 0 deletions force_no_binary_linux.txt
@@ -0,0 +1,9 @@
# Linux: pip PIP_NO_BINARY / per-wheel --no-binary (ARMv7 Docker + build_wheels on Linux)
cffi
# PyPI maturin manylinux wheels need newer glibc than Debian bookworm/bullseye armhf; sdist uses setuptools-rust.
maturin
pillow
pyyaml
brotli
greenlet
bitarray
4 changes: 3 additions & 1 deletion include_list.yaml
@@ -1,5 +1,7 @@
# List of Python packages to additionally include to the automatically assembled requirements
#"For assembled <main requirements> additionally include <package_name> with <version> on <platform> for <python> version".
# "For assembled <main requirements> additionally include <package_name> with <version> on <platform> for <python> version".
#
# Linux packages to build from source (pip --no-binary): see force_no_binary_linux.txt

# include_list template
#- package_name: '<name_of_package>'
9 changes: 9 additions & 0 deletions os_dependencies/linux_arm.sh
@@ -85,6 +85,15 @@ if [ "$arch" == "armv7l" ]; then
apt remove --auto-remove --purge rust-gdb rustc libstd-rust-dev libstd-rust-1.48 2>/dev/null || true
# install Rust dependencies
apt-get install -y libssl-dev libffi-dev gcc musl-dev
# Match linux_armv7_docker_prepare.sh: Bookworm armhf has libffi.so.8 only; binary cffi wheels
# may still dlopen libffi.so.7 (pip build isolation, piwheels/PyPI manylinux tags).
for _ffi_libdir in /usr/lib/arm-linux-gnueabihf /usr/lib/arm-linux-gnueabi; do
if [ -d "$_ffi_libdir" ] && [ -f "$_ffi_libdir/libffi.so.8" ] && [ ! -e "$_ffi_libdir/libffi.so.7" ]; then
ln -sfn libffi.so.8 "$_ffi_libdir/libffi.so.7"
ldconfig 2>/dev/null || true
break
fi
done
# install Rust
curl --proto '=https' --tlsv1.3 -sSf https://sh.rustup.rs | bash -s -- -y
. $HOME/.cargo/env