[v0.1.3] 2025-09-22 #154
Workflow file for this run
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
# Wheel build workflow: builds native-extension wheels via cibuildwheel on
# Linux/Windows/macOS, then tests them on a matrix of Python versions.
name: Build Wheels

on:
  push:
    tags: ['v*', 'testing-v*']
    branches: [master]
  # Allow manual runs from the Actions tab.
  workflow_dispatch:
jobs:
  build_wheels:
    name: Build wheels on ${{ matrix.os }}
    runs-on: ${{ matrix.os }}
    timeout-minutes: 45
    strategy:
      matrix:
        include:
          - os: ubuntu-22.04
            arch: x86_64
          - os: windows-2022
            arch: AMD64
          - os: macos-13
            arch: x86_64
          - os: macos-14
            arch: arm64
    steps:
      - name: Checkout PyHelios
        uses: actions/checkout@v4
        with:
          submodules: recursive
          fetch-depth: 0  # Required for setuptools-scm
          # Ensure all tags are fetched
          fetch-tags: true

      - name: Debug version detection
        shell: bash
        run: |
          echo "=== Git tag information ==="
          git tag --list | head -10
          echo "Current HEAD: $(git rev-parse HEAD)"
          echo "Describe: $(git describe --tags --always --dirty)"
          echo "=== Setuptools-scm version detection ==="
          python -m pip install setuptools-scm
          python -c "from setuptools_scm import get_version; print(f'Detected version: {get_version()}')" || echo "Version detection failed"

      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: '3.11'  # Updated for cibuildwheel 3.x compatibility

      - name: Setup MSVC (Windows)
        if: runner.os == 'Windows'
        uses: ilammy/msvc-dev-cmd@v1

      - name: Install Helios dependencies (macOS)
        if: runner.os == 'macOS'
        run: |
          cd helios-core/utilities
          # Install base + visualization dependencies (no GPU/CUDA for macOS builds)
          bash dependencies.sh BASE
          bash dependencies.sh VIS

      - name: Debug environment (macOS)
        if: runner.os == 'macOS'
        run: |
          echo "=== Directory structure ==="
          ls -la
          echo "=== PyHelios build scripts ==="
          ls -la build_scripts/
          echo "=== Helios core ==="
          ls -la helios-core/ || echo "helios-core not found"
          echo "=== Python version and location ==="
          python --version
          which python
          echo "=== Environment ==="
          env | grep -E "(PYTHON|PATH)" | head -10

      - name: Install CUDA Toolkit (Windows)
        if: runner.os == 'Windows'
        id: cuda
        uses: Jimver/cuda-toolkit@v0.2.24
        with:
          cuda: '12.6.2'
          method: network
          # Version 0.2.24 works correctly on Windows
          use-github-cache: true
          use-local-cache: true
          log-file-suffix: log.txt

      - name: Validate CUDA presence (Windows)
        if: runner.os == 'Windows'
        shell: cmd
        run: |
          echo CUDA_PATH=%CUDA_PATH%
          where nvcc
          nvcc -V
          if not exist "%CUDA_PATH%\lib\x64\cudart.lib" (echo cudart.lib missing & exit /b 1)

      - name: Install cibuildwheel and repair tools
        shell: bash
        run: |
          python -m pip install --upgrade pip
          python -m pip install 'cibuildwheel>=2.23.0'
          # Install platform-specific wheel repair tools
          if [ "${{ runner.os }}" = "Linux" ]; then
            python -m pip install auditwheel
          elif [ "${{ runner.os }}" = "macOS" ]; then
            python -m pip install delocate
          fi

      # NOTE(review): renamed from "Debug version detection" to avoid a duplicate
      # step name with the earlier step of the same name.
      - name: Debug version detection (pre-build)
        shell: bash
        run: |
          echo "=== Version Detection Debug ==="
          echo "Git describe: $(git describe --tags --long --dirty)"
          echo "Git tags:"
          git tag -l --sort=-version:refname | head -10
          echo "Git log (recent commits):"
          git log --oneline --decorate -5
          pip install setuptools-scm
          echo "setuptools-scm version: $(python -m setuptools_scm)"
          echo "================================"

      - name: Free disk space for CUDA installation (Linux only)
        if: runner.os == 'Linux'
        uses: jlumbroso/free-disk-space@main
        with:
          # Free up ~31GB in 3 minutes - essential for CUDA toolkit installation
          android: true          # Frees ~14GB Android SDK/NDK
          dotnet: false          # Frees ~2.7GB .NET runtime
          haskell: false         # Frees ~5.2GB Haskell toolchain
          large-packages: false  # Frees ~5.3GB various large packages
          tool-cache: false      # Keep Python/Node tools needed for build
          swap-storage: false    # Frees ~4GB swap space

      - name: Build manylinux CUDA shim (adds working /bin/sh and /usr/bin/sh)
        if: runner.os == 'Linux'
        shell: bash
        run: |
          cat > Dockerfile.ci <<'EOF'
          FROM sameli/manylinux2014_x86_64_cuda_11.8:latest
          # Ensure bash is present
          RUN yum install -y bash coreutils && \
              # Provide sh in both canonical locations that various tools try first
              if [ ! -e /bin/sh ]; then ln -s /bin/bash /bin/sh; fi && \
              if [ ! -e /usr/bin/sh ]; then ln -s /bin/bash /usr/bin/sh; fi && \
              # Make sure containers launched without a login shell still see /bin and /usr/bin
              echo 'PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin' > /etc/environment
          EOF
          docker build -t pyhelios/manylinux2014_x86_64_cuda_11.8_shim -f Dockerfile.ci .
          # Prove sh resolves in the built image before we hand it to cibuildwheel
          docker run --rm pyhelios/manylinux2014_x86_64_cuda_11.8_shim /usr/bin/env sh -c 'echo "[shim] sh works: $0"'
          docker run --rm pyhelios/manylinux2014_x86_64_cuda_11.8_shim sh -c 'echo "[shim] PATH=$PATH"; command -v sh; ls -l /bin/sh /usr/bin/sh'

      - name: Build wheels
        run: python -m cibuildwheel --output-dir wheelhouse
        timeout-minutes: 30  # Single timeout for the entire step
        env:
          # Build for Python 3.8+ on all platforms
          CIBW_BUILD: cp38-* cp39-* cp310-* cp311-* cp312-*
          # Fail fast on build errors instead of continuing to next Python version
          CIBW_BUILD_VERBOSITY: 1
          # Skip 32-bit builds and Python 3.8 on ARM64 due to cross-compilation issues
          # Python 3.8 lacks official ARM64 support, causing OpenMP linking issues in cibuildwheel
          CIBW_SKIP: "*-win32 *-manylinux_i686 *-musllinux* cp38-macosx_arm64"
          # Architecture configuration based on runner
          CIBW_ARCHS: ${{ matrix.arch }}
          # Use stable manylinux images to avoid gateway timeout issues
          CIBW_MANYLINUX_X86_64_IMAGE: pyhelios/manylinux2014_x86_64_cuda_11.8_shim
          CIBW_MANYLINUX_AARCH64_IMAGE: manylinux2014
          # Platform-specific build commands with explicit plugin selection for consistency
          # macOS: Include visualizer but exclude GPU plugins due to cross-compilation issues
          CIBW_BEFORE_BUILD_MACOS: |
            if [ ! -f pyhelios_build/build/lib/libhelios.dylib ]; then
              echo "[macOS] First ABI: building native libs..."
              python build_scripts/prepare_wheel.py \
                --buildmode release \
                --nogpu \
                --verbose \
                --clean \
                --cmake-args=-DENABLE_OPENMP=OFF \
                --cmake-args=-DCMAKE_IGNORE_PATH=/opt/homebrew \
                --cmake-args=-DCMAKE_IGNORE_PREFIX_PATH=/opt/homebrew \
                --cmake-args=-DCMAKE_SYSTEM_IGNORE_PATH=/opt/homebrew
            else
              echo "[macOS] Reusing previously built native libs; skipping rebuild."
            fi
          CIBW_ENVIRONMENT_MACOS: "MACOSX_DEPLOYMENT_TARGET=${{ matrix.arch == 'x86_64' && '13.0' || '14.0' }} CMAKE_OSX_ARCHITECTURES=${{ matrix.arch }} SYSTEM_VERSION_COMPAT=0"
          # Ensure CUDA is visible inside cibuildwheel's Windows build environment
          CIBW_ENVIRONMENT_PASS_WINDOWS: "CUDA_PATH CUDA_HOME"
          # Folded scalar (>) joins these lines into one cmd command chained with &&,
          # which is what cmd.exe needs (it has no multi-line && continuation).
          CIBW_BEFORE_BUILD_WINDOWS: >
            echo "=== CIBW_BEFORE_BUILD_WINDOWS Started ===" &&
            set "PATH=%CUDA_PATH%\bin;%PATH%" &&
            echo "CUDA_PATH: %CUDA_PATH%" &&
            echo "PATH updated with CUDA bin directory" &&
            (where nvcc || echo "nvcc not found in PATH") &&
            (nvcc -V || echo "nvcc -V failed") &&
            set "CMAKE_RC_COMPILER=" &&
            set "CMAKE_CUDA_COMPILER=%CUDA_PATH%\bin\nvcc.exe" &&
            set "PYHELIOS_CUDA_ARCHITECTURES=50;60;70;75;80;86;90" &&
            echo "Environment variables set, starting native build..." &&
            python build_scripts/prepare_wheel.py --buildmode release --verbose &&
            echo "=== CIBW_BEFORE_BUILD_WINDOWS Completed ==="
          CIBW_SHELL_LINUX: "/usr/bin/bash -eo pipefail -c"
          CIBW_ENVIRONMENT_PASS_LINUX: "SHELL"
          # NOTE(review): literal scalar (|) instead of folded (>) — folding would
          # collapse the if/elif/fi onto one line and produce invalid shell.
          # Trailing && still chains commands across lines in POSIX shell.
          CIBW_BEFORE_ALL_LINUX: |
            if [ -f /opt/rh/devtoolset-10/enable ]; then
              echo "Enabling devtoolset-10 for C++17 support" && source /opt/rh/devtoolset-10/enable
            elif [ -f /opt/rh/devtoolset-9/enable ]; then
              echo "Enabling devtoolset-9 for C++17 support" && source /opt/rh/devtoolset-9/enable
            elif [ -f /opt/rh/devtoolset-8/enable ]; then
              echo "Enabling devtoolset-8 for C++17 support" && source /opt/rh/devtoolset-8/enable
            else
              echo "ERROR: No devtoolset found - C++17 compilation will fail"
              echo "Available devtoolsets:" && ls -la /opt/rh/ | grep devtoolset || echo "None found"
              echo "System GCC version: $(gcc --version | head -1)"
              if ! gcc --version | grep -q -E "(9\.|10\.|11\.|12\.|13\.|14\.)" && ! gcc --version | grep -q "8\.[3-9]"; then
                echo "FATAL: System GCC too old for C++17, devtoolset required" && exit 1
              fi
            fi &&
            echo "Active compiler: $(which gcc)" &&
            echo "Compiler version: $(gcc --version | head -1)" &&
            yum install -y zlib-devel mesa-libGL-devel mesa-libEGL-devel libX11-devel libXrandr-devel mesa-libGLU-devel libXinerama-devel libXcursor-devel libXi-devel libXxf86vm-devel &&
            export PKG_CONFIG_PATH="/usr/lib64/pkgconfig:/usr/lib/pkgconfig:${PKG_CONFIG_PATH}" &&
            export CMAKE_PREFIX_PATH="/usr:${CMAKE_PREFIX_PATH}" &&
            /opt/python/cp39-cp39/bin/python build_scripts/prepare_wheel.py --buildmode release --verbose --clean
          # Per-ABI: do not rebuild; only stage/copy already built artifacts
          CIBW_BEFORE_BUILD_LINUX: |
            if [ -f /opt/rh/devtoolset-10/enable ]; then
              source /opt/rh/devtoolset-10/enable
            elif [ -f /opt/rh/devtoolset-9/enable ]; then
              source /opt/rh/devtoolset-9/enable
            elif [ -f /opt/rh/devtoolset-8/enable ]; then
              source /opt/rh/devtoolset-8/enable
            else
              echo "ERROR: No devtoolset found for per-ABI build"
              if ! gcc --version | grep -q -E "(9\.|10\.|11\.|12\.|13\.|14\.)" && ! gcc --version | grep -q "8\.[3-9]"; then
                echo "FATAL: System GCC too old for C++17" && exit 1
              fi
            fi &&
            export PKG_CONFIG_PATH="/usr/lib64/pkgconfig:/usr/lib/pkgconfig:${PKG_CONFIG_PATH}" &&
            export CMAKE_PREFIX_PATH="/usr:${CMAKE_PREFIX_PATH}" &&
            echo "Reusing native build; packaging only." &&
            /opt/python/cp39-cp39/bin/python build_scripts/prepare_wheel.py --buildmode release --verbose
          # Manylinux-specific environment for zlib compatibility and OpenGL discovery
          # Use documented $PATH expansion to append devtoolset without overriding cibuildwheel's Python
          CIBW_ENVIRONMENT_LINUX: "SHELL=/usr/bin/bash PATH=$PATH:/opt/rh/devtoolset-10/root/usr/bin:/opt/rh/devtoolset-9/root/usr/bin:/opt/rh/devtoolset-8/root/usr/bin CC=/opt/rh/devtoolset-10/root/usr/bin/gcc CXX=/opt/rh/devtoolset-10/root/usr/bin/g++ CFLAGS='-D_GNU_SOURCE -I/usr/include' CXXFLAGS='-D_GNU_SOURCE -D_GLIBCXX_USE_CXX11_ABI=0 -I/usr/include' CMAKE_C_COMPILER=/opt/rh/devtoolset-10/root/usr/bin/gcc CMAKE_CXX_COMPILER=/opt/rh/devtoolset-10/root/usr/bin/g++ CMAKE_C_FLAGS='-D_GNU_SOURCE' CMAKE_CXX_FLAGS='-D_GNU_SOURCE -D_GLIBCXX_USE_CXX11_ABI=0' PKG_CONFIG_PATH=/usr/lib64/pkgconfig:/usr/lib/pkgconfig CMAKE_PREFIX_PATH=/usr"
          # Comprehensive wheel testing using pytest suite
          # NOTE(review): this generic command is shadowed on Linux/macOS/Windows by
          # the platform-specific CIBW_TEST_COMMAND_* values below; it only applies
          # to platforms without a specific override.
          CIBW_TEST_COMMAND: |
            python -c "
            import sys, os
            print(f'=== cibuildwheel Test Environment ===')
            print(f'Python: {sys.executable}')
            print(f'Platform: {sys.platform}')
            print(f'Working directory: {os.getcwd()}')
            try:
                import pyhelios
                print(f'[SUCCESS] PyHelios3D {pyhelios.__version__} imported successfully')
                # Test native library functionality (most critical test)
                from pyhelios.plugins import get_plugin_info
                info = get_plugin_info()
                print(f'[SUCCESS] Platform: {info[\"platform\"]}')
                mock_mode = info.get('is_mock', True)
                print(f'[SUCCESS] Mock mode: {mock_mode}')
                if mock_mode:
                    print('[FAILED] Mock mode detected - this violates fail-fast policy!')
                    print('Mock mode indicates native libraries are not properly packaged in wheel.')
                    print('This must be fixed before deployment.')
                    raise RuntimeError('Mock mode detected in wheel testing - native libraries missing')
                else:
                    lib_path = info.get('library_path', 'Unknown')
                    print(f'[SUCCESS] Native library loaded: {lib_path}')
                # Asset validation (non-critical, allow failure)
                try:
                    from pyhelios.assets import get_asset_manager
                    manager = get_asset_manager()
                    helios_build = manager._get_helios_build_path()
                    if helios_build:
                        print(f'[INFO] HELIOS_BUILD assets: {helios_build}')
                    else:
                        print('[INFO] HELIOS_BUILD assets not found (acceptable in wheel testing)')
                except Exception as e:
                    print(f'[INFO] Asset validation skipped: {e}')
                print('[SUCCESS] cibuildwheel test completed successfully')
            except Exception as e:
                print(f'[FAILED] cibuildwheel test FAILED: {e}')
                import traceback
                traceback.print_exc()
                raise
            "
          # Platform-specific pytest commands (Windows doesn't support --forked)
          CIBW_TEST_COMMAND_LINUX: |
            python -c "
            import sys, os
            print(f'=== cibuildwheel Test Environment ===')
            print(f'Python: {sys.executable}')
            print(f'Platform: {sys.platform}')
            print(f'Working directory: {os.getcwd()}')
            try:
                import pyhelios
                print(f'[SUCCESS] PyHelios3D {pyhelios.__version__} imported successfully')
                # Test native library functionality (most critical test)
                from pyhelios.plugins import get_plugin_info
                info = get_plugin_info()
                print(f'[SUCCESS] Platform: {info[\"platform\"]}')
                mock_mode = info.get('is_mock', True)
                print(f'[SUCCESS] Mock mode: {mock_mode}')
                if mock_mode:
                    print('[FAILED] Mock mode detected - this violates fail-fast policy!')
                    print('Mock mode indicates native libraries are not properly packaged in wheel.')
                    print('This must be fixed before deployment.')
                    raise RuntimeError('Mock mode detected in wheel testing - native libraries missing')
                else:
                    lib_path = info.get('library_path', 'Unknown')
                    print(f'[SUCCESS] Native library loaded: {lib_path}')
                # Test Linux testable plugins including headless visualizer
                # Only GPU-accelerated plugins require special hardware not available in CI
                available_plugins = info.get('available_plugins', [])
                testable_plugins = ['weberpenntree', 'visualizer', 'photosynthesis', 'solarposition', 'stomatalconductance', 'plantarchitecture']
                built_plugins = ['weberpenntree', 'visualizer', 'photosynthesis', 'solarposition', 'stomatalconductance', 'energybalance', 'radiation', 'plantarchitecture']
                print(f'[INFO] Available plugins: {sorted(available_plugins)}')
                print(f'[INFO] Built plugins (expected): {sorted(built_plugins)}')
                print(f'[INFO] Testable plugins (in CI): {sorted(testable_plugins)}')
                # Check that we have at least the testable plugins
                missing_testable = set(testable_plugins) - set(available_plugins)
                if missing_testable:
                    print(f'[FAILED] Missing testable plugins: {sorted(missing_testable)}')
                    print('[INFO] Visualizer plugin requires headless OpenGL support in CI')
                    raise RuntimeError(f'Missing testable plugins: {missing_testable}')
                else:
                    print(f'[SUCCESS] All {len(testable_plugins)} testable plugins are available')
                # Report on GPU plugins (built but may not be testable in CI without GPU hardware)
                gpu_plugins = ['energybalance', 'radiation']
                available_gpu = [p for p in gpu_plugins if p in available_plugins]
                if available_gpu:
                    print(f'[INFO] GPU plugins available for testing: {sorted(available_gpu)}')
                else:
                    print(f'[INFO] GPU plugins not available for CI testing (expected in containerized environment without GPU)')
                # Asset validation (non-critical, allow failure)
                try:
                    from pyhelios.assets import get_asset_manager
                    manager = get_asset_manager()
                    helios_build = manager._get_helios_build_path()
                    if helios_build:
                        print(f'[INFO] HELIOS_BUILD assets: {helios_build}')
                    else:
                        print('[INFO] HELIOS_BUILD assets not found (acceptable in wheel testing)')
                except Exception as e:
                    print(f'[INFO] Asset validation skipped: {e}')
                # Critical test: primitive data operations that fail in macOS CI
                print('[CRITICAL-TEST] Testing primitive data operations...')
                from pyhelios import Context
                ctx = Context()
                patch_uuid = ctx.addPatch()
                print(f'[CRITICAL-TEST] Created patch UUID: {patch_uuid}')
                ctx.setPrimitiveDataInt(patch_uuid, 'test_int', 42)
                print('[CRITICAL-TEST] setPrimitiveDataInt completed')
                exists = ctx.doesPrimitiveDataExist(patch_uuid, 'test_int')
                print(f'[CRITICAL-TEST] doesPrimitiveDataExist: {exists}')
                if not exists:
                    print('[CRITICAL-TEST] REPRODUCED: Primitive data does not exist after setting!')
                    print('[CRITICAL-TEST] This is the bug that causes macOS CI test failures')
                    print('[CRITICAL-TEST] Continuing to pytest to confirm...')
                else:
                    value = ctx.getPrimitiveData(patch_uuid, 'test_int', int)
                    print(f'[CRITICAL-TEST] Retrieved value: {value}')
                    print('[CRITICAL-TEST] Primitive data operations working correctly in macOS CI')
                print('[SUCCESS] cibuildwheel test completed successfully')
            except Exception as e:
                print(f'[FAILED] cibuildwheel test FAILED: {e}')
                import traceback
                traceback.print_exc()
                raise
            " &&
            python -m pytest {project}/tests/ --tb=short -v --forked -m "not requires_gpu" -s
          CIBW_TEST_COMMAND_MACOS: |
            python -c "
            import sys, os
            print(f'=== cibuildwheel Test Environment ===')
            print(f'Python: {sys.executable}')
            print(f'Platform: {sys.platform}')
            print(f'Working directory: {os.getcwd()}')
            try:
                import pyhelios
                print(f'[SUCCESS] PyHelios3D {pyhelios.__version__} imported successfully')
                # Test native library functionality (most critical test)
                from pyhelios.plugins import get_plugin_info
                info = get_plugin_info()
                print(f'[SUCCESS] Platform: {info[\"platform\"]}')
                mock_mode = info.get('is_mock', True)
                print(f'[SUCCESS] Mock mode: {mock_mode}')
                if mock_mode:
                    print('[FAILED] Mock mode detected - this violates fail-fast policy!')
                    print('Mock mode indicates native libraries are not properly packaged in wheel.')
                    print('This must be fixed before deployment.')
                    raise RuntimeError('Mock mode detected in wheel testing - native libraries missing')
                else:
                    lib_path = info.get('library_path', 'Unknown')
                    print(f'[SUCCESS] Native library loaded: {lib_path}')
                # Test macOS expected plugins (6 plugins including visualization but no GPU)
                available_plugins = info.get('available_plugins', [])
                expected_plugins = ['weberpenntree', 'visualizer', 'photosynthesis', 'solarposition', 'stomatalconductance', 'plantarchitecture']
                print(f'[INFO] Available plugins: {sorted(available_plugins)}')
                print(f'[INFO] Expected plugins: {sorted(expected_plugins)}')
                missing_plugins = set(expected_plugins) - set(available_plugins)
                if missing_plugins:
                    print(f'[FAILED] Missing expected plugins: {sorted(missing_plugins)}')
                    raise RuntimeError(f'Missing expected plugins: {missing_plugins}')
                else:
                    print(f'[SUCCESS] All {len(expected_plugins)} expected plugins are available')
                # Asset validation (non-critical, allow failure)
                try:
                    from pyhelios.assets import get_asset_manager
                    manager = get_asset_manager()
                    helios_build = manager._get_helios_build_path()
                    if helios_build:
                        print(f'[INFO] HELIOS_BUILD assets: {helios_build}')
                    else:
                        print('[INFO] HELIOS_BUILD assets not found (acceptable in wheel testing)')
                except Exception as e:
                    print(f'[INFO] Asset validation skipped: {e}')
                # Critical test: primitive data operations that fail in CI
                print('[CRITICAL-TEST] Testing primitive data operations...')
                from pyhelios import Context
                ctx = Context()
                patch_uuid = ctx.addPatch()
                print(f'[CRITICAL-TEST] Created patch UUID: {patch_uuid}')
                ctx.setPrimitiveDataInt(patch_uuid, 'test_int', 42)
                print('[CRITICAL-TEST] setPrimitiveDataInt completed')
                exists = ctx.doesPrimitiveDataExist(patch_uuid, 'test_int')
                print(f'[CRITICAL-TEST] doesPrimitiveDataExist: {exists}')
                if not exists:
                    print('[CRITICAL-TEST] REPRODUCED: Primitive data does not exist after setting!')
                    print('[CRITICAL-TEST] This is the bug that causes CI test failures')
                    print('[CRITICAL-TEST] Running comprehensive diagnostic...')
                    try:
                        exec(open('{project}/test_ci_diagnostic.py').read())
                    except Exception as diag_e:
                        print(f'[CRITICAL-TEST] Diagnostic failed: {diag_e}')
                    print('[CRITICAL-TEST] Continuing to pytest to confirm...')
                else:
                    value = ctx.getPrimitiveData(patch_uuid, 'test_int', int)
                    print(f'[CRITICAL-TEST] Retrieved value: {value}')
                    print('[CRITICAL-TEST] Primitive data operations working correctly in CI')
                print('[SUCCESS] cibuildwheel test completed successfully')
            except Exception as e:
                print(f'[FAILED] cibuildwheel test FAILED: {e}')
                import traceback
                traceback.print_exc()
                raise
            " &&
            python -m pytest {project}/tests/ --tb=short -v --forked -m "not requires_gpu" -s
          CIBW_TEST_COMMAND_WINDOWS: |
            python -c "
            import sys, os
            print(f'=== cibuildwheel Test Environment ===')
            print(f'Python: {sys.executable}')
            print(f'Platform: {sys.platform}')
            print(f'Working directory: {os.getcwd()}')
            try:
                import pyhelios
                print(f'[SUCCESS] PyHelios3D {pyhelios.__version__} imported successfully')
                # Test native library functionality (most critical test)
                from pyhelios.plugins import get_plugin_info
                info = get_plugin_info()
                print(f'[SUCCESS] Platform: {info[\"platform\"]}')
                mock_mode = info.get('is_mock', True)
                print(f'[SUCCESS] Mock mode: {mock_mode}')
                if mock_mode:
                    print('[FAILED] Mock mode detected - this violates fail-fast policy!')
                    print('Mock mode indicates native libraries are not properly packaged in wheel.')
                    print('This must be fixed before deployment.')
                    raise RuntimeError('Mock mode detected in wheel testing - native libraries missing')
                else:
                    lib_path = info.get('library_path', 'Unknown')
                    print(f'[SUCCESS] Native library loaded: {lib_path}')
                # Test Windows testable plugins (CI containers don't have GPU for radiation/energybalance testing)
                # We build GPU plugins but can't test them in CI environment
                available_plugins = info.get('available_plugins', [])
                testable_plugins = ['weberpenntree', 'visualizer', 'photosynthesis', 'solarposition', 'stomatalconductance', 'plantarchitecture']
                built_plugins = ['weberpenntree', 'visualizer', 'photosynthesis', 'solarposition', 'stomatalconductance', 'energybalance', 'radiation', 'plantarchitecture']
                print(f'[INFO] Available plugins: {sorted(available_plugins)}')
                print(f'[INFO] Built plugins (expected): {sorted(built_plugins)}')
                print(f'[INFO] Testable plugins (in CI): {sorted(testable_plugins)}')
                # Check that we have at least the testable plugins (GPU plugins may not be testable in CI)
                missing_testable = set(testable_plugins) - set(available_plugins)
                if missing_testable:
                    print(f'[FAILED] Missing testable plugins: {sorted(missing_testable)}')
                    raise RuntimeError(f'Missing testable plugins: {missing_testable}')
                else:
                    print(f'[SUCCESS] All {len(testable_plugins)} testable plugins are available')
                # Report on GPU plugins (built but may not be testable in CI)
                gpu_plugins = ['energybalance', 'radiation']
                available_gpu = [p for p in gpu_plugins if p in available_plugins]
                if available_gpu:
                    print(f'[INFO] GPU plugins available for testing: {sorted(available_gpu)}')
                else:
                    print(f'[INFO] GPU plugins not available for CI testing (expected in containerized environment)')
                # Asset validation (non-critical, allow failure)
                try:
                    from pyhelios.assets import get_asset_manager
                    manager = get_asset_manager()
                    helios_build = manager._get_helios_build_path()
                    if helios_build:
                        print(f'[INFO] HELIOS_BUILD assets: {helios_build}')
                    else:
                        print('[INFO] HELIOS_BUILD assets not found (acceptable in wheel testing)')
                except Exception as e:
                    print(f'[INFO] Asset validation skipped: {e}')
                # Critical test: primitive data operations that fail in CI
                print('[CRITICAL-TEST] Testing primitive data operations...')
                from pyhelios import Context
                ctx = Context()
                patch_uuid = ctx.addPatch()
                print(f'[CRITICAL-TEST] Created patch UUID: {patch_uuid}')
                ctx.setPrimitiveDataInt(patch_uuid, 'test_int', 42)
                print('[CRITICAL-TEST] setPrimitiveDataInt completed')
                exists = ctx.doesPrimitiveDataExist(patch_uuid, 'test_int')
                print(f'[CRITICAL-TEST] doesPrimitiveDataExist: {exists}')
                if not exists:
                    print('[CRITICAL-TEST] REPRODUCED: Primitive data does not exist after setting!')
                    print('[CRITICAL-TEST] This is the bug that causes CI test failures')
                    print('[CRITICAL-TEST] Running comprehensive diagnostic...')
                    try:
                        exec(open('{project}/test_ci_diagnostic.py').read())
                    except Exception as diag_e:
                        print(f'[CRITICAL-TEST] Diagnostic failed: {diag_e}')
                    print('[CRITICAL-TEST] Continuing to pytest to confirm...')
                else:
                    value = ctx.getPrimitiveData(patch_uuid, 'test_int', int)
                    print(f'[CRITICAL-TEST] Retrieved value: {value}')
                    print('[CRITICAL-TEST] Primitive data operations working correctly in CI')
                print('[SUCCESS] cibuildwheel test completed successfully')
            except Exception as e:
                print(f'[FAILED] cibuildwheel test FAILED: {e}')
                import traceback
                traceback.print_exc()
                raise
            " &&
            python -m pytest {project}/tests/ --tb=short -v -m "not requires_gpu" -s
          # Platform-specific test requirements (pytest-forked only works on Unix systems)
          CIBW_TEST_REQUIRES_LINUX: "pytest pytest-forked"
          CIBW_TEST_REQUIRES_MACOS: "pytest pytest-forked"
          CIBW_TEST_REQUIRES_WINDOWS: "pytest"
          # Skip problematic platforms for testing
          CIBW_TEST_SKIP: "*-win32 *-manylinux_i686 *-musllinux*"
          # Repair wheels to bundle dependencies
          CIBW_REPAIR_WHEEL_COMMAND_MACOS: "delocate-wheel --require-archs {delocate_archs} -w {dest_dir} -v {wheel}"
          CIBW_REPAIR_WHEEL_COMMAND_LINUX: "auditwheel repair -w {dest_dir} {wheel}"

      - name: Debug build failure
        if: failure()
        shell: bash
        run: |
          echo "=== Build Failure Diagnostics ==="
          echo "Build directory contents:"
          find pyhelios_build -name "*.so" -o -name "*.dll" -o -name "*.dylib" 2>/dev/null || echo "No build directory found"
          echo ""
          echo "Plugin directory contents:"
          ls -la pyhelios/plugins/ 2>/dev/null || echo "No plugins directory found"
          echo ""
          echo "Wheel directory contents:"
          ls -la wheelhouse/ 2>/dev/null || echo "No wheelhouse directory found"
          echo ""
          echo "Python environment:"
          python --version
          pip list | grep -E "(cibuildwheel|auditwheel|delocate)" || echo "Wheel tools not found"

      - name: Validate wheel contents
        if: always()  # Run even if build partially failed
        shell: bash
        run: |
          echo "=== Wheel Content Validation ==="
          # nullglob: with no wheels the glob expands to nothing instead of the
          # literal pattern, so we report once rather than "validating" a non-file.
          shopt -s nullglob
          wheels=(wheelhouse/*.whl)
          if [ ${#wheels[@]} -eq 0 ]; then
            echo "No wheels found to validate"
          else
            for wheel in "${wheels[@]}"; do
              echo "Validating: $(basename "$wheel")"
              # Check wheel contains both Python files and native libraries
              python .github/scripts/validate_wheel.py "$wheel"
            done
          fi

      - name: Upload wheels as artifacts
        uses: actions/upload-artifact@v4
        with:
          name: wheels-${{ matrix.os }}-${{ matrix.arch }}
          path: wheelhouse/*.whl
          retention-days: 7
| test_wheels: | |
| name: Test wheels on ${{ matrix.os }} Python ${{ matrix.python-version }} | |
| runs-on: ${{ matrix.os }} | |
| needs: build_wheels | |
| strategy: | |
| matrix: | |
| include: | |
| - os: ubuntu-22.04 | |
| python-version: '3.8' | |
| - os: ubuntu-22.04 | |
| python-version: '3.11' | |
| - os: windows-2022 | |
| python-version: '3.8' | |
| - os: windows-2022 | |
| python-version: '3.11' | |
| - os: macos-13 | |
| python-version: '3.8' | |
| - os: macos-13 | |
| python-version: '3.11' | |
| - os: macos-14 | |
| python-version: '3.11' | |
| steps: | |
| - name: Checkout PyHelios for tests | |
| uses: actions/checkout@v4 | |
| - name: Set up Python ${{ matrix.python-version }} | |
| uses: actions/setup-python@v5 | |
| with: | |
| python-version: ${{ matrix.python-version }} | |
| - name: Download wheels | |
| uses: actions/download-artifact@v4 | |
| with: | |
| pattern: wheels-* | |
| merge-multiple: true | |
| path: wheelhouse | |
| - name: Install wheel and test dependencies | |
| shell: bash | |
| run: | | |
| python -m pip install --upgrade pip | |
| python -m pip install numpy pytest pyyaml | |
| # Install pytest-forked only on Unix systems (Windows doesn't support fork()) | |
| if [ "$RUNNER_OS" != "Windows" ]; then | |
| python -m pip install pytest-forked | |
| fi | |
| python -m pip install --no-index --find-links wheelhouse pyhelios3d | |
| - name: Test wheel functionality | |
| shell: bash | |
| run: | | |
| # Create cross-platform temporary directory for isolated testing | |
| if [ "$RUNNER_OS" == "Windows" ]; then | |
| ISOLATED_DIR="$RUNNER_TEMP/wheel_test" | |
| else | |
| ISOLATED_DIR="/tmp/wheel_test" | |
| fi | |
| mkdir -p "$ISOLATED_DIR" | |
| cd "$ISOLATED_DIR" | |
| echo "Testing wheel from isolated directory: $ISOLATED_DIR" | |
| echo "Current directory: $(pwd)" | |
| # Test wheel import in isolation (not contaminated by source code) | |
| python "$GITHUB_WORKSPACE/.github/scripts/test_wheel_import.py" | |
| # Copy complete test suite to isolated directory for clean wheel testing | |
| echo "Copying test suite to isolated directory for clean wheel testing" | |
| mkdir -p "$ISOLATED_DIR/tests" | |
| cp "$GITHUB_WORKSPACE/tests/test_"*.py "$ISOLATED_DIR/tests/" | |
| cp "$GITHUB_WORKSPACE/tests/conftest.py" "$ISOLATED_DIR/tests/" | |
| # Create platform-appropriate pytest.ini (remove --forked on Windows) | |
| if [ "$RUNNER_OS" != "Windows" ]; then | |
| cp "$GITHUB_WORKSPACE/pytest.ini" "$ISOLATED_DIR/" | |
| else | |
| # Create Windows-compatible pytest.ini without --forked | |
| sed '/--forked/d' "$GITHUB_WORKSPACE/pytest.ini" > "$ISOLATED_DIR/pytest.ini" 2>/dev/null || echo "# Wheel testing" > "$ISOLATED_DIR/pytest.ini" | |
| fi | |
| touch "$ISOLATED_DIR/tests/__init__.py" | |
| # Run pytest in isolated directory against copied test suite (tests real user import experience) | |
| cd "$ISOLATED_DIR" | |
| echo "Running pytest in isolated directory with copied test suite: $(pwd)" | |
| # Use forked execution on Unix systems for subprocess isolation (Windows doesn't support fork()) | |
| if [ "$RUNNER_OS" != "Windows" ]; then | |
| echo "Using forked execution for subprocess isolation" | |
| python -m pytest | |
| else | |
| echo "Using standard execution (Windows doesn't support fork())" | |
| python -m pytest | |
| fi | |
| start-gpu: | |
| name: Start GPU instance for wheel testing | |
| runs-on: ubuntu-latest | |
| needs: build_wheels | |
| if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/v') | |
| permissions: | |
| id-token: write | |
| contents: read | |
| steps: | |
| - uses: aws-actions/configure-aws-credentials@v2 | |
| with: | |
| role-to-assume: ${{ secrets.OIDC_ROLE_ARN }} | |
| aws-region: us-west-2 | |
| - run: | | |
| aws ec2 start-instances --instance-ids ${{ secrets.EC2_INSTANCE_ID_LINUX }} | |
| aws ec2 wait instance-running --instance-ids ${{ secrets.EC2_INSTANCE_ID_LINUX }} | |
| test_gpu_wheels: | |
| name: Test GPU wheels on self-hosted Linux runner | |
| runs-on: [self-hosted] | |
| needs: [build_wheels, start-gpu] | |
| if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/v') | |
| steps: | |
| - name: Checkout PyHelios for tests | |
| uses: actions/checkout@v4 | |
| - name: Set up Python | |
| uses: actions/setup-python@v5 | |
| with: | |
| python-version: '3.11' | |
| - name: Download wheels | |
| uses: actions/download-artifact@v4 | |
| with: | |
| pattern: wheels-* | |
| merge-multiple: true | |
| path: wheelhouse | |
| - name: Install wheel and test dependencies | |
| run: | | |
| python -m pip install --upgrade pip | |
| python -m pip install numpy pytest pyyaml pytest-forked | |
| python -m pip install --no-index --find-links wheelhouse pyhelios3d | |
| - name: Test GPU wheel functionality | |
| run: | | |
| # Create isolated directory for GPU wheel testing | |
| ISOLATED_DIR="/tmp/gpu_wheel_test" | |
| mkdir -p "$ISOLATED_DIR" | |
| cd "$ISOLATED_DIR" | |
| echo "Testing GPU wheel from isolated directory: $ISOLATED_DIR" | |
| echo "Current directory: $(pwd)" | |
| # Test wheel import and GPU plugin availability | |
| python -c " | |
| import sys, os | |
| print(f'=== GPU Wheel Test Environment ===') | |
| print(f'Python: {sys.executable}') | |
| print(f'Platform: {sys.platform}') | |
| print(f'Working directory: {os.getcwd()}') | |
| try: | |
| import pyhelios | |
| print(f'[SUCCESS] PyHelios3D {pyhelios.__version__} imported successfully') | |
| # Test native library functionality with focus on GPU plugins | |
| from pyhelios.plugins import get_plugin_info | |
| info = get_plugin_info() | |
| print(f'[SUCCESS] Platform: {info[\"platform\"]}') | |
| mock_mode = info.get('is_mock', True) | |
| print(f'[SUCCESS] Mock mode: {mock_mode}') | |
| if mock_mode: | |
| print('[FAILED] Mock mode detected - GPU wheel should have native libraries!') | |
| raise RuntimeError('Mock mode detected in GPU wheel testing - native libraries missing') | |
| else: | |
| lib_path = info.get('library_path', 'Unknown') | |
| print(f'[SUCCESS] Native library loaded: {lib_path}') | |
| # Test GPU plugin availability (critical for GPU wheels) | |
| available_plugins = info.get('available_plugins', []) | |
| gpu_plugins = ['energybalance', 'radiation'] | |
| print(f'[INFO] Available plugins: {sorted(available_plugins)}') | |
| print(f'[INFO] GPU plugins to test: {sorted(gpu_plugins)}') | |
| available_gpu = [p for p in gpu_plugins if p in available_plugins] | |
| if available_gpu: | |
| print(f'[SUCCESS] GPU plugins available: {sorted(available_gpu)}') | |
| # Test GPU functionality if radiation plugin is available | |
| if 'radiation' in available_gpu: | |
| print('[GPU-TEST] Testing radiation plugin...') | |
| from pyhelios import Context, RadiationModel | |
| ctx = Context() | |
| # Test basic radiation model creation | |
| with RadiationModel(ctx) as radiation: | |
| radiation.addRadiationBand(\"SW\") | |
| print('[GPU-TEST] RadiationModel created and band added successfully') | |
| else: | |
| print('[WARNING] No GPU plugins available - may be expected in containerized environment') | |
| print('[INFO] GPU wheels built successfully but GPU hardware may not be available') | |
| print('[SUCCESS] GPU wheel test completed successfully') | |
| except Exception as e: | |
| print(f'[FAILED] GPU wheel test FAILED: {e}') | |
| import traceback | |
| traceback.print_exc() | |
| raise | |
| " | |
| # Copy and run complete test suite for comprehensive validation | |
| echo "Copying complete test suite to isolated directory" | |
| mkdir -p "$ISOLATED_DIR/tests" | |
| cp "$GITHUB_WORKSPACE/tests/test_"*.py "$ISOLATED_DIR/tests/" | |
| cp "$GITHUB_WORKSPACE/tests/conftest.py" "$ISOLATED_DIR/tests/" | |
| cp "$GITHUB_WORKSPACE/pytest.ini" "$ISOLATED_DIR/" | |
| touch "$ISOLATED_DIR/tests/__init__.py" | |
| # Run complete test suite in isolated directory | |
| cd "$ISOLATED_DIR" | |
| echo "Running complete test suite in isolated directory: $(pwd)" | |
| # Run all tests to ensure comprehensive validation on GPU hardware | |
| python -m pytest tests/ --tb=short -v -s | |
| stop-gpu: | |
| name: Stop GPU instance | |
| needs: test_gpu_wheels | |
| if: always() && (github.event_name == 'push' && startsWith(github.ref, 'refs/tags/v')) | |
| runs-on: ubuntu-latest | |
| permissions: | |
| id-token: write | |
| contents: read | |
| steps: | |
| - uses: aws-actions/configure-aws-credentials@v2 | |
| with: | |
| role-to-assume: ${{ secrets.OIDC_ROLE_ARN }} | |
| aws-region: us-west-2 | |
| - run: | | |
| aws ec2 stop-instances --instance-ids ${{ secrets.EC2_INSTANCE_ID_LINUX }} | |
| aws ec2 wait instance-stopped --instance-ids ${{ secrets.EC2_INSTANCE_ID_LINUX }} | |
| publish: | |
| name: Publish to PyPI | |
| runs-on: ubuntu-latest | |
| needs: [build_wheels, test_wheels, test_gpu_wheels] | |
| if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/v') | |
| environment: | |
| name: pypi | |
| url: https://pypi.org/p/pyhelios3d | |
| permissions: | |
| id-token: write # Required for trusted publishing | |
| steps: | |
| - name: Download all wheels | |
| uses: actions/download-artifact@v4 | |
| with: | |
| pattern: wheels-* | |
| merge-multiple: true | |
| path: wheelhouse | |
| - name: Publish to PyPI | |
| uses: pypa/gh-action-pypi-publish@release/v1 | |
| with: | |
| packages-dir: wheelhouse/ | |
| verify-metadata: false # Skip metadata verification due to dynamic versioning |