# [v0.1.13] 2025-12-25 #213 — workflow file for this run.
# NOTE(review): this file was recovered from a rendered web view (the original
# viewer warned about hidden/bidirectional Unicode). Re-check the canonical
# file in an editor that reveals hidden Unicode characters before trusting it.
name: Build Wheels

# Triggered by version/testing tags or manually.
on:
  push:
    tags: ['v*', 'testing-v*']
  workflow_dispatch:
jobs:
  build_wheels:
    name: Build wheels on ${{ matrix.os }}
    runs-on: ${{ matrix.os }}
    timeout-minutes: 60
    strategy:
      matrix:
        # One runner per target platform/architecture.
        include:
          - os: ubuntu-22.04
            arch: x86_64
          - os: windows-2022
            arch: AMD64
          - os: macos-14
            arch: arm64
| steps: | |
| # Only for tag builds: force setuptools-scm to use the tag's version | |
| - name: Set package version from tag | |
| if: startsWith(github.ref, 'refs/tags/') | |
| shell: bash | |
| run: echo "SETUPTOOLS_SCM_PRETEND_VERSION=${GITHUB_REF_NAME#v}" >> "$GITHUB_ENV" | |
| - name: Checkout PyHelios | |
| uses: actions/checkout@v4 | |
| with: | |
| submodules: recursive | |
| fetch-depth: 0 # Required for setuptools-scm | |
| # Ensure all tags are fetched | |
| fetch-tags: true | |
| - name: Debug version detection | |
| shell: bash | |
| run: | | |
| echo "=== Git tag information ===" | |
| git tag --list | head -10 | |
| echo "Current HEAD: $(git rev-parse HEAD)" | |
| echo "Describe: $(git describe --tags --always --dirty)" | |
| echo "=== Setuptools-scm version detection ===" | |
| python -m pip install setuptools-scm | |
| python -c "from setuptools_scm import get_version; print(f'Detected version: {get_version()}')" || echo "Version detection failed" | |
| - name: Set up Python | |
| uses: actions/setup-python@v5 | |
| with: | |
| python-version: '3.11' # Updated for cibuildwheel 3.x compatibility | |
| - name: Setup MSVC (Windows) | |
| if: runner.os == 'Windows' | |
| uses: ilammy/msvc-dev-cmd@v1 | |
| - name: Install Helios dependencies (macOS) | |
| if: runner.os == 'macOS' | |
| run: | | |
| cd helios-core/utilities | |
| # Install base + visualization dependencies (no GPU/CUDA for macOS builds) | |
| bash dependencies.sh BASE | |
| bash dependencies.sh VIS | |
| - name: Debug environment (macOS) | |
| if: runner.os == 'macOS' | |
| run: | | |
| echo "=== Directory structure ===" | |
| ls -la | |
| echo "=== PyHelios build scripts ===" | |
| ls -la build_scripts/ | |
| echo "=== Helios core ===" | |
| ls -la helios-core/ || echo "helios-core not found" | |
| echo "=== Python version and location ===" | |
| python --version | |
| which python | |
| echo "=== Environment ===" | |
| env | grep -E "(PYTHON|PATH)" | head -10 | |
| - name: Install CUDA Toolkit (Windows) | |
| if: runner.os == 'Windows' | |
| id: cuda | |
| uses: Jimver/cuda-toolkit@v0.2.24 | |
| with: | |
| cuda: '12.6.2' | |
| method: network | |
| # Version 0.2.24 works correctly on Windows | |
| use-github-cache: true | |
| use-local-cache: true | |
| log-file-suffix: log.txt | |
| - name: Validate CUDA presence (Windows) | |
| if: runner.os == 'Windows' | |
| shell: cmd | |
| run: | | |
| echo CUDA_PATH=%CUDA_PATH% | |
| where nvcc | |
| nvcc -V | |
| if not exist "%CUDA_PATH%\lib\x64\cudart.lib" (echo cudart.lib missing & exit /b 1) | |
| - name: Install cibuildwheel and repair tools | |
| shell: bash | |
| run: | | |
| python -m pip install --upgrade pip | |
| python -m pip install 'cibuildwheel>=2.23.0' | |
| # Install platform-specific wheel repair tools | |
| if [ "${{ runner.os }}" = "Linux" ]; then | |
| python -m pip install auditwheel | |
| elif [ "${{ runner.os }}" = "macOS" ]; then | |
| python -m pip install delocate | |
| fi | |
| - name: Debug version detection | |
| shell: bash | |
| run: | | |
| echo "=== Version Detection Debug ===" | |
| echo "Git describe: $(git describe --tags --long --dirty)" | |
| echo "Git tags:" | |
| git tag -l --sort=-version:refname | head -10 | |
| echo "Git log (recent commits):" | |
| git log --oneline --decorate -5 | |
| pip install setuptools-scm | |
| echo "setuptools-scm version: $(python -m setuptools_scm)" | |
| echo "================================" | |
| - name: Free disk space for CUDA installation (Linux only) | |
| if: runner.os == 'Linux' | |
| uses: jlumbroso/free-disk-space@main | |
| with: | |
| # Free up ~31GB in 3 minutes - essential for CUDA toolkit installation | |
| android: true # Frees ~14GB Android SDK/NDK | |
| dotnet: false # Frees ~2.7GB .NET runtime | |
| haskell: false # Frees ~5.2GB Haskell toolchain | |
| large-packages: false # Frees ~5.3GB various large packages | |
| tool-cache: false # Keep Python/Node tools needed for build | |
| swap-storage: false # Frees ~4GB swap space | |
| - name: Build manylinux CUDA shim (adds working /bin/sh and /usr/bin/sh) | |
| if: runner.os == 'Linux' | |
| shell: bash | |
| run: | | |
| cat > Dockerfile.ci <<'EOF' | |
| FROM sameli/manylinux2014_x86_64_cuda_11.8:latest | |
| # Ensure bash is present | |
| RUN yum install -y bash coreutils && \ | |
| # Provide sh in both canonical locations that various tools try first | |
| if [ ! -e /bin/sh ]; then ln -s /bin/bash /bin/sh; fi && \ | |
| if [ ! -e /usr/bin/sh ]; then ln -s /bin/bash /usr/bin/sh; fi && \ | |
| # Make sure containers launched without a login shell still see /bin and /usr/bin | |
| echo 'PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin' > /etc/environment | |
| EOF | |
| docker build -t pyhelios/manylinux2014_x86_64_cuda_11.8_shim -f Dockerfile.ci . | |
| # Prove sh resolves in the built image before we hand it to cibuildwheel | |
| docker run --rm pyhelios/manylinux2014_x86_64_cuda_11.8_shim /usr/bin/env sh -c 'echo "[shim] sh works: $0"' | |
| docker run --rm pyhelios/manylinux2014_x86_64_cuda_11.8_shim sh -c 'echo "[shim] PATH=$PATH"; command -v sh; ls -l /bin/sh /usr/bin/sh' | |
| - name: Build wheels | |
| run: python -m cibuildwheel --output-dir wheelhouse | |
| timeout-minutes: 30 # Single timeout for the entire step | |
| env: | |
| # Build for Python 3.8+ on all platforms | |
| CIBW_BUILD: cp38-* cp39-* cp310-* cp311-* cp312-* | |
| # Fail fast on build errors instead of continuing to next Python version | |
| CIBW_BUILD_VERBOSITY: 1 | |
| # Skip 32-bit builds and Python 3.8 on ARM64 due to cross-compilation issues | |
| # Python 3.8 lacks official ARM64 support, causing OpenMP linking issues in cibuildwheel | |
| CIBW_SKIP: "*-win32 *-manylinux_i686 *-musllinux* cp38-macosx_arm64" | |
| # Architecture configuration based on runner | |
| CIBW_ARCHS: ${{ matrix.arch }} | |
| # Use stable manylinux images to avoid gateway timeout issues | |
| CIBW_MANYLINUX_X86_64_IMAGE: pyhelios/manylinux2014_x86_64_cuda_11.8_shim | |
| CIBW_MANYLINUX_AARCH64_IMAGE: manylinux2014 | |
| # Platform-specific build commands with explicit plugin selection for consistency | |
| # macOS: Include visualizer but exclude GPU plugins due to cross-compilation issues | |
| CIBW_BEFORE_BUILD_MACOS: | | |
| if [ ! -f pyhelios_build/build/lib/libhelios.dylib ]; then | |
| echo "[macOS] First ABI: building native libs..." | |
| python build_scripts/prepare_wheel.py \ | |
| --buildmode release \ | |
| --nogpu \ | |
| --verbose \ | |
| --clean \ | |
| --cmake-args=-DENABLE_OPENMP=OFF \ | |
| --cmake-args=-DCMAKE_IGNORE_PATH=/opt/homebrew \ | |
| --cmake-args=-DCMAKE_IGNORE_PREFIX_PATH=/opt/homebrew \ | |
| --cmake-args=-DCMAKE_SYSTEM_IGNORE_PATH=/opt/homebrew | |
| else | |
| echo "[macOS] Reusing previously built native libs; skipping rebuild." | |
| fi | |
| CIBW_ENVIRONMENT_MACOS: "MACOSX_DEPLOYMENT_TARGET=14.0 CMAKE_OSX_ARCHITECTURES=${{ matrix.arch }} SYSTEM_VERSION_COMPAT=0" | |
| # Ensure CUDA is visible inside cibuildwheel's Windows build environment | |
| CIBW_ENVIRONMENT_PASS_WINDOWS: "CUDA_PATH CUDA_HOME" | |
| CIBW_BEFORE_BUILD_WINDOWS: > | |
| echo "=== CIBW_BEFORE_BUILD_WINDOWS Started ===" && | |
| set "PATH=%CUDA_PATH%\bin;%PATH%" && | |
| echo "CUDA_PATH: %CUDA_PATH%" && | |
| echo "PATH updated with CUDA bin directory" && | |
| (where nvcc || echo "nvcc not found in PATH") && | |
| (nvcc -V || echo "nvcc -V failed") && | |
| set "CMAKE_RC_COMPILER=" && | |
| set "CMAKE_CUDA_COMPILER=%CUDA_PATH%\bin\nvcc.exe" && | |
| set "PYHELIOS_CUDA_ARCHITECTURES=50;60;70;75;80;86;90" && | |
| echo "Environment variables set, starting native build..." && | |
| python build_scripts/prepare_wheel.py --buildmode release --verbose && | |
| echo "=== CIBW_BEFORE_BUILD_WINDOWS Completed ===" | |
| CIBW_SHELL_LINUX: "/usr/bin/bash -eo pipefail -c" | |
| CIBW_ENVIRONMENT_PASS_LINUX: "SHELL" | |
| CIBW_BEFORE_ALL_LINUX: > | |
| if [ -f /opt/rh/devtoolset-10/enable ]; then | |
| echo "Enabling devtoolset-10 for C++17 support" && source /opt/rh/devtoolset-10/enable | |
| elif [ -f /opt/rh/devtoolset-9/enable ]; then | |
| echo "Enabling devtoolset-9 for C++17 support" && source /opt/rh/devtoolset-9/enable | |
| elif [ -f /opt/rh/devtoolset-8/enable ]; then | |
| echo "Enabling devtoolset-8 for C++17 support" && source /opt/rh/devtoolset-8/enable | |
| else | |
| echo "ERROR: No devtoolset found - C++17 compilation will fail" | |
| echo "Available devtoolsets:" && ls -la /opt/rh/ | grep devtoolset || echo "None found" | |
| echo "System GCC version: $(gcc --version | head -1)" | |
| if ! gcc --version | grep -q -E "(9\.|10\.|11\.|12\.|13\.|14\.)" && ! gcc --version | grep -q "8\.[3-9]"; then | |
| echo "FATAL: System GCC too old for C++17, devtoolset required" && exit 1 | |
| fi | |
| fi && | |
| echo "Active compiler: $(which gcc)" && | |
| echo "Compiler version: $(gcc --version | head -1)" && | |
| yum install -y zlib-devel mesa-libGL-devel mesa-libEGL-devel libX11-devel libXrandr-devel mesa-libGLU-devel libXinerama-devel libXcursor-devel libXi-devel libXxf86vm-devel && | |
| export PKG_CONFIG_PATH="/usr/lib64/pkgconfig:/usr/lib/pkgconfig:${PKG_CONFIG_PATH}" && | |
| export CMAKE_PREFIX_PATH="/usr:${CMAKE_PREFIX_PATH}" && | |
| /opt/python/cp39-cp39/bin/python build_scripts/prepare_wheel.py --buildmode release --verbose --clean | |
| # Per-ABI: do not rebuild; only stage/copy already built artifacts | |
| CIBW_BEFORE_BUILD_LINUX: > | |
| if [ -f /opt/rh/devtoolset-10/enable ]; then | |
| source /opt/rh/devtoolset-10/enable | |
| elif [ -f /opt/rh/devtoolset-9/enable ]; then | |
| source /opt/rh/devtoolset-9/enable | |
| elif [ -f /opt/rh/devtoolset-8/enable ]; then | |
| source /opt/rh/devtoolset-8/enable | |
| else | |
| echo "ERROR: No devtoolset found for per-ABI build" | |
| if ! gcc --version | grep -q -E "(9\.|10\.|11\.|12\.|13\.|14\.)" && ! gcc --version | grep -q "8\.[3-9]"; then | |
| echo "FATAL: System GCC too old for C++17" && exit 1 | |
| fi | |
| fi && | |
| export PKG_CONFIG_PATH="/usr/lib64/pkgconfig:/usr/lib/pkgconfig:${PKG_CONFIG_PATH}" && | |
| export CMAKE_PREFIX_PATH="/usr:${CMAKE_PREFIX_PATH}" && | |
| echo "Reusing native build; packaging only." && | |
| /opt/python/cp39-cp39/bin/python build_scripts/prepare_wheel.py --buildmode release --verbose | |
| # Manylinux-specific environment for zlib compatibility and OpenGL discovery | |
| # Use documented $PATH expansion to append devtoolset without overriding cibuildwheel's Python | |
| CIBW_ENVIRONMENT_LINUX: "SHELL=/usr/bin/bash PATH=$PATH:/opt/rh/devtoolset-10/root/usr/bin:/opt/rh/devtoolset-9/root/usr/bin:/opt/rh/devtoolset-8/root/usr/bin CC=/opt/rh/devtoolset-10/root/usr/bin/gcc CXX=/opt/rh/devtoolset-10/root/usr/bin/g++ CFLAGS='-D_GNU_SOURCE -I/usr/include' CXXFLAGS='-D_GNU_SOURCE -D_GLIBCXX_USE_CXX11_ABI=0 -I/usr/include' CMAKE_C_COMPILER=/opt/rh/devtoolset-10/root/usr/bin/gcc CMAKE_CXX_COMPILER=/opt/rh/devtoolset-10/root/usr/bin/g++ CMAKE_C_FLAGS='-D_GNU_SOURCE' CMAKE_CXX_FLAGS='-D_GNU_SOURCE -D_GLIBCXX_USE_CXX11_ABI=0' PKG_CONFIG_PATH=/usr/lib64/pkgconfig:/usr/lib/pkgconfig CMAKE_PREFIX_PATH=/usr" | |
| # Comprehensive wheel testing using pytest suite | |
| CIBW_TEST_COMMAND: | | |
| python -c " | |
| import sys, os | |
| print(f'=== cibuildwheel Test Environment ===') | |
| print(f'Python: {sys.executable}') | |
| print(f'Platform: {sys.platform}') | |
| print(f'Working directory: {os.getcwd()}') | |
| try: | |
| import pyhelios | |
| print(f'[SUCCESS] PyHelios3D {pyhelios.__version__} imported successfully') | |
| # Test native library functionality (most critical test) | |
| from pyhelios.plugins import get_plugin_info | |
| info = get_plugin_info() | |
| print(f'[SUCCESS] Platform: {info[\"platform\"]}') | |
| mock_mode = info.get('is_mock', True) | |
| print(f'[SUCCESS] Mock mode: {mock_mode}') | |
| if mock_mode: | |
| print('[FAILED] Mock mode detected - this violates fail-fast policy!') | |
| print('Mock mode indicates native libraries are not properly packaged in wheel.') | |
| print('This must be fixed before deployment.') | |
| raise RuntimeError('Mock mode detected in wheel testing - native libraries missing') | |
| else: | |
| lib_path = info.get('library_path', 'Unknown') | |
| print(f'[SUCCESS] Native library loaded: {lib_path}') | |
| # Asset validation (non-critical, allow failure) | |
| try: | |
| from pyhelios.assets import get_asset_manager | |
| manager = get_asset_manager() | |
| helios_build = manager._get_helios_build_path() | |
| if helios_build: | |
| print(f'[INFO] HELIOS_BUILD assets: {helios_build}') | |
| else: | |
| print('[INFO] HELIOS_BUILD assets not found (acceptable in wheel testing)') | |
| except Exception as e: | |
| print(f'[INFO] Asset validation skipped: {e}') | |
| print('[SUCCESS] cibuildwheel test completed successfully') | |
| except Exception as e: | |
| print(f'[FAILED] cibuildwheel test FAILED: {e}') | |
| import traceback | |
| traceback.print_exc() | |
| raise | |
| " | |
| # Platform-specific pytest commands (Windows doesn't support --forked) | |
| CIBW_TEST_COMMAND_LINUX: | | |
| python -c " | |
| import sys, os | |
| print(f'=== cibuildwheel Test Environment ===') | |
| print(f'Python: {sys.executable}') | |
| print(f'Platform: {sys.platform}') | |
| print(f'Working directory: {os.getcwd()}') | |
| try: | |
| import pyhelios | |
| print(f'[SUCCESS] PyHelios3D {pyhelios.__version__} imported successfully') | |
| # Test native library functionality (most critical test) | |
| from pyhelios.plugins import get_plugin_info | |
| info = get_plugin_info() | |
| print(f'[SUCCESS] Platform: {info[\"platform\"]}') | |
| mock_mode = info.get('is_mock', True) | |
| print(f'[SUCCESS] Mock mode: {mock_mode}') | |
| if mock_mode: | |
| print('[FAILED] Mock mode detected - this violates fail-fast policy!') | |
| print('Mock mode indicates native libraries are not properly packaged in wheel.') | |
| print('This must be fixed before deployment.') | |
| raise RuntimeError('Mock mode detected in wheel testing - native libraries missing') | |
| else: | |
| lib_path = info.get('library_path', 'Unknown') | |
| print(f'[SUCCESS] Native library loaded: {lib_path}') | |
| # Test Linux testable plugins including headless visualizer | |
| # Only GPU-accelerated plugins require special hardware not available in CI | |
| available_plugins = info.get('available_plugins', []) | |
| testable_plugins = ['weberpenntree', 'visualizer', 'photosynthesis', 'solarposition', 'stomatalconductance', 'plantarchitecture'] | |
| built_plugins = ['weberpenntree', 'visualizer', 'photosynthesis', 'solarposition', 'stomatalconductance', 'energybalance', 'radiation', 'plantarchitecture'] | |
| print(f'[INFO] Available plugins: {sorted(available_plugins)}') | |
| print(f'[INFO] Built plugins (expected): {sorted(built_plugins)}') | |
| print(f'[INFO] Testable plugins (in CI): {sorted(testable_plugins)}') | |
| # Check that we have at least the testable plugins | |
| missing_testable = set(testable_plugins) - set(available_plugins) | |
| if missing_testable: | |
| print(f'[FAILED] Missing testable plugins: {sorted(missing_testable)}') | |
| print('[INFO] Visualizer plugin requires headless OpenGL support in CI') | |
| raise RuntimeError(f'Missing testable plugins: {missing_testable}') | |
| else: | |
| print(f'[SUCCESS] All {len(testable_plugins)} testable plugins are available') | |
| # Report on GPU plugins (built but may not be testable in CI without GPU hardware) | |
| gpu_plugins = ['energybalance', 'radiation'] | |
| available_gpu = [p for p in gpu_plugins if p in available_plugins] | |
| if available_gpu: | |
| print(f'[INFO] GPU plugins available for testing: {sorted(available_gpu)}') | |
| else: | |
| print(f'[INFO] GPU plugins not available for CI testing (expected in containerized environment without GPU)') | |
| # Asset validation (non-critical, allow failure) | |
| try: | |
| from pyhelios.assets import get_asset_manager | |
| manager = get_asset_manager() | |
| helios_build = manager._get_helios_build_path() | |
| if helios_build: | |
| print(f'[INFO] HELIOS_BUILD assets: {helios_build}') | |
| else: | |
| print('[INFO] HELIOS_BUILD assets not found (acceptable in wheel testing)') | |
| except Exception as e: | |
| print(f'[INFO] Asset validation skipped: {e}') | |
| # Critical test: primitive data operations that fail in macOS CI | |
| print('[CRITICAL-TEST] Testing primitive data operations...') | |
| from pyhelios import Context | |
| ctx = Context() | |
| patch_uuid = ctx.addPatch() | |
| print(f'[CRITICAL-TEST] Created patch UUID: {patch_uuid}') | |
| ctx.setPrimitiveDataInt(patch_uuid, 'test_int', 42) | |
| print('[CRITICAL-TEST] setPrimitiveDataInt completed') | |
| exists = ctx.doesPrimitiveDataExist(patch_uuid, 'test_int') | |
| print(f'[CRITICAL-TEST] doesPrimitiveDataExist: {exists}') | |
| if not exists: | |
| print('[CRITICAL-TEST] REPRODUCED: Primitive data does not exist after setting!') | |
| print('[CRITICAL-TEST] This is the bug that causes macOS CI test failures') | |
| print('[CRITICAL-TEST] Continuing to pytest to confirm...') | |
| else: | |
| value = ctx.getPrimitiveData(patch_uuid, 'test_int', int) | |
| print(f'[CRITICAL-TEST] Retrieved value: {value}') | |
| print('[CRITICAL-TEST] Primitive data operations working correctly in macOS CI') | |
| print('[SUCCESS] cibuildwheel test completed successfully') | |
| except Exception as e: | |
| print(f'[FAILED] cibuildwheel test FAILED: {e}') | |
| import traceback | |
| traceback.print_exc() | |
| raise | |
| " && | |
| python -m pytest {project}/tests/ --tb=short -v --forked -m "not requires_gpu" -s | |
| CIBW_TEST_COMMAND_MACOS: | | |
| python -c " | |
| import sys, os | |
| print(f'=== cibuildwheel Test Environment ===') | |
| print(f'Python: {sys.executable}') | |
| print(f'Platform: {sys.platform}') | |
| print(f'Working directory: {os.getcwd()}') | |
| try: | |
| import pyhelios | |
| print(f'[SUCCESS] PyHelios3D {pyhelios.__version__} imported successfully') | |
| # Test native library functionality (most critical test) | |
| from pyhelios.plugins import get_plugin_info | |
| info = get_plugin_info() | |
| print(f'[SUCCESS] Platform: {info[\"platform\"]}') | |
| mock_mode = info.get('is_mock', True) | |
| print(f'[SUCCESS] Mock mode: {mock_mode}') | |
| if mock_mode: | |
| print('[FAILED] Mock mode detected - this violates fail-fast policy!') | |
| print('Mock mode indicates native libraries are not properly packaged in wheel.') | |
| print('This must be fixed before deployment.') | |
| raise RuntimeError('Mock mode detected in wheel testing - native libraries missing') | |
| else: | |
| lib_path = info.get('library_path', 'Unknown') | |
| print(f'[SUCCESS] Native library loaded: {lib_path}') | |
| # Test macOS expected plugins (6 plugins including visualization but no GPU) | |
| available_plugins = info.get('available_plugins', []) | |
| expected_plugins = ['weberpenntree', 'visualizer', 'photosynthesis', 'solarposition', 'stomatalconductance', 'plantarchitecture'] | |
| print(f'[INFO] Available plugins: {sorted(available_plugins)}') | |
| print(f'[INFO] Expected plugins: {sorted(expected_plugins)}') | |
| missing_plugins = set(expected_plugins) - set(available_plugins) | |
| if missing_plugins: | |
| print(f'[FAILED] Missing expected plugins: {sorted(missing_plugins)}') | |
| raise RuntimeError(f'Missing expected plugins: {missing_plugins}') | |
| else: | |
| print(f'[SUCCESS] All {len(expected_plugins)} expected plugins are available') | |
| # Asset validation (non-critical, allow failure) | |
| try: | |
| from pyhelios.assets import get_asset_manager | |
| manager = get_asset_manager() | |
| helios_build = manager._get_helios_build_path() | |
| if helios_build: | |
| print(f'[INFO] HELIOS_BUILD assets: {helios_build}') | |
| else: | |
| print('[INFO] HELIOS_BUILD assets not found (acceptable in wheel testing)') | |
| except Exception as e: | |
| print(f'[INFO] Asset validation skipped: {e}') | |
| # Critical test: primitive data operations that fail in CI | |
| print('[CRITICAL-TEST] Testing primitive data operations...') | |
| from pyhelios import Context | |
| ctx = Context() | |
| patch_uuid = ctx.addPatch() | |
| print(f'[CRITICAL-TEST] Created patch UUID: {patch_uuid}') | |
| ctx.setPrimitiveDataInt(patch_uuid, 'test_int', 42) | |
| print('[CRITICAL-TEST] setPrimitiveDataInt completed') | |
| exists = ctx.doesPrimitiveDataExist(patch_uuid, 'test_int') | |
| print(f'[CRITICAL-TEST] doesPrimitiveDataExist: {exists}') | |
| if not exists: | |
| print('[CRITICAL-TEST] REPRODUCED: Primitive data does not exist after setting!') | |
| print('[CRITICAL-TEST] This is the bug that causes CI test failures') | |
| print('[CRITICAL-TEST] Running comprehensive diagnostic...') | |
| try: | |
| exec(open('{project}/test_ci_diagnostic.py').read()) | |
| except Exception as diag_e: | |
| print(f'[CRITICAL-TEST] Diagnostic failed: {diag_e}') | |
| print('[CRITICAL-TEST] Continuing to pytest to confirm...') | |
| else: | |
| value = ctx.getPrimitiveData(patch_uuid, 'test_int', int) | |
| print(f'[CRITICAL-TEST] Retrieved value: {value}') | |
| print('[CRITICAL-TEST] Primitive data operations working correctly in CI') | |
| print('[SUCCESS] cibuildwheel test completed successfully') | |
| except Exception as e: | |
| print(f'[FAILED] cibuildwheel test FAILED: {e}') | |
| import traceback | |
| traceback.print_exc() | |
| raise | |
| " && | |
| python -m pytest {project}/tests/ --tb=short -v --forked -m "not requires_gpu" -s | |
| CIBW_TEST_COMMAND_WINDOWS: | | |
| python -c " | |
| import sys, os | |
| print(f'=== cibuildwheel Test Environment ===') | |
| print(f'Python: {sys.executable}') | |
| print(f'Platform: {sys.platform}') | |
| print(f'Working directory: {os.getcwd()}') | |
| try: | |
| import pyhelios | |
| print(f'[SUCCESS] PyHelios3D {pyhelios.__version__} imported successfully') | |
| # Test native library functionality (most critical test) | |
| from pyhelios.plugins import get_plugin_info | |
| info = get_plugin_info() | |
| print(f'[SUCCESS] Platform: {info[\"platform\"]}') | |
| mock_mode = info.get('is_mock', True) | |
| print(f'[SUCCESS] Mock mode: {mock_mode}') | |
| if mock_mode: | |
| print('[FAILED] Mock mode detected - this violates fail-fast policy!') | |
| print('Mock mode indicates native libraries are not properly packaged in wheel.') | |
| print('This must be fixed before deployment.') | |
| raise RuntimeError('Mock mode detected in wheel testing - native libraries missing') | |
| else: | |
| lib_path = info.get('library_path', 'Unknown') | |
| print(f'[SUCCESS] Native library loaded: {lib_path}') | |
| # Test Windows testable plugins (CI containers don't have GPU for radiation/energybalance testing) | |
| # We build GPU plugins but can't test them in CI environment | |
| available_plugins = info.get('available_plugins', []) | |
| testable_plugins = ['weberpenntree', 'visualizer', 'photosynthesis', 'solarposition', 'stomatalconductance', 'plantarchitecture'] | |
| built_plugins = ['weberpenntree', 'visualizer', 'photosynthesis', 'solarposition', 'stomatalconductance', 'energybalance', 'radiation', 'plantarchitecture'] | |
| print(f'[INFO] Available plugins: {sorted(available_plugins)}') | |
| print(f'[INFO] Built plugins (expected): {sorted(built_plugins)}') | |
| print(f'[INFO] Testable plugins (in CI): {sorted(testable_plugins)}') | |
| # Check that we have at least the testable plugins (GPU plugins may not be testable in CI) | |
| missing_testable = set(testable_plugins) - set(available_plugins) | |
| if missing_testable: | |
| print(f'[FAILED] Missing testable plugins: {sorted(missing_testable)}') | |
| raise RuntimeError(f'Missing testable plugins: {missing_testable}') | |
| else: | |
| print(f'[SUCCESS] All {len(testable_plugins)} testable plugins are available') | |
| # Report on GPU plugins (built but may not be testable in CI) | |
| gpu_plugins = ['energybalance', 'radiation'] | |
| available_gpu = [p for p in gpu_plugins if p in available_plugins] | |
| if available_gpu: | |
| print(f'[INFO] GPU plugins available for testing: {sorted(available_gpu)}') | |
| else: | |
| print(f'[INFO] GPU plugins not available for CI testing (expected in containerized environment)') | |
| # Asset validation (non-critical, allow failure) | |
| try: | |
| from pyhelios.assets import get_asset_manager | |
| manager = get_asset_manager() | |
| helios_build = manager._get_helios_build_path() | |
| if helios_build: | |
| print(f'[INFO] HELIOS_BUILD assets: {helios_build}') | |
| else: | |
| print('[INFO] HELIOS_BUILD assets not found (acceptable in wheel testing)') | |
| except Exception as e: | |
| print(f'[INFO] Asset validation skipped: {e}') | |
| # Critical test: primitive data operations that fail in CI | |
| print('[CRITICAL-TEST] Testing primitive data operations...') | |
| from pyhelios import Context | |
| ctx = Context() | |
| patch_uuid = ctx.addPatch() | |
| print(f'[CRITICAL-TEST] Created patch UUID: {patch_uuid}') | |
| ctx.setPrimitiveDataInt(patch_uuid, 'test_int', 42) | |
| print('[CRITICAL-TEST] setPrimitiveDataInt completed') | |
| exists = ctx.doesPrimitiveDataExist(patch_uuid, 'test_int') | |
| print(f'[CRITICAL-TEST] doesPrimitiveDataExist: {exists}') | |
| if not exists: | |
| print('[CRITICAL-TEST] REPRODUCED: Primitive data does not exist after setting!') | |
| print('[CRITICAL-TEST] This is the bug that causes CI test failures') | |
| print('[CRITICAL-TEST] Running comprehensive diagnostic...') | |
| try: | |
| exec(open('{project}/test_ci_diagnostic.py').read()) | |
| except Exception as diag_e: | |
| print(f'[CRITICAL-TEST] Diagnostic failed: {diag_e}') | |
| print('[CRITICAL-TEST] Continuing to pytest to confirm...') | |
| else: | |
| value = ctx.getPrimitiveData(patch_uuid, 'test_int', int) | |
| print(f'[CRITICAL-TEST] Retrieved value: {value}') | |
| print('[CRITICAL-TEST] Primitive data operations working correctly in CI') | |
| print('[SUCCESS] cibuildwheel test completed successfully') | |
| except Exception as e: | |
| print(f'[FAILED] cibuildwheel test FAILED: {e}') | |
| import traceback | |
| traceback.print_exc() | |
| raise | |
| " && | |
| python -m pytest {project}/tests/ --tb=short -v -m "not requires_gpu" -s | |
| # Platform-specific test requirements (pytest-forked only works on Unix systems) | |
| CIBW_TEST_REQUIRES_LINUX: "pytest pytest-forked" | |
| CIBW_TEST_REQUIRES_MACOS: "pytest pytest-forked" | |
| CIBW_TEST_REQUIRES_WINDOWS: "pytest" | |
| # Skip problematic platforms for testing | |
| CIBW_TEST_SKIP: "*-win32 *-manylinux_i686 *-musllinux*" | |
| # Repair wheels to bundle dependencies | |
| CIBW_REPAIR_WHEEL_COMMAND_MACOS: "delocate-wheel --require-archs {delocate_archs} -w {dest_dir} -v {wheel}" | |
| CIBW_REPAIR_WHEEL_COMMAND_LINUX: "auditwheel repair -w {dest_dir} {wheel}" | |
| - name: Debug build failure | |
| if: failure() | |
| shell: bash | |
| run: | | |
| echo "=== Build Failure Diagnostics ===" | |
| echo "Build directory contents:" | |
| find pyhelios_build -name "*.so" -o -name "*.dll" -o -name "*.dylib" 2>/dev/null || echo "No build directory found" | |
| echo "" | |
| echo "Plugin directory contents:" | |
| ls -la pyhelios/plugins/ 2>/dev/null || echo "No plugins directory found" | |
| echo "" | |
| echo "Wheel directory contents:" | |
| ls -la wheelhouse/ 2>/dev/null || echo "No wheelhouse directory found" | |
| echo "" | |
| echo "Python environment:" | |
| python --version | |
| pip list | grep -E "(cibuildwheel|auditwheel|delocate)" || echo "Wheel tools not found" | |
| - name: Validate wheel contents | |
| if: always() # Run even if build partially failed | |
| shell: bash | |
| run: | | |
| echo "=== Wheel Content Validation ===" | |
| for wheel in wheelhouse/*.whl; do | |
| if [ -f "$wheel" ]; then | |
| echo "Validating: $(basename "$wheel")" | |
| # Check wheel contains both Python files and native libraries | |
| python .github/scripts/validate_wheel.py "$wheel" | |
| else | |
| echo "No wheels found to validate" | |
| fi | |
| done | |
| - name: Upload wheels as artifacts | |
| uses: actions/upload-artifact@v4 | |
| with: | |
| name: wheels-${{ matrix.os }}-${{ matrix.arch }} | |
| path: wheelhouse/*.whl | |
| retention-days: 7 | |
| test_wheels: | |
| name: Test wheels on ${{ matrix.os }} Python ${{ matrix.python-version }} | |
| runs-on: ${{ matrix.os }} | |
| timeout-minutes: 30 | |
| needs: build_wheels | |
| strategy: | |
| matrix: | |
| include: | |
| - os: ubuntu-22.04 | |
| python-version: '3.8' | |
| - os: ubuntu-22.04 | |
| python-version: '3.11' | |
| - os: windows-2022 | |
| python-version: '3.8' | |
| - os: windows-2022 | |
| python-version: '3.11' | |
| - os: macos-14 | |
| python-version: '3.11' | |
| steps: | |
| - name: Checkout PyHelios for tests | |
| uses: actions/checkout@v4 | |
| - name: Set up Python ${{ matrix.python-version }} | |
| uses: actions/setup-python@v5 | |
| with: | |
| python-version: ${{ matrix.python-version }} | |
| - name: Download wheels | |
| uses: actions/download-artifact@v4 | |
| with: | |
| pattern: wheels-* | |
| merge-multiple: true | |
| path: wheelhouse | |
| - name: Install wheel and test dependencies | |
| shell: bash | |
| run: | | |
| python -m pip install --upgrade pip | |
| python -m pip install numpy pytest pyyaml | |
| # Install pytest-forked only on Unix systems (Windows doesn't support fork()) | |
| if [ "$RUNNER_OS" != "Windows" ]; then | |
| python -m pip install pytest-forked | |
| fi | |
| python -m pip install --no-index --find-links wheelhouse pyhelios3d | |
| - name: Test wheel functionality | |
| shell: bash | |
| run: | | |
| # Create cross-platform temporary directory for isolated testing | |
| if [ "$RUNNER_OS" == "Windows" ]; then | |
| ISOLATED_DIR="$RUNNER_TEMP/wheel_test" | |
| else | |
| ISOLATED_DIR="/tmp/wheel_test" | |
| fi | |
| mkdir -p "$ISOLATED_DIR" | |
| cd "$ISOLATED_DIR" | |
| echo "Testing wheel from isolated directory: $ISOLATED_DIR" | |
| echo "Current directory: $(pwd)" | |
| # Test wheel import in isolation (not contaminated by source code) | |
| python "$GITHUB_WORKSPACE/.github/scripts/test_wheel_import.py" | |
| # Copy complete test suite to isolated directory for clean wheel testing | |
| echo "Copying test suite to isolated directory for clean wheel testing" | |
| mkdir -p "$ISOLATED_DIR/tests" | |
| cp "$GITHUB_WORKSPACE/tests/test_"*.py "$ISOLATED_DIR/tests/" | |
| cp "$GITHUB_WORKSPACE/tests/conftest.py" "$ISOLATED_DIR/tests/" | |
| # Create platform-appropriate pytest.ini (remove --forked on Windows) | |
| if [ "$RUNNER_OS" != "Windows" ]; then | |
| cp "$GITHUB_WORKSPACE/pytest.ini" "$ISOLATED_DIR/" | |
| else | |
| # Create Windows-compatible pytest.ini without --forked | |
| sed '/--forked/d' "$GITHUB_WORKSPACE/pytest.ini" > "$ISOLATED_DIR/pytest.ini" 2>/dev/null || echo "# Wheel testing" > "$ISOLATED_DIR/pytest.ini" | |
| fi | |
| touch "$ISOLATED_DIR/tests/__init__.py" | |
| # Run pytest in isolated directory against copied test suite (tests real user import experience) | |
| cd "$ISOLATED_DIR" | |
| echo "Running pytest in isolated directory with copied test suite: $(pwd)" | |
| # Use forked execution on Unix systems for subprocess isolation (Windows doesn't support fork()) | |
| if [ "$RUNNER_OS" != "Windows" ]; then | |
| echo "Using forked execution for subprocess isolation" | |
| python -m pytest | |
| else | |
| echo "Using standard execution (Windows doesn't support fork())" | |
| python -m pytest | |
| fi | |
  # Boot the self-hosted GPU EC2 instance before GPU wheel testing.
  # Gated to release-tag pushes only (refs/tags/v*), matching test_gpu_wheels.
  start-gpu:
    name: Start GPU instance for wheel testing
    runs-on: ubuntu-latest
    needs: build_wheels
    if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/v')
    permissions:
      id-token: write  # required for OIDC federation with AWS (role assumption below)
      contents: read
    steps:
      - uses: aws-actions/configure-aws-credentials@v2
        with:
          role-to-assume: ${{ secrets.OIDC_ROLE_ARN }}
          aws-region: us-west-2
      # Start the instance, then block until EC2 reports it as running so the
      # self-hosted runner is available before test_gpu_wheels is scheduled.
      - run: |
          aws ec2 start-instances --instance-ids ${{ secrets.EC2_INSTANCE_ID_LINUX }}
          aws ec2 wait instance-running --instance-ids ${{ secrets.EC2_INSTANCE_ID_LINUX }}
  # Validate the built wheels on real GPU hardware (self-hosted Linux runner
  # brought up by start-gpu). Only runs for release-tag pushes.
  test_gpu_wheels:
    name: Test GPU wheels on self-hosted Linux runner
    runs-on: [self-hosted]
    needs: [build_wheels, start-gpu]
    if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/v')
    steps:
      # Checkout is needed only for the test suite / pytest.ini; imports are
      # later verified to resolve to the installed wheel, not this checkout.
      - name: Checkout PyHelios for tests
        uses: actions/checkout@v4
      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: '3.11'
      # Merge the per-platform wheel artifacts into a single wheelhouse/ dir.
      - name: Download wheels
        uses: actions/download-artifact@v4
        with:
          pattern: wheels-*
          merge-multiple: true
          path: wheelhouse
      # Install the freshly built wheel into a throwaway venv, then verify from
      # an isolated directory that `import pyhelios` resolves to site-packages.
      - name: Create fresh virtual environment and install wheel
        run: |
          set -euxo pipefail
          # Fresh venv
          VENV_DIR="/tmp/gpu_wheel_test_venv_$(date +%s)"
          echo "Creating fresh virtual environment: $VENV_DIR"
          python -m venv "$VENV_DIR"
          . "$VENV_DIR/bin/activate"
          echo "Installing dependencies in fresh environment"
          python -m pip install --upgrade pip
          python -m pip install numpy pytest pyyaml pytest-forked
          echo "Installing PyHelios from downloaded wheels"
          # IMPORTANT: use the workspace path explicitly while our CWD is NOT the workspace
          python -m pip install --no-index --find-links "$GITHUB_WORKSPACE/wheelhouse" pyhelios3d
          # Export the venv path early so later steps still have it even if a check fails
          echo "VENV_DIR=$VENV_DIR" >> "$GITHUB_ENV"
          # Verify from a completely isolated directory with isolated mode
          ISODIR="$(mktemp -d /tmp/pyhelios_iso.XXXXXX)"
          cd "$ISODIR"
          # Extra safety: clear PYTHONPATH and avoid user site
          export PYTHONPATH=
          export PYTHONNOUSERSITE=1
          echo "=== Installation Verification (isolated) ==="
          # -I = Python isolated mode: ignores PYTHONPATH, user site and CWD.
          "$VENV_DIR/bin/python" -I -c "
          import sys, importlib
          print(f'Python: {sys.executable}')
          print(f'sys.path[0]: {sys.path[0]!r}')
          import pyhelios
          print(f'PyHelios version: {pyhelios.__version__}')
          print(f'PyHelios location: {pyhelios.__file__}')
          if '/site-packages/' not in pyhelios.__file__.replace('\\\\','/'):
              raise SystemExit('FAIL: import resolved to non-site-packages location')
          print('OK: imported from wheel in site-packages')
          "
      # End-to-end GPU wheel validation: import check with full environment
      # isolation, deep native-library diagnostics (symbols, deps, ctypes),
      # GPU plugin availability, then the full copied test suite.
      # NOTE(review): the leading indentation of the embedded Python program
      # below was reconstructed (the original formatting was lost); verify the
      # nesting against the original file before relying on exact control flow.
      - name: Test GPU wheel functionality
        run: |
          # CRITICAL: Move to completely isolated directory BEFORE any Python operations
          # This ensures Python cannot find source files in $GITHUB_WORKSPACE
          ISOLATED_DIR="/tmp/gpu_wheel_test_$(date +%s)"
          mkdir -p "$ISOLATED_DIR"
          cd "$ISOLATED_DIR"
          echo "=== Testing GPU wheel from completely isolated directory ==="
          echo "Isolated directory: $ISOLATED_DIR"
          echo "Current directory: $(pwd)"
          echo "Source directory (should not affect testing): $GITHUB_WORKSPACE"
          # Activate the fresh virtual environment AFTER changing directory
          # (VENV_DIR comes from $GITHUB_ENV, set by the install step).
          source "$VENV_DIR/bin/activate"
          echo "Virtual environment: $VIRTUAL_ENV"
          echo "Python executable: $(which python)"
          # Clear ALL environment variables that could cause Python to find source files.
          # The workspace path is saved first so the test-suite copy below still works.
          SOURCE_WORKSPACE_SAVE="$GITHUB_WORKSPACE"
          export SOURCE_WORKSPACE_SAVE
          unset GITHUB_WORKSPACE
          unset RUNNER_WORKSPACE
          unset GITHUB_WORKSPACE_PATH
          unset PYHELIOS_DEV_MODE
          unset PYHELIOS_BUILD_DIR
          # CRITICAL: Complete Python environment isolation (based on research)
          export PYTHONDONTWRITEBYTECODE=1
          export PYTHONPATH="" # Critical: Clear PYTHONPATH completely
          export PYTHONNOUSERSITE=1 # Don't use user site-packages
          # Test wheel import using ABSOLUTE PATH to virtual environment Python
          # This bypasses Python's current directory import priority completely
          echo "=== Testing PyHelios import with complete isolation ==="
          "$VENV_DIR/bin/python" -I -c "
          import sys, os
          print(f'=== GPU Wheel Test Environment (Isolated Mode) ===')
          print(f'Python: {sys.executable}')
          print(f'Platform: {sys.platform}')
          print(f'Working directory: {os.getcwd()}')
          print(f'sys.path verification:')
          workspace = os.environ.get('SOURCE_WORKSPACE_SAVE') or os.environ.get('GITHUB_WORKSPACE')
          def is_contaminated(p: str) -> bool:
              if not p:
                  return False
              # flag only if path is inside the actual repo checkout
              if workspace and p.startswith(workspace):
                  return True
              # also flag if it's directly under the repo name pattern inside _work
              return '/_work/PyHelios/PyHelios' in p
          source_contamination = [p for p in sys.path if is_contaminated(p)]
          if source_contamination:
              print(f'[FATAL] Source directory contamination detected: {source_contamination}')
              raise RuntimeError('Python import path contaminated with source directories')
          else:
              print('[SUCCESS] No source directory contamination in sys.path')
          try:
              import pyhelios
              print(f'[SUCCESS] PyHelios3D {pyhelios.__version__} imported successfully')
              # CRITICAL: Verify we imported from virtual environment, not source
              pyhelios_path = pyhelios.__file__
              print(f'[INFO] PyHelios imported from: {pyhelios_path}')
              if '/site-packages/' not in pyhelios_path:
                  print(f'[FATAL] PyHelios imported from source directory instead of virtual environment!')
                  print(f'Expected path containing: /site-packages/')
                  print(f'Actual path: {pyhelios_path}')
                  raise RuntimeError('PyHelios imported from source directory - virtual environment isolation failed')
              else:
                  print('[SUCCESS] PyHelios correctly imported from virtual environment site-packages')
              # Test native library functionality with focus on GPU plugins
              from pyhelios.plugins import get_plugin_info
              info = get_plugin_info()
              print(f'[SUCCESS] Platform: {info[\"platform\"]}')
              mock_mode = info.get('is_mock', True)
              print(f'[SUCCESS] Mock mode: {mock_mode}')
              if mock_mode:
                  print('[FAILED] Mock mode detected - GPU wheel should have native libraries!')
                  raise RuntimeError('Mock mode detected in GPU wheel testing - native libraries missing')
              else:
                  lib_path = info.get('library_path', 'Unknown')
                  print(f'[SUCCESS] Native library loaded: {lib_path}')
                  # DETAILED PLUGIN DETECTION DEBUG - Check PlantArchitecture specifically
                  print('[DEBUG] Performing detailed plugin detection...')
                  from pyhelios.plugins.loader import get_loader
                  loader = get_loader()
                  library = loader.library
                  # Test PlantArchitecture detection specifically
                  plantarch_functions = ['createPlantArchitecture']
                  print(f'[DEBUG] Testing PlantArchitecture functions: {plantarch_functions}')
                  for func_name in plantarch_functions:
                      try:
                          func = getattr(library, func_name)
                          print(f'[DEBUG] ✓ Found function: {func_name} -> {func}')
                      except AttributeError as e:
                          print(f'[DEBUG] ✗ Missing function: {func_name} -> AttributeError: {e}')
                      except Exception as e:
                          print(f'[DEBUG] ✗ Error accessing function: {func_name} -> {type(e).__name__}: {e}')
                      # Also test hasattr for comparison
                      has_attr = hasattr(library, func_name)
                      print(f'[DEBUG] hasattr(library, \"{func_name}\"): {has_attr}')
                  # COMPREHENSIVE DEBUGGING: Check everything about the library and environment
                  print(f'[DEBUG] Library file path: {lib_path}')
                  if os.path.exists(lib_path):
                      print(f'[DEBUG] Library file exists and size: {os.path.getsize(lib_path)} bytes')
                      import subprocess
                      # 1. Check if createPlantArchitecture symbol exists in the library file
                      print('[DEBUG] Checking symbol table for createPlantArchitecture...')
                      try:
                          # Check dynamic symbols
                          result = subprocess.run(['nm', '-D', lib_path], capture_output=True, text=True, timeout=15)
                          if result.returncode == 0:
                              plantarch_symbols = [line for line in result.stdout.split('\n') if 'createPlantArchitecture' in line]
                              if plantarch_symbols:
                                  print(f'[DEBUG] ✓ Found createPlantArchitecture in dynamic symbols:')
                                  for sym in plantarch_symbols:
                                      print(f'[DEBUG] {sym}')
                              else:
                                  print('[DEBUG] ✗ createPlantArchitecture NOT found in dynamic symbols')
                          else:
                              print(f'[DEBUG] nm -D failed: {result.stderr}')
                      except Exception as e:
                          print(f'[DEBUG] Could not run nm -D: {e}')
                      # 2. Check all symbols (not just dynamic)
                      try:
                          result = subprocess.run(['nm', lib_path], capture_output=True, text=True, timeout=15)
                          if result.returncode == 0:
                              plantarch_symbols = [line for line in result.stdout.split('\n') if 'createPlantArchitecture' in line]
                              if plantarch_symbols:
                                  print(f'[DEBUG] ✓ Found createPlantArchitecture in all symbols:')
                                  for sym in plantarch_symbols:
                                      print(f'[DEBUG] {sym}')
                              else:
                                  print('[DEBUG] ✗ createPlantArchitecture NOT found in any symbols')
                          else:
                              print(f'[DEBUG] nm failed: {result.stderr}')
                      except Exception as e:
                          print(f'[DEBUG] Could not run nm: {e}')
                      # 3. Check objdump for exported symbols
                      try:
                          result = subprocess.run(['objdump', '-T', lib_path], capture_output=True, text=True, timeout=15)
                          if result.returncode == 0:
                              plantarch_symbols = [line for line in result.stdout.split('\n') if 'createPlantArchitecture' in line]
                              if plantarch_symbols:
                                  print(f'[DEBUG] ✓ Found createPlantArchitecture in objdump -T:')
                                  for sym in plantarch_symbols:
                                      print(f'[DEBUG] {sym}')
                              else:
                                  print('[DEBUG] ✗ createPlantArchitecture NOT found in objdump -T')
                          else:
                              print(f'[DEBUG] objdump -T failed: {result.stderr}')
                      except Exception as e:
                          print(f'[DEBUG] Could not run objdump -T: {e}')
                      # 4. Search for any PlantArchitecture-related symbols
                      try:
                          result = subprocess.run(['nm', lib_path], capture_output=True, text=True, timeout=15)
                          if result.returncode == 0:
                              plantarch_related = [line for line in result.stdout.split('\n') if 'PlantArchitecture' in line or 'plantarchitecture' in line.lower()]
                              if plantarch_related:
                                  print(f'[DEBUG] Found {len(plantarch_related)} PlantArchitecture-related symbols:')
                                  for sym in plantarch_related[:10]: # First 10 only
                                      print(f'[DEBUG] {sym}')
                                  if len(plantarch_related) > 10:
                                      print(f'[DEBUG] ... and {len(plantarch_related) - 10} more')
                              else:
                                  print('[DEBUG] ✗ NO PlantArchitecture-related symbols found')
                          else:
                              print(f'[DEBUG] nm search failed: {result.stderr}')
                      except Exception as e:
                          print(f'[DEBUG] Could not search for PlantArchitecture symbols: {e}')
                      # 5. Check library dependencies using ldd
                      try:
                          result = subprocess.run(['ldd', lib_path], capture_output=True, text=True, timeout=10)
                          if result.returncode == 0:
                              print('[DEBUG] Library dependencies (ldd) - first 15 lines:')
                              for line in result.stdout.strip().split('\n')[:15]:
                                  print(f'[DEBUG] {line}')
                          else:
                              print(f'[DEBUG] ldd failed: {result.stderr}')
                      except Exception as e:
                          print(f'[DEBUG] Could not run ldd: {e}')
                      # 6. Check for missing dependencies
                      try:
                          result = subprocess.run(['ldd', lib_path], capture_output=True, text=True, timeout=10)
                          if result.returncode == 0:
                              missing_deps = [line for line in result.stdout.split('\n') if 'not found' in line]
                              if missing_deps:
                                  print(f'[DEBUG] ⚠️ MISSING DEPENDENCIES FOUND:')
                                  for dep in missing_deps:
                                      print(f'[DEBUG] {dep}')
                              else:
                                  print('[DEBUG] ✓ All dependencies resolved')
                          else:
                              print(f'[DEBUG] ldd dependency check failed: {result.stderr}')
                      except Exception as e:
                          print(f'[DEBUG] Could not check dependencies: {e}')
                      # 7. Check file type and architecture
                      try:
                          result = subprocess.run(['file', lib_path], capture_output=True, text=True, timeout=10)
                          if result.returncode == 0:
                              print(f'[DEBUG] Library file type: {result.stdout.strip()}')
                          else:
                              print(f'[DEBUG] file command failed: {result.stderr}')
                      except Exception as e:
                          print(f'[DEBUG] Could not run file command: {e}')
                  else:
                      print('[DEBUG] Library file does not exist!')
                  # 8. Environment debugging
                  print('[DEBUG] === ENVIRONMENT ANALYSIS ===')
                  print(f'[DEBUG] LD_LIBRARY_PATH: {os.environ.get(\"LD_LIBRARY_PATH\", \"NOT SET\")}')
                  print(f'[DEBUG] PYTHONPATH: {os.environ.get(\"PYTHONPATH\", \"NOT SET\")}')
                  print(f'[DEBUG] Current working directory: {os.getcwd()}')
                  print(f'[DEBUG] Python sys.path first 5 entries:')
                  import sys
                  for i, path in enumerate(sys.path[:5]):
                      print(f'[DEBUG] [{i}] {path}')
                  # 9. Check system info
                  try:
                      with open('/proc/version', 'r') as f:
                          print(f'[DEBUG] Kernel version: {f.read().strip()}')
                  except:
                      print('[DEBUG] Could not read kernel version')
                  try:
                      result = subprocess.run(['uname', '-a'], capture_output=True, text=True, timeout=5)
                      if result.returncode == 0:
                          print(f'[DEBUG] System info: {result.stdout.strip()}')
                  except:
                      print('[DEBUG] Could not get system info')
                  # 10. Check GPU/CUDA environment
                  try:
                      result = subprocess.run(['nvidia-smi', '--query-gpu=name,driver_version', '--format=csv,noheader'], capture_output=True, text=True, timeout=10)
                      if result.returncode == 0:
                          print(f'[DEBUG] GPU info: {result.stdout.strip()}')
                      else:
                          print(f'[DEBUG] nvidia-smi failed or no GPU: {result.stderr}')
                  except:
                      print('[DEBUG] nvidia-smi not available')
                  # 11. Check if CUDA libraries are interfering
                  cuda_libs = ['/usr/local/cuda/lib64', '/usr/lib/x86_64-linux-gnu']
                  for cuda_path in cuda_libs:
                      if os.path.exists(cuda_path):
                          try:
                              cuda_files = [f for f in os.listdir(cuda_path) if f.startswith('libcuda') and f.endswith('.so')]
                              if cuda_files:
                                  print(f'[DEBUG] CUDA libraries in {cuda_path}: {cuda_files[:5]}')
                          except:
                              pass
                  # 12. Test ctypes loading directly
                  print('[DEBUG] === DIRECT CTYPES LOADING TEST ===')
                  try:
                      import ctypes
                      direct_lib = ctypes.CDLL(lib_path)
                      print('[DEBUG] ✓ Direct ctypes.CDLL loading succeeded')
                      # Try to access the function directly
                      try:
                          func = direct_lib.createPlantArchitecture
                          print(f'[DEBUG] ✓ Direct access to createPlantArchitecture: {func}')
                      except AttributeError as e:
                          print(f'[DEBUG] ✗ Direct access failed: {e}')
                      # List some symbols we can access
                      known_functions = ['createContext', 'destroyContext', 'getPrimitiveCount']
                      accessible_funcs = []
                      for fname in known_functions:
                          try:
                              func = getattr(direct_lib, fname)
                              accessible_funcs.append(fname)
                          except:
                              pass
                      print(f'[DEBUG] Accessible known functions: {accessible_funcs}')
                  except Exception as e:
                      print(f'[DEBUG] ✗ Direct ctypes loading failed: {e}')
                  # Test GPU plugin availability (critical for GPU wheels)
                  available_plugins = info.get('available_plugins', [])
                  gpu_plugins = ['energybalance', 'radiation']
                  expected_plugins = ['weberpenntree', 'visualizer', 'photosynthesis', 'solarposition', 'stomatalconductance', 'plantarchitecture', 'energybalance', 'radiation']
                  print(f'[INFO] Available plugins: {sorted(available_plugins)}')
                  print(f'[INFO] Expected plugins: {sorted(expected_plugins)}')
                  print(f'[INFO] GPU plugins to test: {sorted(gpu_plugins)}')
                  # Check for missing expected plugins
                  missing_plugins = set(expected_plugins) - set(available_plugins)
                  if missing_plugins:
                      print(f'[WARNING] Missing expected plugins: {sorted(missing_plugins)}')
                      if 'plantarchitecture' in missing_plugins:
                          print('[CRITICAL] PlantArchitecture is missing! This is the bug we are investigating.')
                  available_gpu = [p for p in gpu_plugins if p in available_plugins]
                  if available_gpu:
                      print(f'[SUCCESS] GPU plugins available: {sorted(available_gpu)}')
                      # Test GPU functionality if radiation plugin is available
                      if 'radiation' in available_gpu:
                          print('[GPU-TEST] Testing radiation plugin...')
                          from pyhelios import Context, RadiationModel
                          ctx = Context()
                          # Test basic radiation model creation
                          with RadiationModel(ctx) as radiation:
                              radiation.addRadiationBand(\"SW\")
                          print('[GPU-TEST] RadiationModel created and band added successfully')
                  else:
                      print('[WARNING] No GPU plugins available - may be expected in containerized environment')
                      print('[INFO] GPU wheels built successfully but GPU hardware may not be available')
              print('[SUCCESS] GPU wheel test completed successfully')
          except Exception as e:
              print(f'[FAILED] GPU wheel test FAILED: {e}')
              import traceback
              traceback.print_exc()
              raise
          "
          # Copy and run complete test suite for comprehensive validation
          echo "Copying complete test suite to isolated directory"
          # Use the saved workspace path (we saved it before unsetting env vars)
          : "${SOURCE_WORKSPACE_SAVE:?SOURCE_WORKSPACE_SAVE missing}"
          mkdir -p "$ISOLATED_DIR/tests"
          cp "$SOURCE_WORKSPACE_SAVE/tests/test_"*.py "$ISOLATED_DIR/tests/"
          cp "$SOURCE_WORKSPACE_SAVE/tests/conftest.py" "$ISOLATED_DIR/tests/"
          cp "$SOURCE_WORKSPACE_SAVE/pytest.ini" "$ISOLATED_DIR/"
          touch "$ISOLATED_DIR/tests/__init__.py"
          # Run complete test suite in isolated directory with fresh virtual environment
          cd "$ISOLATED_DIR"
          echo "Running complete test suite in isolated directory: $(pwd)"
          echo "Using virtual environment: $VIRTUAL_ENV"
          # Clear environment variables that might confuse PyHelios library loading
          # (already unset earlier in this step; repeated defensively)
          unset GITHUB_WORKSPACE
          unset RUNNER_WORKSPACE
          unset PYHELIOS_DEV_MODE
          # Use proper environment isolation (research-backed best practice)
          export PYTHONDONTWRITEBYTECODE=1
          export PYTHONPATH="" # Clear PYTHONPATH completely
          # Verify we're using the correct environment with isolated mode
          python -I -c "
          import sys
          print(f'Python executable: {sys.executable}')
          print(f'Virtual environment isolated mode sys.path length: {len(sys.path)}')
          "
          # Test PyHelios import in isolated mode
          python -I -c "
          import pyhelios
          print(f'PyHelios version: {pyhelios.__version__}')
          print(f'PyHelios location: {pyhelios.__file__}')
          "
          # Run pytest with isolated mode for complete source isolation
          python -I -m pytest tests/ --tb=short -v -s
      # Remove the throwaway venv created by the install step.
      # VENV_DIR is available here because that step wrote it to $GITHUB_ENV.
      - name: Cleanup virtual environment
        if: always() # Run cleanup even if tests fail
        run: |
          if [ -n "$VENV_DIR" ] && [ -d "$VENV_DIR" ]; then
            echo "Cleaning up virtual environment: $VENV_DIR"
            rm -rf "$VENV_DIR"
            echo "Virtual environment cleanup completed"
          else
            echo "No virtual environment to clean up"
          fi
  # Shut the GPU EC2 instance back down after testing.
  # `always()` ensures the instance is stopped even when test_gpu_wheels fails,
  # so a red build cannot leave the instance running (and billing).
  stop-gpu:
    name: Stop GPU instance
    needs: test_gpu_wheels
    if: always() && (github.event_name == 'push' && startsWith(github.ref, 'refs/tags/v'))
    runs-on: ubuntu-latest
    permissions:
      id-token: write  # required for OIDC federation with AWS
      contents: read
    steps:
      - uses: aws-actions/configure-aws-credentials@v2
        with:
          role-to-assume: ${{ secrets.OIDC_ROLE_ARN }}
          aws-region: us-west-2
      # Stop the instance and block until EC2 confirms it is stopped.
      - run: |
          aws ec2 stop-instances --instance-ids ${{ secrets.EC2_INSTANCE_ID_LINUX }}
          aws ec2 wait instance-stopped --instance-ids ${{ secrets.EC2_INSTANCE_ID_LINUX }}
  # Upload all wheels to PyPI via trusted publishing (OIDC, no API token).
  # Gated on release-tag pushes and on all test jobs succeeding.
  publish:
    name: Publish to PyPI
    runs-on: ubuntu-latest
    # NOTE(review): test_wheels is not visible in this chunk of the workflow —
    # confirm that job is defined earlier in the file (its steps appear above).
    needs: [build_wheels, test_wheels, test_gpu_wheels]
    if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/v')
    environment:
      name: pypi
      url: https://pypi.org/p/pyhelios3d
    permissions:
      id-token: write # Required for trusted publishing
    steps:
      # Merge per-platform wheel artifacts into one directory for upload.
      - name: Download all wheels
        uses: actions/download-artifact@v4
        with:
          pattern: wheels-*
          merge-multiple: true
          path: wheelhouse
      - name: Publish to PyPI
        uses: pypa/gh-action-pypi-publish@release/v1
        with:
          packages-dir: wheelhouse/
          verify-metadata: false # Skip metadata verification due to dynamic versioning