fix: Update macOS SDKROOT path in Python bindings workflow #232
| on: | |
| push: | |
| branches: | |
| - develop | |
| tags: | |
| - "v*" | |
| schedule: | |
| # Run nightly at 1 AM UTC, i.e. 8 PM ET during standard time (EST) and 9 PM ET during daylight saving time (EDT) | |
| - cron: '0 1 * * *' | |
| workflow_dispatch: | |
| inputs: | |
| publish_to_s3: | |
| description: "Force S3 publishing even when not on develop" | |
| required: false | |
| default: "false" | |
| skip_docker_trigger: | |
| description: "Skip downstream docker workflow trigger" | |
| required: false | |
| default: "false" | |
| jobs: | |
| description: "Comma-separated list of jobs to run (e.g. 'linux-build,windows-build'). Leave empty to run all." | |
| required: false | |
| default: "" | |
| workflow_call: | |
| inputs: | |
| publish_to_s3: | |
| type: string | |
| required: false | |
| default: "false" | |
| skip_docker_trigger: | |
| type: string | |
| required: false | |
| default: "false" | |
| jobs: | |
| type: string | |
| required: false | |
| default: "" | |
| concurrency: | |
| group: full-build-${{ github.ref }} | |
| cancel-in-progress: false | |
| permissions: | |
| contents: read | |
| actions: read | |
| checks: write | |
| pull-requests: write | |
| packages: write | |
| id-token: write | |
| env: | |
| GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} | |
| BUILD_TYPE: Release | |
| OPENSTUDIO_BUILD: build | |
| OPENSTUDIO_SOURCE: OpenStudio | |
| PYTHON_REQUIRED_VERSION: "3.12.2" | |
| SDKROOT: /Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk | |
| AWS_S3_BUCKET: openstudio-ci-builds | |
| TEST_DASHBOARD_RELATIVE: Testing/dashboard/test-dashboard.md | |
| CCACHE_SLOPPINESS: pch_defines,time_macros,include_file_mtime,include_file_ctime | |
| CCACHE_BASEDIR: ${{ github.workspace }} | |
| CCACHE_COMPRESS: "true" | |
| CCACHE_COMPRESSLEVEL: "3" | |
| CCACHE_MAXSIZE: "10G" | |
| CCACHE_DEPEND: "true" | |
| CCACHE_NOHASHDIR: "true" | |
| SCCACHE_GHA_ENABLED: "false" | |
| SCCACHE_DIR: "${{ github.workspace }}\\.sccache" | |
| SCCACHE_CACHE_SIZE: "10G" | |
| jobs: | |
| linux-build: | |
| name: Build ${{ matrix.pretty }} | |
| if: "!inputs.jobs || contains(inputs.jobs, 'linux-build')" | |
| runs-on: ${{ matrix.os }} | |
| container: | |
| image: ${{ matrix.container_image }} | |
| options: ${{ matrix.container_options }} --volume /mnt:/mnt | |
| strategy: | |
| fail-fast: false | |
| matrix: | |
| include: | |
| - platform: centos-9-x64 | |
| pretty: CentOS 9 (AlmaLinux) x64 | |
| os: ubuntu-22.04 | |
| container_image: nrel/openstudio-cmake-tools:almalinux9-main | |
| container_options: "--privileged -u root -e LANG=en_US.UTF-8" | |
| test_suffix: CentOS-9 | |
| pip_package: false | |
| docker_trigger: false | |
| upload_globs: | | |
| *.rpm | |
| *OpenStudio*x86_64.tar.gz | |
| cpack_generators: "RPM;TGZ" | |
| max_jobs: 4 | |
| exclude_regex: "^(BCLFixture.RemoteBCLMetaSearchTest|BCLFixture.BCLMeasure)$" | |
| - platform: ubuntu-2204-x64 | |
| pretty: Ubuntu 22.04 x64 | |
| os: ubuntu-22.04 | |
| container_image: nrel/openstudio-cmake-tools:jammy-main | |
| container_options: "--privileged -u root -e LANG=en_US.UTF-8" | |
| test_suffix: Ubuntu-2204 | |
| pip_package: true | |
| docker_trigger: true | |
| upload_globs: | | |
| *.deb | |
| *OpenStudio*x86_64.tar.gz | |
| cpack_generators: "DEB;TGZ" | |
| max_jobs: 3 | |
| exclude_regex: "^(ModelFixture.PythonPluginInstance_NotPYFile|BCLFixture.BCLMeasure)$" | |
| - platform: ubuntu-2404-x64 | |
| pretty: Ubuntu 24.04 x64 | |
| os: ubuntu-24.04 | |
| container_image: nrel/openstudio-cmake-tools:noble-main | |
| container_options: "--privileged -u root -e LANG=en_US.UTF-8" | |
| test_suffix: Ubuntu-2404 | |
| pip_package: false | |
| docker_trigger: false | |
| upload_globs: | | |
| *.deb | |
| *OpenStudio*x86_64.tar.gz | |
| cpack_generators: "DEB;TGZ" | |
| max_jobs: 4 | |
| exclude_regex: "^BCLFixture.BCLMeasure$" | |
| - platform: ubuntu-2204-arm64 | |
| pretty: Ubuntu 22.04 ARM64 | |
| os: ubuntu-22.04-arm | |
| container_image: nrel/openstudio-cmake-tools:jammy-main | |
| container_options: "--privileged -u root -e LANG=en_US.UTF-8" | |
| test_suffix: Ubuntu-2204-ARM64 | |
| pip_package: false | |
| docker_trigger: false | |
| upload_globs: | | |
| *.deb | |
| *OpenStudio*arm64.tar.gz | |
| cpack_generators: "DEB;TGZ" | |
| max_jobs: 3 | |
| exclude_regex: "^(GeometryFixture.Plane_RayIntersection|ISOModelFixture.SimModel|SqlFileFixture.AnnualTotalCosts|OpenStudioCLI.*test_measure_manager|BCLFixture.BCLMeasure)$" | |
| - platform: ubuntu-2404-arm64 | |
| pretty: Ubuntu 24.04 ARM64 | |
| os: ubuntu-24.04-arm | |
| container_image: nrel/openstudio-cmake-tools:noble-main | |
| container_options: "--privileged -u root -e LANG=en_US.UTF-8" | |
| test_suffix: Ubuntu-2404-ARM64 | |
| pip_package: false | |
| docker_trigger: false | |
| upload_globs: | | |
| *.deb | |
| *OpenStudio*arm64.tar.gz | |
| cpack_generators: "DEB;TGZ" | |
| max_jobs: 4 | |
| exclude_regex: "^(GeometryFixture.Plane_RayIntersection|ISOModelFixture.SimModel|SqlFileFixture.AnnualTotalCosts|OpenStudioCLI.*test_measure_manager|BCLFixture.BCLMeasure)$" | |
| defaults: | |
| run: | |
| shell: bash | |
| env: | |
| MAX_BUILD_THREADS: ${{ matrix.max_jobs }} | |
| CTEST_PARALLEL_LEVEL: ${{ matrix.max_jobs }} | |
| permissions: | |
| # Needed permission to upload the release asset | |
| contents: write | |
| steps: | |
| - name: Enable Swap and Verify space | |
| run: | | |
| echo "Enabling Swap..." | |
| # Create and enable 8GB swap file to prevent OOM kills | |
| dd if=/dev/zero of=/mnt/swapfile_extra bs=1M count=8192 status=progress | |
| chmod 600 /mnt/swapfile_extra | |
| mkswap /mnt/swapfile_extra | |
| swapon /mnt/swapfile_extra | |
| echo | |
| echo "Memory and swap:" | |
| # Check if free exists before running it, or ignore failure | |
| if command -v free >/dev/null 2>&1; then | |
| free -h | |
| else | |
| echo "free command not available" | |
| fi | |
| echo | |
| swapon --show || true | |
| echo | |
| echo "Available storage:" | |
| df -h || true | |
| echo | |
| - name: Checkout repository | |
| uses: actions/checkout@v6 | |
| with: | |
| fetch-depth: 1 | |
| - name: Restore ccache cache | |
| uses: actions/cache@v4 | |
| with: | |
| path: ~/.ccache | |
| key: ccache-${{ matrix.os }}-${{ matrix.platform }}-${{ hashFiles('conan.lock') }} | |
| restore-keys: | | |
| ccache-${{ matrix.os }}-${{ matrix.platform }}- | |
| - name: Restore Conan cache | |
| uses: actions/cache@v4 | |
| with: | |
| path: ~/.conan2 | |
| key: conan-${{ matrix.os }}-${{ matrix.platform }}-${{ hashFiles('conan.lock') }} | |
| restore-keys: | | |
| conan-${{ matrix.os }}-${{ matrix.platform }}- | |
| - name: Prepare workspace | |
| run: | | |
| set -euo pipefail | |
| # Git safe directory | |
| if command -v git >/dev/null 2>&1; then | |
| git config --global --add safe.directory '*' | |
| fi | |
| # Use /mnt for build and caches to avoid running out of space on root partition | |
| prepare_dir() { | |
| local target=$1 | |
| local dest=$2 | |
| mkdir -p "$dest" | |
| if [ -d "$target" ] && [ ! -L "$target" ]; then | |
| echo "Moving existing $target to $dest" | |
| cp -a "$target/." "$dest/" | |
| rm -rf "$target" | |
| fi | |
| mkdir -p "$(dirname "$target")" | |
| ln -sfn "$dest" "$target" | |
| } | |
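| # Note: prepare_dir leaves the original path in place as a symlink into /mnt, | |
| # so later steps can keep referring to the usual workspace and home paths. | |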
| prepare_dir "$GITHUB_WORKSPACE/${{ env.OPENSTUDIO_BUILD }}" "/mnt/build" | |
| prepare_dir "$HOME/.ccache" "/mnt/.ccache" | |
| prepare_dir "$HOME/.conan2" "/mnt/.conan2" | |
| if command -v ccache >/dev/null 2>&1; then | |
| ccache -M ${{ env.CCACHE_MAXSIZE }} || true | |
| echo "Configured ccache:"; ccache -s | sed -n '1,10p' | |
| fi | |
| - name: Resolve build path | |
| id: build_path | |
| run: | | |
| # actions/upload-artifact@v4 does not follow symlinks at the start of a path. | |
| # We resolve the build directory to its real location to ensure globbing works. | |
| REAL_PATH=$(readlink -f "${{ env.OPENSTUDIO_BUILD }}") | |
| echo "path=$REAL_PATH" >> $GITHUB_OUTPUT | |
| - name: Fix CMake Path (CentOS) | |
| if: matrix.platform == 'centos-9-x64' | |
| run: | | |
| if [ -d /usr/local/cmake/bin ]; then | |
| echo "Adding /usr/local/cmake/bin to PATH" | |
| echo "/usr/local/cmake/bin" >> $GITHUB_PATH | |
| fi | |
| - name: Cache External Dependencies | |
| uses: actions/cache@v4 | |
| with: | |
| path: | | |
| ${{ env.OPENSTUDIO_BUILD }}/EnergyPlus*.tar.gz | |
| ${{ env.OPENSTUDIO_BUILD }}/EnergyPlus*.zip | |
| ${{ env.OPENSTUDIO_BUILD }}/radiance*.tar.gz | |
| ${{ env.OPENSTUDIO_BUILD }}/radiance*.zip | |
| ${{ env.OPENSTUDIO_BUILD }}/openstudio*gems*.tar.gz | |
| key: external-deps-${{ matrix.os }}-${{ hashFiles('conan.lock') }} | |
| restore-keys: | | |
| external-deps-${{ matrix.os }}- | |
| - name: Restore Generated Embedded Files | |
| uses: actions/cache@v4 | |
| with: | |
| path: | | |
| ${{ env.OPENSTUDIO_BUILD }}/src/*/embedded_files | |
| ${{ env.OPENSTUDIO_BUILD }}/ruby/engine/embedded_files | |
| key: embedded-files-${{ matrix.os }}-${{ hashFiles('resources/**', 'ruby/engine/**', 'src/airflow/**', 'src/energyplus/**', 'src/gbxml/**', 'src/isomodel/**', 'src/model/**', 'src/radiance/**', 'src/sdd/**', 'src/utilities/**') }} | |
| restore-keys: | | |
| embedded-files-${{ matrix.os }}- | |
| - name: Configure Conan remotes | |
| run: | | |
| set -euo pipefail | |
| conan remote remove conancenter || true | |
| conan remote add conancenter https://center2.conan.io | |
| conan remote remove nrel-v2 || true | |
| conan remote add nrel-v2 https://conan.openstudio.net/artifactory/api/conan/conan-v2 | |
| if [ ! -f "$HOME/.conan2/profiles/default" ]; then | |
| conan profile detect | |
| fi | |
| - name: Install CA Certificates | |
| if: startsWith(matrix.platform, 'ubuntu') | |
| run: | | |
| apt-get update || true | |
| apt-get install -y ca-certificates gnupg wget | |
| wget -O - https://apt.kitware.com/keys/kitware-archive-latest.asc 2>/dev/null | gpg --dearmor - | tee /etc/apt/trusted.gpg.d/kitware.gpg >/dev/null | |
| apt-get update | |
| - name: Conan install | |
| run: | | |
| set -euo pipefail | |
| conan install . \ | |
| --output-folder="${{ env.OPENSTUDIO_BUILD }}" \ | |
| --build=missing \ | |
| -c tools.cmake.cmaketoolchain:generator=Ninja \ | |
| -s compiler.cppstd=20 \ | |
| -s build_type=${{ env.BUILD_TYPE }} | |
| - name: Locate Ruby | |
| run: | | |
| ruby_path=$(command -v ruby) | |
| echo "SYSTEM_RUBY_PATH=$ruby_path" >> $GITHUB_ENV | |
| - name: Locate Python | |
| run: | | |
| python_path=$(command -v python3) | |
| echo "SYSTEM_PYTHON_PATH=$python_path" >> $GITHUB_ENV | |
| - name: Configure with CMake | |
| working-directory: ${{ env.OPENSTUDIO_BUILD }} | |
| run: | | |
| set -euo pipefail | |
| . ./conanbuild.sh | |
| # Use absolute path for ccache to avoid resolution issues in containers with symlinked build dirs | |
| CCACHE_ARGS=() | |
| if command -v ccache >/dev/null 2>&1; then | |
| CCACHE_EXE=$(command -v ccache) | |
| CCACHE_ARGS=("-DCMAKE_C_COMPILER_LAUNCHER=$CCACHE_EXE" "-DCMAKE_CXX_COMPILER_LAUNCHER=$CCACHE_EXE") | |
| fi | |
| cmake -G Ninja \ | |
| "${CCACHE_ARGS[@]}" \ | |
| -DCMAKE_TOOLCHAIN_FILE=conan_toolchain.cmake \ | |
| -DCMAKE_BUILD_TYPE:STRING=${{ env.BUILD_TYPE }} \ | |
| -DBUILD_TESTING:BOOL=ON \ | |
| -DCPACK_GENERATOR:STRING="${{ matrix.cpack_generators }}" \ | |
| -DBUILD_PYTHON_BINDINGS:BOOL=ON \ | |
| -DBUILD_PYTHON_PIP_PACKAGE:BOOL=${{ matrix.pip_package }} \ | |
| -DPython_EXECUTABLE:FILEPATH="$SYSTEM_PYTHON_PATH" \ | |
| -DPYTHON_VERSION:STRING=${{ env.PYTHON_REQUIRED_VERSION }} \ | |
| -DSYSTEM_RUBY_EXECUTABLE="$SYSTEM_RUBY_PATH" \ | |
| -DCMAKE_JOB_POOL_LINK:STRING=console \ | |
| "$GITHUB_WORKSPACE" | |
| - name: Build with Ninja | |
| working-directory: ${{ env.OPENSTUDIO_BUILD }} | |
| run: | | |
| set -euo pipefail | |
| . ./conanbuild.sh | |
| export NINJA_STATUS="[%f/%t | %es elapsed | %o objs/sec]" | |
| # Start resource monitor (records RSS samples for later summary) | |
| echo "timestamp PID RSS_KB COMM" > mem_samples.log | |
| ( while true; do | |
| sleep 60; | |
| stamp=$(date -u +"%Y-%m-%dT%H:%M:%SZ"); | |
| if command -v ps >/dev/null 2>&1; then ps -eo pid,rsz,comm --sort=-rsz | head -n 5 | awk -v s="$stamp" '{print s" "$1" "$2" "$3}' >> mem_samples.log; fi; | |
| done ) & | |
| HB_PID=$! | |
| cmake --build . --parallel ${{ matrix.max_jobs }} 2>&1 | tee build.log | |
| BUILD_EXIT=${PIPESTATUS[0]} | |
| kill $HB_PID || true | |
| command -v ninja >/dev/null 2>&1 && ninja -d stats || true | |
| exit $BUILD_EXIT | |
| - name: Run CTest suite | |
| id: ctest | |
| continue-on-error: true | |
| working-directory: ${{ env.OPENSTUDIO_BUILD }} | |
| run: | | |
| set -euo pipefail | |
| . ./conanbuild.sh | |
| # Conflicting tests that must run sequentially | |
| resource_locked_tests="ModelFixture.ScheduleFile|ModelFixture.ScheduleFileAltCtor|ModelFixture.PythonPluginInstance|ModelFixture.PythonPluginInstance_NotPYFile|ModelFixture.PythonPluginInstance_ClassNameValidation|ModelFixture.ChillerElectricASHRAE205_GettersSetters|ModelFixture.ChillerElectricASHRAE205_Loops|ModelFixture.ChillerElectricASHRAE205_NotCBORFile|ModelFixture.ChillerElectricASHRAE205_Clone" | |
| overall_exit_code=0 | |
| exclude_regex="${{ matrix.exclude_regex }}" | |
| echo "Running sequential tests..." | |
| export CTEST_OUTPUT_ON_FAILURE=1 | |
| if [ "$exclude_regex" == '""' ] || [ -z "$exclude_regex" ]; then | |
| ctest -C ${{ env.BUILD_TYPE }} -R "^($resource_locked_tests)$" -j 1 -T test || overall_exit_code=1 | |
| else | |
| ctest -C ${{ env.BUILD_TYPE }} -R "^($resource_locked_tests)$" -E "$exclude_regex" -j 1 -T test || overall_exit_code=1 | |
| fi | |
| echo "Running all other tests in parallel..." | |
| if [ "$exclude_regex" == '""' ] || [ -z "$exclude_regex" ]; then | |
| final_exclude="^($resource_locked_tests)$" | |
| else | |
| final_exclude="($exclude_regex|$resource_locked_tests)" | |
| fi | |
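| # The resource-locked tests already ran sequentially above, so they are always excluded from the parallel run | |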
| ctest -C ${{ env.BUILD_TYPE }} -E "$final_exclude" -j ${{ matrix.max_jobs }} -T test || overall_exit_code=$? | |
| if [ $overall_exit_code -ne 0 ]; then | |
| echo "Rerunning failing tests..." | |
| ctest -C ${{ env.BUILD_TYPE }} --rerun-failed -T test --no-compress-output && overall_exit_code=0 || overall_exit_code=$? | |
| fi | |
| echo "exit_code=${overall_exit_code}" >> $GITHUB_OUTPUT | |
| - name: Wait for network stability | |
| if: always() | |
| run: sleep 5 | |
| - name: Upload build diagnostics | |
| if: always() | |
| uses: actions/upload-artifact@v4 | |
| with: | |
| name: build-diag-${{ matrix.platform }}-${{ github.sha }} | |
| path: | | |
| ${{ steps.build_path.outputs.path }}/build.log | |
| ${{ steps.build_path.outputs.path }}/.ninja_log | |
| ${{ steps.build_path.outputs.path }}/CTestTestfile.cmake | |
| if-no-files-found: warn | |
| - name: Copy Testing tree with suffix | |
| if: always() | |
| working-directory: ${{ env.OPENSTUDIO_BUILD }} | |
| run: | | |
| set -euo pipefail | |
| if [ -d "Testing" ]; then | |
| cp -r Testing "Testing-${{ matrix.test_suffix }}" | |
| fi | |
| - name: Generate test summary | |
| if: always() | |
| working-directory: ${{ env.OPENSTUDIO_BUILD }} | |
| run: | | |
| set -euo pipefail | |
| # Generate a simple markdown summary from CTest results | |
| mkdir -p "$(dirname '${{ env.TEST_DASHBOARD_RELATIVE }}')" | |
| echo "# OpenStudio Test Results - ${{ matrix.test_suffix }}" > "${{ env.TEST_DASHBOARD_RELATIVE }}" | |
| echo "" >> "${{ env.TEST_DASHBOARD_RELATIVE }}" | |
| echo "**Build:** \`${{ github.sha }}\`" >> "${{ env.TEST_DASHBOARD_RELATIVE }}" | |
| echo "**Branch:** \`${{ github.ref_name }}\`" >> "${{ env.TEST_DASHBOARD_RELATIVE }}" | |
| echo "**Platform:** ${{ matrix.pretty }}" >> "${{ env.TEST_DASHBOARD_RELATIVE }}" | |
| echo "**Date:** $(date -u)" >> "${{ env.TEST_DASHBOARD_RELATIVE }}" | |
| echo "" >> "${{ env.TEST_DASHBOARD_RELATIVE }}" | |
| if [ -f Testing/Temporary/LastTest.log ]; then | |
| echo "## Test Log (Last 50 lines)" >> "${{ env.TEST_DASHBOARD_RELATIVE }}" | |
| echo '```' >> "${{ env.TEST_DASHBOARD_RELATIVE }}" | |
| tail -50 Testing/Temporary/LastTest.log >> "${{ env.TEST_DASHBOARD_RELATIVE }}" | |
| echo '```' >> "${{ env.TEST_DASHBOARD_RELATIVE }}" | |
| fi | |
| continue-on-error: true | |
| - name: Upload Testing artifact | |
| if: always() | |
| uses: actions/upload-artifact@v4 | |
| with: | |
| name: Testing-${{ matrix.platform }}-${{ github.sha }} | |
| path: | | |
| ${{ env.OPENSTUDIO_BUILD }}/Testing-${{ matrix.test_suffix }}/ | |
| ${{ env.OPENSTUDIO_BUILD }}/${{ env.TEST_DASHBOARD_RELATIVE }} | |
| - name: Create packages | |
| if: ${{ success() && !cancelled() }} | |
| working-directory: ${{ env.OPENSTUDIO_BUILD }} | |
| run: | | |
| set -euo pipefail | |
| . ./conanbuild.sh | |
| cpack -C ${{ env.BUILD_TYPE }} | |
| - name: Cleanup intermediate files | |
| if: always() | |
| working-directory: ${{ env.OPENSTUDIO_BUILD }} | |
| run: | | |
| find . -name "*.o" -type f -delete || true | |
| df -h . | |
| - name: Fail job on test failures | |
| if: ${{ steps.ctest.outputs.exit_code != '0' }} | |
| run: | | |
| echo "::error::CTest suite failed with exit code ${{ steps.ctest.outputs.exit_code }}" | |
| exit 1 | |
| - name: Upload DEB installer | |
| if: contains(matrix.cpack_generators, 'DEB') | |
| uses: actions/upload-artifact@v4 | |
| with: | |
| name: OS-Installers-${{ matrix.platform }}-DEB-${{ github.sha }} | |
| path: ${{ steps.build_path.outputs.path }}/*.deb | |
| if-no-files-found: ignore | |
| - name: Upload RPM installer | |
| if: contains(matrix.cpack_generators, 'RPM') | |
| uses: actions/upload-artifact@v4 | |
| with: | |
| name: OS-Installers-${{ matrix.platform }}-RPM-${{ github.sha }} | |
| path: ${{ steps.build_path.outputs.path }}/*.rpm | |
| if-no-files-found: ignore | |
| - name: Upload TGZ installer | |
| uses: actions/upload-artifact@v4 | |
| with: | |
| name: OS-Installers-${{ matrix.platform }}-TGZ-${{ github.sha }} | |
| path: ${{ steps.build_path.outputs.path }}/OpenStudio-*.tar.gz | |
| if-no-files-found: ignore | |
| - name: Upload WHEEL installer | |
| if: matrix.pip_package | |
| uses: actions/upload-artifact@v4 | |
| with: | |
| name: OS-Installers-${{ matrix.platform }}-WHEEL-${{ github.sha }} | |
| path: ${{ steps.build_path.outputs.path }}/*.whl | |
| if-no-files-found: ignore | |
| linux-publish: | |
| name: Publish Linux Artifacts | |
| needs: [linux-build] | |
| runs-on: ubuntu-latest | |
| if: github.ref == 'refs/heads/develop' || github.ref == 'refs/heads/master' || startsWith(github.ref, 'refs/tags/v') || inputs.publish_to_s3 == 'true' || github.event.inputs.publish_to_s3 == 'true' | |
| steps: | |
| - name: Download all installers | |
| uses: actions/download-artifact@v4 | |
| with: | |
| pattern: OS-Installers-* | |
| merge-multiple: true | |
| path: installers | |
| - name: Configure AWS credentials | |
| uses: aws-actions/configure-aws-credentials@v4 | |
| with: | |
| aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} | |
| aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} | |
| aws-region: ${{ secrets.AWS_REGION || 'us-west-2' }} | |
| - name: Publish to S3 | |
| working-directory: installers | |
| env: | |
| S3_PREFIX: ${{ github.ref_type == 'tag' && format('releases/{0}', github.ref_name) || format('{0}', github.ref_name) }} | |
| run: | | |
| set -euo pipefail | |
| echo "Uploading artifacts to s3://${AWS_S3_BUCKET}/${S3_PREFIX}" | |
| for file in *; do | |
| [ -e "$file" ] || continue | |
| [ -f "$file" ] || continue | |
| filename=$(basename "$file") | |
| key="${S3_PREFIX}/${filename}" | |
| aws s3 cp "$file" "s3://${AWS_S3_BUCKET}/${key}" --acl public-read | |
| if command -v md5sum >/dev/null 2>&1; then md5sum "$file"; else md5 "$file"; fi | |
| done | |
| - name: Trigger Docker Build | |
| if: inputs.skip_docker_trigger != 'true' && github.event.inputs.skip_docker_trigger != 'true' | |
| working-directory: installers | |
| env: | |
| GH_TOKEN: ${{ secrets.DOCKER_OPENSTUDIO_PAT }} | |
| BRANCH_NAME: ${{ github.ref_name }} | |
| S3_PREFIX: ${{ github.ref_type == 'tag' && format('releases/{0}', github.ref_name) || format('{0}', github.ref_name) }} | |
| run: | | |
| set -euo pipefail | |
| # Find the 22.04 deb file locally | |
| DEB_FILE=$(find . -name "*22.04*.deb" | head -n 1) | |
| if [ -z "$DEB_FILE" ]; then | |
| echo "::error::Could not find Ubuntu 22.04 deb file in installers directory" | |
| ls -la | |
| exit 1 | |
| fi | |
| FILENAME=$(basename "$DEB_FILE") | |
| echo "Found file: $FILENAME" | |
| # Construct S3 URL | |
| # Replace + with %2B | |
| ENCODED_FILENAME=${FILENAME//+/%2B} | |
| BINARY_URL="https://${AWS_S3_BUCKET}.s3.amazonaws.com/${S3_PREFIX}/${ENCODED_FILENAME}" | |
| echo "Binary URL: $BINARY_URL" | |
| # Parse version from filename | |
| # Expected format: OpenStudio-<Version>-<Platform>.deb | |
| if [[ "$FILENAME" =~ OpenStudio-(.+)-Ubuntu-22\.04.*\.deb ]]; then | |
| OS_VERSION_FULL=${BASH_REMATCH[1]} | |
| elif [[ "$FILENAME" =~ OpenStudio-(.+)-Linux.*\.deb ]]; then | |
| OS_VERSION_FULL=${BASH_REMATCH[1]} | |
| else | |
| echo "::error::Could not parse version from filename: $FILENAME" | |
| exit 1 | |
| fi | |
| echo "Full Version: $OS_VERSION_FULL" | |
| # Logic from Jenkins: | |
| # Split by + | |
| IFS='+' read -r VER_PART SHA_PART <<< "$OS_VERSION_FULL" | |
| if [[ "$VER_PART" == *"-"* ]]; then | |
| # 3.3.0-rc1 -> Ver: 3.3.0, Ext: rc1 | |
| IFS='-' read -r OS_VERSION OS_VERSION_EXT <<< "$VER_PART" | |
| else | |
| # 3.3.0+sha -> Ver: 3.3.0, Ext: sha | |
| OS_VERSION="$VER_PART" | |
| OS_VERSION_EXT="$SHA_PART" | |
| fi | |
| echo "OS Version: $OS_VERSION" | |
| echo "OS Version Ext: $OS_VERSION_EXT" | |
| # Docker Tag Logic | |
| if [[ "$BRANCH_NAME" == "develop" ]]; then | |
| DOCKER_IMAGE_TAG="develop" | |
| else | |
| DOCKER_IMAGE_TAG="${OS_VERSION}-${OS_VERSION_EXT}" | |
| fi | |
| echo "Docker Image Tag: $DOCKER_IMAGE_TAG" | |
| # Trigger Workflow | |
| echo "Triggering manual_update_develop workflow in NREL/docker-openstudio..." | |
| gh workflow run 'manual_update_develop' \ | |
| --repo NREL/docker-openstudio \ | |
| --ref develop \ | |
| -f docker_image_tag="$DOCKER_IMAGE_TAG" \ | |
| -f os_installer_link="$BINARY_URL" \ | |
| -f os_version="$OS_VERSION" \ | |
| -f os_version_ext="$OS_VERSION_EXT" | |
| macos-build: | |
| name: Build Packages for ${{ matrix.pretty }} | |
| if: "!inputs.jobs || contains(inputs.jobs, 'macos-build')" | |
| runs-on: ${{ matrix.os }} | |
| timeout-minutes: 720 | |
| continue-on-error: ${{ matrix.allow_failure }} | |
| strategy: | |
| fail-fast: false | |
| matrix: | |
| macos_dev_target: ["12.1", "13.0"] | |
| include: | |
| - macos_dev_target: "12.1" | |
| platform: macos-x64 | |
| pretty: "macOS x64" | |
| os: macos-15-intel | |
| allow_failure: false | |
| test_suffix: macOS-x64 | |
| arch: x86_64 | |
| python-arch: x64 | |
| dmg_glob: "*.dmg" | |
| tar_glob: "*OpenStudio*x86_64.tar.gz" | |
| exclude_regex: "^BCLFixture.BCLMeasure$" | |
| max_jobs: 3 | |
| - macos_dev_target: "13.0" | |
| platform: macos-arm64 | |
| pretty: "macOS ARM64" | |
| os: macos-15 | |
| allow_failure: false | |
| test_suffix: macOS-arm64 | |
| arch: arm64 | |
| python-arch: arm64 | |
| dmg_glob: "*.dmg" | |
| tar_glob: "*OpenStudio*arm64.tar.gz" | |
| exclude_regex: "^(GeometryFixture.Plane_RayIntersection|ISOModelFixture.SimModel|BCLFixture.BCLMeasure)$" | |
| max_jobs: 3 | |
| env: | |
| MAX_BUILD_THREADS: ${{ matrix.max_jobs }} | |
| CTEST_PARALLEL_LEVEL: ${{ matrix.max_jobs }} | |
| permissions: | |
| # Needed permission to upload the release asset | |
| contents: write | |
| steps: | |
| - name: Checkout repository | |
| uses: actions/checkout@v6 | |
| with: | |
| path: ${{ env.OPENSTUDIO_SOURCE }} | |
| fetch-depth: 1 | |
| - name: Verify space | |
| shell: bash | |
| run: | | |
| echo "Memory and swap:" | |
| if command -v vm_stat >/dev/null 2>&1; then | |
| vm_stat | |
| fi | |
| echo | |
| sysctl vm.swapusage || true | |
| echo | |
| echo "Available storage:" | |
| df -h . || true | |
| echo | |
| - name: Git safe directory | |
| working-directory: ${{ env.OPENSTUDIO_SOURCE }} | |
| run: git config --global --add safe.directory '*' | |
| - name: Setup ccache | |
| uses: hendrikmuhs/ccache-action@v1.2 | |
| with: | |
| key: ccache-${{ matrix.os }}-${{ matrix.platform }}-${{ hashFiles('conan.lock') }} | |
| max-size: ${{ env.CCACHE_MAXSIZE }} | |
| - name: Restore Conan cache | |
| uses: actions/cache@v4 | |
| with: | |
| path: ~/.conan2 | |
| key: conan-${{ matrix.os }}-${{ matrix.platform }}-${{ hashFiles('conan.lock') }} | |
| restore-keys: | | |
| conan-${{ matrix.os }}-${{ matrix.platform }}- | |
| - name: Remove python ${{ env.PYTHON_REQUIRED_VERSION }} from the toolcache | |
| run: | | |
| ls $RUNNER_TOOL_CACHE/Python || true | |
| rm -Rf "$RUNNER_TOOL_CACHE/Python/${{ env.PYTHON_REQUIRED_VERSION }}" | |
| rm -Rf "$RUNNER_TOOL_CACHE/Python/${{ env.PYTHON_REQUIRED_VERSION }}*/" | |
| - name: Set up Python ${{ env.PYTHON_REQUIRED_VERSION }} | |
| id: setup-python | |
| uses: jmarrec/setup-python@v5.4.0 | |
| with: | |
| python-version: ${{ env.PYTHON_REQUIRED_VERSION }} | |
| # check-latest: true # Force pick up the python I built instead of the (potential) toolcache one. I could also do `rm -Rf $RUNNER_TOOL_CACHE/Python/3.12.2` before this action | |
| - name: Install Python dependencies | |
| working-directory: ${{ env.OPENSTUDIO_SOURCE }} | |
| run: | | |
| python -m pip install --upgrade pip | |
| pip install setuptools wheel | |
| pip install -r python/requirements.txt | |
| pip install conan aqtinstall | |
| - name: Install Ruby | |
| uses: ruby/setup-ruby@v1 | |
| with: | |
| ruby-version: '3.2.2' | |
| bundler-cache: true | |
| - name: Set up Qt IFW | |
| run: | | |
| set -euo pipefail | |
| cd $RUNNER_TEMP | |
| mkdir QtIFW && cd QtIFW | |
| aria2c https://github.com/jmarrec/QtIFW-fixup/releases/download/v5.0.0-dev-with-fixup/QtIFW-5.0.0-${{ matrix.arch }}.zip | |
| xattr -r -d com.apple.quarantine ./QtIFW-5.0.0-${{ matrix.arch }}.zip | |
| unzip QtIFW-5.0.0-${{ matrix.arch }}.zip | |
| rm -Rf ./*.zip | |
| chmod +x * | |
| ./installerbase --version | |
| echo "$(pwd)" >> $GITHUB_PATH | |
| - name: Install System dependencies | |
| shell: bash | |
| run: | | |
| # The MACOSX_DEPLOYMENT_TARGET environment variable sets the default value for the CMAKE_OSX_DEPLOYMENT_TARGET variable. | |
| # We use cmake commands to build some subprojects, so setting it globally | |
| echo MACOSX_DEPLOYMENT_TARGET=${{ matrix.macos_dev_target }} >> $GITHUB_ENV | |
| brew install ninja | |
| - name: Create Build Directory | |
| run: cmake -E make_directory ${{ env.OPENSTUDIO_BUILD }} | |
| - name: Cache External Dependencies | |
| uses: actions/cache@v4 | |
| with: | |
| path: | | |
| ${{ env.OPENSTUDIO_BUILD }}/EnergyPlus*.tar.gz | |
| ${{ env.OPENSTUDIO_BUILD }}/EnergyPlus*.zip | |
| ${{ env.OPENSTUDIO_BUILD }}/radiance*.tar.gz | |
| ${{ env.OPENSTUDIO_BUILD }}/radiance*.zip | |
| ${{ env.OPENSTUDIO_BUILD }}/openstudio*gems*.tar.gz | |
| key: external-deps-${{ matrix.os }}-${{ hashFiles('conan.lock') }} | |
| restore-keys: | | |
| external-deps-${{ matrix.os }}- | |
| - name: Restore Generated Embedded Files | |
| uses: actions/cache@v4 | |
| with: | |
| path: | | |
| ${{ env.OPENSTUDIO_BUILD }}/src/*/embedded_files | |
| ${{ env.OPENSTUDIO_BUILD }}/ruby/engine/embedded_files | |
| key: embedded-files-${{ matrix.os }}-${{ hashFiles('resources/**', 'ruby/engine/**', 'src/airflow/**', 'src/energyplus/**', 'src/gbxml/**', 'src/isomodel/**', 'src/radiance/**', 'src/sdd/**', 'src/model/**', 'src/utilities/**') }} | |
| restore-keys: | | |
| embedded-files-${{ matrix.os }}- | |
| - name: Configure Conan remotes | |
| run: | | |
| set -euo pipefail | |
| conan remote remove nrel-v2 || true | |
| conan remote add --index 0 nrel-v2 https://conan.openstudio.net/artifactory/api/conan/conan-v2 | |
| conan remote remove conancenter || true | |
| conan remote add conancenter https://center2.conan.io | |
| if [ ! -f "$HOME/.conan2/profiles/default" ]; then | |
| conan profile detect | |
| fi | |
| conan config home | |
| - name: Conan install | |
| working-directory: ${{ env.OPENSTUDIO_SOURCE }} | |
| run: | | |
| set -euo pipefail | |
| CMAKE_POLICY_VERSION_MINIMUM=3.5 conan install . \ | |
| --output-folder=../${{ env.OPENSTUDIO_BUILD }} \ | |
| --build=missing \ | |
| -c tools.cmake.cmaketoolchain:generator=Ninja \ | |
| -c tools.build:cxxflags='["-Wno-enum-constexpr-conversion", "-D_LIBCPP_ENABLE_CXX17_REMOVED_UNARY_BINARY_FUNCTION"]' \ | |
| -s compiler.cppstd=20 \ | |
| -s build_type=${{ env.BUILD_TYPE }} \ | |
| -s os.version=${{ matrix.macos_dev_target }} \ | |
| -o readline/*:with_library=termcap | |
| - name: Locate Ruby | |
| run: | | |
| ruby_path=$(command -v ruby) | |
| echo "SYSTEM_RUBY_PATH=$ruby_path" >> $GITHUB_ENV | |
| - name: Locate Python | |
| run: | | |
| python_path=$(command -v python3) | |
| echo "SYSTEM_PYTHON_PATH=$python_path" >> $GITHUB_ENV | |
| - name: Install Documentation Dependencies (Mac) | |
| run: | | |
| brew install --cask basictex | |
| echo "/Library/TeX/texbin" >> $GITHUB_PATH | |
| - name: Configure with CMake | |
| working-directory: ${{ env.OPENSTUDIO_BUILD }} | |
| env: | |
| APPLE_DEV_ID: ${{ secrets.APPLE_DEV_ID }} | |
| run: | | |
| set -e | |
| chmod +x ./conanbuild.sh | |
| . ./conanbuild.sh | |
| # Use absolute path for ccache to avoid resolution issues in containers with symlinked build dirs | |
| CCACHE_ARGS=() | |
| if command -v ccache >/dev/null 2>&1; then | |
| CCACHE_EXE=$(command -v ccache) | |
| CCACHE_ARGS=("-DCMAKE_C_COMPILER_LAUNCHER=$CCACHE_EXE" "-DCMAKE_CXX_COMPILER_LAUNCHER=$CCACHE_EXE") | |
| fi | |
| # Configure signing identity | |
| SIGNING_ARGS=() | |
| if [ -n "$APPLE_DEV_ID" ]; then | |
| echo "Configuring for signed build" | |
| SIGNING_ARGS=("-DCPACK_IFW_PACKAGE_SIGNING_IDENTITY=$APPLE_DEV_ID") | |
| else | |
| echo "Configuring for ad-hoc signed build (skipping CPack signing identity)" | |
| SIGNING_ARGS=() | |
| fi | |
| cmake -G Ninja \ | |
| "${CCACHE_ARGS[@]}" \ | |
| "${SIGNING_ARGS[@]}" \ | |
| -DCMAKE_TOOLCHAIN_FILE=conan_toolchain.cmake \ | |
| -DCMAKE_CXX_FLAGS="-D_LIBCPP_ENABLE_CXX17_REMOVED_UNARY_BINARY_FUNCTION -Wno-enum-constexpr-conversion" \ | |
| -DENABLE_COVERAGE:BOOL=OFF \ | |
| -DCMAKE_OSX_DEPLOYMENT_TARGET:STRING=${{ matrix.macos_dev_target }} \ | |
| -DCMAKE_BUILD_TYPE:STRING=${{ env.BUILD_TYPE }} \ | |
| -DBUILD_TESTING:BOOL=ON \ | |
| -DCPACK_BINARY_TGZ:BOOL=ON \ | |
| -DCPACK_BINARY_IFW:BOOL=ON \ | |
| -DBUILD_PYTHON_BINDINGS:BOOL=ON \ | |
| -DBUILD_PYTHON_PIP_PACKAGE:BOOL=OFF \ | |
| -DPYTHON_VERSION:STRING=${{ env.PYTHON_REQUIRED_VERSION }} \ | |
| -DPython_ROOT_DIR:PATH="$(dirname $(dirname $SYSTEM_PYTHON_PATH))" \ | |
| -DCMAKE_JOB_POOL_LINK:STRING=console \ | |
| ../${{ env.OPENSTUDIO_SOURCE }} | |
| - name: Build with Ninja | |
| working-directory: ${{ env.OPENSTUDIO_BUILD }} | |
| run: | | |
| . ./conanbuild.sh | |
| export NINJA_STATUS="[%f/%t | %es elapsed | %o objs/sec]" | |
| while true; do | |
| sleep 300 | |
| echo "[heartbeat] $(date -u +"%H:%M:%S")" | |
| if command -v top >/dev/null 2>&1; then top -l 1 -s 0 | grep PhysMem || true; fi | |
| df -h . | tail -1 | awk '{print "[disk] used=" $3 "/" $2 " (" $5 ")"}' | |
| if command -v ps >/dev/null 2>&1; then ps -eo pid,pmem,rss,comm | sort -rn -k2 | head -n 5; fi | |
| done & | |
| heartbeat_pid=$! | |
| cmake --build . --parallel ${MAX_BUILD_THREADS} 2>&1 | tee build.log | |
| build_exit=${PIPESTATUS[0]} | |
| kill $heartbeat_pid || true | |
| command -v ninja >/dev/null 2>&1 && ninja -d stats || true | |
| if [ -f build.log ]; then tail -n 40 build.log; fi | |
| exit $build_exit | |
| - name: Wait for network stability | |
| if: always() | |
| run: sleep 5 | |
| - name: Upload build diagnostics | |
| if: always() | |
| uses: actions/upload-artifact@v4 | |
| with: | |
| name: build-diag-${{ matrix.platform }}-${{ github.sha }} | |
| path: | | |
| ${{ env.OPENSTUDIO_BUILD }}/build.log | |
| ${{ env.OPENSTUDIO_BUILD }}/.ninja_log | |
| ${{ env.OPENSTUDIO_BUILD }}/CTestTestfile.cmake | |
| if-no-files-found: warn | |
| - name: Run CTest suite | |
| id: mac_ctest | |
| working-directory: ${{ env.OPENSTUDIO_BUILD }} | |
| shell: bash | |
| continue-on-error: true | |
| run: | | |
| set -euo pipefail | |
| gem install bundler -v 2.4.10 --conservative --no-document | |
| # Conflicting tests that must run sequentially | |
| resource_locked_tests="ModelFixture.ScheduleFile|ModelFixture.ScheduleFileAltCtor|ModelFixture.PythonPluginInstance|ModelFixture.PythonPluginInstance_NotPYFile|ModelFixture.PythonPluginInstance_ClassNameValidation|ModelFixture.ChillerElectricASHRAE205_GettersSetters|ModelFixture.ChillerElectricASHRAE205_Loops|ModelFixture.ChillerElectricASHRAE205_NotCBORFile|ModelFixture.ChillerElectricASHRAE205_Clone" | |
| overall_exit_code=0 | |
| exclude_regex="${{ matrix.exclude_regex }}" | |
| echo "Running sequential tests..." | |
| if [ "$exclude_regex" == '""' ] || [ -z "$exclude_regex" ]; then | |
| ctest -C ${{ env.BUILD_TYPE }} -R "^($resource_locked_tests)$" -j 1 -T test || overall_exit_code=1 | |
| else | |
| ctest -C ${{ env.BUILD_TYPE }} -R "^($resource_locked_tests)$" -E "$exclude_regex" -j 1 -T test || overall_exit_code=1 | |
| fi | |
| echo "Running all other tests in parallel..." | |
| export CTEST_OUTPUT_ON_FAILURE=1 | |
| export CTEST_PARALLEL_LEVEL=${{ matrix.max_jobs }} | |
| if [ -n "$exclude_regex" ] && [ "$exclude_regex" != '""' ]; then | |
| final_exclude="($exclude_regex|$resource_locked_tests)" | |
| else | |
| final_exclude="^($resource_locked_tests)$" | |
| fi | |
| ctest -C ${{ env.BUILD_TYPE }} -E "$final_exclude" -T test || overall_exit_code=$? | |
| if [ $overall_exit_code -ne 0 ]; then | |
| echo "Rerunning failing tests..." | |
| ctest -C ${{ env.BUILD_TYPE }} --rerun-failed -T test --no-compress-output --output-on-failure && overall_exit_code=0 || overall_exit_code=$? | |
| fi | |
| echo "exit_code=${overall_exit_code}" >> $GITHUB_OUTPUT | |
| - name: Setup Keychain | |
| if: success() && (github.ref == 'refs/heads/develop' || github.ref == 'refs/heads/master' || inputs.publish_to_s3 == 'true' || github.event.inputs.publish_to_s3 == 'true' ) | |
| env: | |
| APPLE_CERT_DATA: ${{ secrets.APPLE_CERT_DATA }} | |
| APPLE_CERT_PASSWORD: ${{ secrets.APPLE_CERT_PASSWORD }} | |
| run: | | |
| set -euo pipefail | |
| if [ -n "$APPLE_CERT_DATA" ] && [ -n "$APPLE_CERT_PASSWORD" ]; then | |
| # Create temporary keychain | |
| KEYCHAIN_PATH="$RUNNER_TEMP/build.keychain" | |
| KEYCHAIN_PASSWORD=$(openssl rand -base64 32) | |
| security create-keychain -p "$KEYCHAIN_PASSWORD" "$KEYCHAIN_PATH" | |
| security set-keychain-settings -lut 21600 "$KEYCHAIN_PATH" | |
| security unlock-keychain -p "$KEYCHAIN_PASSWORD" "$KEYCHAIN_PATH" | |
| # Import certificate | |
| CERT_PATH="$RUNNER_TEMP/certificate.p12" | |
| echo "$APPLE_CERT_DATA" | base64 --decode > "$CERT_PATH" | |
| security import "$CERT_PATH" -k "$KEYCHAIN_PATH" -P "$APPLE_CERT_PASSWORD" -T /usr/bin/codesign | |
| security set-key-partition-list -S apple-tool:,apple: -s -k "$KEYCHAIN_PASSWORD" "$KEYCHAIN_PATH" | |
| echo "KEYCHAIN_PATH=$KEYCHAIN_PATH" >> $GITHUB_ENV | |
| fi | |
| - name: Create packages | |
| id: create_packages | |
| if: ${{ success() && !cancelled() }} | |
| working-directory: ${{ env.OPENSTUDIO_BUILD }} | |
| env: | |
| COPYFILE_DISABLE: 1 | |
| run: | | |
| set -euo pipefail | |
| . ./conanbuild.sh | |
| cpack -C ${{ env.BUILD_TYPE }} | |
| - name: Cleanup intermediate files | |
| if: always() | |
| working-directory: ${{ env.OPENSTUDIO_BUILD }} | |
| run: | | |
| find . -name "*.o" -type f -delete || true | |
| df -h . | |
| - name: Ad-hoc Sign Inner Installer (Fix "Killed" Error) | |
| if: ${{ success() && !cancelled() }} | |
| working-directory: ${{ env.OPENSTUDIO_BUILD }} | |
| env: | |
| # Check if we have a real ID; if not, we must patch the installer | |
| APPLE_DEV_ID: ${{ secrets.APPLE_DEV_ID }} | |
| run: | | |
| set -euo pipefail | |
| # Only run this fix if we DO NOT have a valid Developer ID. | |
| if [ -n "$APPLE_DEV_ID" ]; then | |
| echo "Valid Developer ID detected. Skipping ad-hoc patch." | |
| exit 0 | |
| fi | |
| echo "No Developer ID found. Patching DMGs with ad-hoc signature..." | |
| # Loop through all generated DMGs | |
| find . -maxdepth 1 -name "*.dmg" -print0 | while IFS= read -r -d '' dmg_file; do | |
| ../${{ env.OPENSTUDIO_SOURCE }}/developer/scripts/patch_adhoc_dmg.sh "$dmg_file" | |
| done | |
| - name: Sign DMG and Notarize | |
| if: ${{ steps.create_packages.outcome == 'success' && (github.ref == 'refs/heads/develop' || inputs.publish_to_s3 == 'true' || github.event.inputs.publish_to_s3 == 'true' || matrix.os == 'macOS') }} | |
| working-directory: ${{ env.OPENSTUDIO_BUILD }} | |
| env: | |
| APPLE_CERT_DATA: ${{ secrets.APPLE_CERT_DATA }} | |
| APPLE_DEV_ID: ${{ secrets.APPLE_DEV_ID }} | |
| APPLE_TEAM_ID: ${{ secrets.APPLE_TEAM_ID }} | |
| APPLE_ID_USERNAME: ${{ secrets.APPLE_ID_USERNAME }} | |
| APPLE_ID_PASSWORD: ${{ secrets.APPLE_ID_PASSWORD }} | |
| run: | | |
| set -euo pipefail | |
| # Check if signing credentials are configured | |
| if [ -z "$APPLE_CERT_DATA" ] || [ -z "$APPLE_DEV_ID" ]; then | |
| echo "::warning::Apple signing certificates not configured. Falling back to ad-hoc signing." | |
| export AD_HOC_SIGNING=true | |
| else | |
| export AD_HOC_SIGNING=false | |
| fi | |
| # Sign DMG files | |
| mkdir -p signed | |
| for dmg in ${{ matrix.dmg_glob }}; do | |
| if [ -f "$dmg" ]; then | |
| echo "Processing $dmg..." | |
| # The inner app should already be signed by CPack if configured correctly | |
| # We now sign the DMG itself (or ad-hoc sign if needed) | |
| echo "Signing $dmg..." | |
| if [ "$AD_HOC_SIGNING" = "false" ]; then | |
| codesign --force --sign "$APPLE_DEV_ID" --timestamp --options runtime "$dmg" || { | |
| echo "::warning::Failed to sign $dmg" | |
| cp "$dmg" "signed/$(basename "$dmg")" | |
| continue | |
| } | |
| else | |
| echo "Applying simple ad-hoc signature to $dmg" | |
| codesign --force --sign - "$dmg" || { | |
| echo "::warning::Failed to ad-hoc sign $dmg" | |
| cp "$dmg" "signed/$(basename "$dmg")" | |
| continue | |
| } | |
| fi | |
| # Notarize if credentials available (Skip for ad-hoc) | |
| if [ "$AD_HOC_SIGNING" = "false" ] && [ -n "$APPLE_ID_USERNAME" ] && [ -n "$APPLE_ID_PASSWORD" ]; then | |
| echo "Notarizing $dmg..." | |
| xcrun notarytool submit "$dmg" \ | |
| --apple-id "$APPLE_ID_USERNAME" \ | |
| --password "$APPLE_ID_PASSWORD" \ | |
| --team-id "$APPLE_TEAM_ID" \ | |
| --wait || echo "::warning::Notarization failed for $dmg" | |
| # Staple the notarization ticket | |
| xcrun stapler staple "$dmg" || echo "::warning::Stapling failed for $dmg" | |
| elif [ "$AD_HOC_SIGNING" = "true" ]; then | |
| echo "Skipping notarization due to ad-hoc signing." | |
| fi | |
| cp "$dmg" "signed/$(basename "$dmg")" | |
| fi | |
| done | |
| # Cleanup | |
| if [ -n "$KEYCHAIN_PATH" ]; then | |
| security delete-keychain "$KEYCHAIN_PATH" || true | |
| fi | |
| echo "Code signing completed" | |
| - name: Copy Testing tree with suffix | |
| if: always() | |
| working-directory: ${{ env.OPENSTUDIO_BUILD }} | |
| run: | | |
| set -euo pipefail | |
| if [ -d "Testing" ]; then | |
| cp -r Testing "Testing-${{ matrix.test_suffix }}" | |
| fi | |
| - name: Generate test summary | |
| if: always() | |
| working-directory: ${{ env.OPENSTUDIO_BUILD }} | |
| run: | | |
| set -euo pipefail | |
| # Generate a simple markdown summary from CTest results | |
| mkdir -p "$(dirname '${{ env.TEST_DASHBOARD_RELATIVE }}')" | |
| echo "# OpenStudio Test Results - ${{ matrix.test_suffix }}" > "${{ env.TEST_DASHBOARD_RELATIVE }}" | |
| echo "" >> "${{ env.TEST_DASHBOARD_RELATIVE }}" | |
| echo "**Build:** \`${{ github.sha }}\`" >> "${{ env.TEST_DASHBOARD_RELATIVE }}" | |
| echo "**Branch:** \`${{ github.ref_name }}\`" >> "${{ env.TEST_DASHBOARD_RELATIVE }}" | |
| echo "**Platform:** ${{ matrix.pretty }}" >> "${{ env.TEST_DASHBOARD_RELATIVE }}" | |
| echo "**Date:** $(date -u)" >> "${{ env.TEST_DASHBOARD_RELATIVE }}" | |
| echo "" >> "${{ env.TEST_DASHBOARD_RELATIVE }}" | |
| if [ -f Testing/Temporary/LastTest.log ]; then | |
| echo "## Test Log (Last 50 lines)" >> "${{ env.TEST_DASHBOARD_RELATIVE }}" | |
| echo '```' >> "${{ env.TEST_DASHBOARD_RELATIVE }}" | |
| tail -50 Testing/Temporary/LastTest.log >> "${{ env.TEST_DASHBOARD_RELATIVE }}" | |
| echo '```' >> "${{ env.TEST_DASHBOARD_RELATIVE }}" | |
| fi | |
| continue-on-error: true | |
| - name: Upload Testing artifact | |
| if: always() | |
| uses: actions/upload-artifact@v4 | |
| with: | |
| name: Testing-${{ matrix.platform }}-${{ github.sha }} | |
| path: | | |
| ${{ env.OPENSTUDIO_BUILD }}/Testing-${{ matrix.test_suffix }}/ | |
| ${{ env.OPENSTUDIO_BUILD }}/${{ env.TEST_DASHBOARD_RELATIVE }} | |
| - name: Determine installer path | |
| id: installer_path | |
| if: ${{ !cancelled() }} | |
| run: | | |
| if ls ${{ env.OPENSTUDIO_BUILD }}/signed/*.dmg 1> /dev/null 2>&1; then | |
| echo "path=${{ env.OPENSTUDIO_BUILD }}/signed/*.dmg" >> $GITHUB_OUTPUT | |
| elif ls ${{ env.OPENSTUDIO_BUILD }}/*.dmg 1> /dev/null 2>&1; then | |
| echo "path=${{ env.OPENSTUDIO_BUILD }}/*.dmg" >> $GITHUB_OUTPUT | |
| else | |
| echo "path=${{ env.OPENSTUDIO_BUILD }}/*.zip" >> $GITHUB_OUTPUT | |
| fi | |
| - name: Upload IFW installer | |
| if: always() | |
| uses: actions/upload-artifact@v4 | |
| with: | |
| name: OS-IFW-${{ matrix.platform }}-${{ github.sha }} | |
| path: ${{ steps.installer_path.outputs.path }} | |
| if-no-files-found: ignore | |
| - name: Upload TGZ package | |
| if: always() | |
| uses: actions/upload-artifact@v4 | |
| with: | |
| name: OS-TGZ-${{ matrix.platform }}-${{ github.sha }} | |
| path: ${{ env.OPENSTUDIO_BUILD }}/OpenStudio-*.tar.gz | |
| if-no-files-found: ignore | |
| - name: Fail job on test failures | |
| if: ${{ steps.mac_ctest.outputs.exit_code != '0' }} | |
| run: | | |
| echo "::error::CTest suite failed with exit code ${{ steps.mac_ctest.outputs.exit_code }}" | |
| exit 1 | |
| macos-publish: | |
| name: Publish macOS Artifacts | |
| needs: [macos-build] | |
| runs-on: ubuntu-latest | |
| if: github.ref == 'refs/heads/develop' || github.ref == 'refs/heads/master' || startsWith(github.ref, 'refs/tags/v') || inputs.publish_to_s3 == 'true' || github.event.inputs.publish_to_s3 == 'true' | |
| steps: | |
| - name: Download all installers | |
| uses: actions/download-artifact@v4 | |
| with: | |
| pattern: OS-*-macos* | |
| merge-multiple: true | |
| path: installers | |
| - name: Configure AWS credentials | |
| uses: aws-actions/configure-aws-credentials@v4 | |
| with: | |
| aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} | |
| aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} | |
| aws-region: ${{ secrets.AWS_REGION || 'us-west-2' }} | |
| - name: Publish to S3 | |
| working-directory: installers | |
| env: | |
| S3_PREFIX: ${{ github.ref_type == 'tag' && format('releases/{0}/signed', github.ref_name) || format('{0}/signed', github.ref_name) }} | |
| AWS_S3_BUCKET: openstudio-ci-builds | |
| run: | | |
| set -euo pipefail | |
| echo "Uploading artifacts to s3://${AWS_S3_BUCKET}/${S3_PREFIX}" | |
| for file in *; do | |
| [ -e "$file" ] || continue | |
| [ -f "$file" ] || continue | |
| filename=$(basename "$file") | |
| key="${S3_PREFIX}/${filename}" | |
| aws s3 cp "$file" "s3://${AWS_S3_BUCKET}/${key}" --acl public-read | |
| if command -v md5sum >/dev/null 2>&1; then md5sum "$file"; else md5 "$file"; fi | |
| done | |
| windows-build: | |
| name: Build ${{ matrix.pretty }} | |
| if: "!inputs.jobs || contains(inputs.jobs, 'windows-build')" | |
| runs-on: ${{ matrix.os }} | |
| timeout-minutes: 720 | |
| strategy: | |
| fail-fast: false | |
| matrix: | |
| include: | |
| - platform: windows-2022-x64 | |
| pretty: Windows 2022 x64 | |
| os: windows-2022 | |
| test_suffix: Windows-2022 | |
| vs-generator: x64 | |
| arch: x86_64 | |
| python-arch: x64 | |
| allow_failure: false | |
| max_jobs: 4 | |
| exclude_regex: "^(RubyTest-Date_Test-ymd_constructor|BCLFixture.BCLMeasure)$" | |
| defaults: | |
| run: | |
| shell: pwsh | |
| env: | |
| MAX_BUILD_THREADS: ${{ matrix.max_jobs }} | |
| CTEST_PARALLEL_LEVEL: ${{ matrix.max_jobs }} | |
| RUBYOPT: "-Eutf-8:utf-8" | |
| PYTHONUTF8: "1" | |
| permissions: | |
| contents: write | |
| steps: | |
| - name: Checkout repository | |
| uses: actions/checkout@v6 | |
| with: | |
| path: ${{ env.OPENSTUDIO_SOURCE }} | |
| fetch-depth: 1 | |
| - name: Verify space | |
| run: | | |
| Get-PSDrive C | Select-Object Used,Free | |
| Get-CimInstance Win32_OperatingSystem | Select-Object TotalVisibleMemorySize,FreePhysicalMemory,TotalVirtualMemorySize,FreeVirtualMemory | |
| - name: Git safe directory | |
| working-directory: ${{ env.OPENSTUDIO_SOURCE }} | |
| run: git config --global --add safe.directory '*' | |
| - name: Restore sccache cache | |
| uses: actions/cache@v4 | |
| with: | |
| path: ${{ github.workspace }}\.sccache | |
| key: sccache-${{ matrix.os }}-${{ matrix.platform }}-${{ hashFiles('conan.lock') }} | |
| restore-keys: | | |
| sccache-${{ matrix.os }}-${{ matrix.platform }}- | |
| - name: Patch tests for Windows | |
| working-directory: ${{ env.OPENSTUDIO_SOURCE }} | |
| run: | | |
| # Patch openstudio.py for build tree DLL loading | |
| $os_py = "python/module/openstudio.py" | |
| if (Test-Path $os_py) { | |
| $content = Get-Content $os_py | |
| $new_content = @() | |
| foreach ($line in $content) { | |
| $new_content += $line | |
| if ($line -match "os.add_dll_directory\(bin_dir\)") { | |
| $new_content += " products_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))" | |
| $new_content += " if os.path.isdir(products_dir) and os.path.isfile(os.path.join(products_dir, 'openstudio_utilities.dll')):" | |
| $new_content += " os.add_dll_directory(products_dir)" | |
| $new_content += " # Add PATH to DLL search, needed for Conan dependencies on Python 3.8+" | |
| $new_content += " for p in os.environ['PATH'].split(';'):" | |
| $new_content += " if p and os.path.isdir(p):" | |
| $new_content += " try:" | |
| $new_content += " os.add_dll_directory(p)" | |
| $new_content += " except:" | |
| $new_content += " pass" | |
| } | |
| } | |
| $new_content | Set-Content $os_py | |
| } | |
| - name: Prepare workspace | |
| run: | | |
| git config --global --add safe.directory "*" | |
| New-Item -ItemType Directory -Path "${{ env.OPENSTUDIO_BUILD }}" -Force | |
| - name: Setup sccache | |
| uses: Mozilla-Actions/sccache-action@v0.0.5 | |
| - name: Restore Conan cache | |
| uses: actions/cache@v4 | |
| with: | |
| path: ~/.conan2 | |
| key: conan-${{ matrix.os }}-windows-${{ hashFiles('conan.lock') }} | |
| restore-keys: | | |
| conan-${{ matrix.os }}-windows- | |
| - name: Set up Python 3.12.2 | |
| uses: actions/setup-python@v6 | |
| with: | |
| python-version: '3.12.2' | |
| cache: 'pip' | |
| - name: Install Python dependencies | |
| working-directory: ${{ env.OPENSTUDIO_SOURCE }} | |
| run: | | |
| python -m pip install --upgrade pip setuptools wheel | |
| python -m pip install -r python/requirements.txt | |
| - name: Install Conan | |
| working-directory: ${{ env.OPENSTUDIO_SOURCE }} | |
| run: | | |
| python -m pip install conan | |
| - name: Install Ruby | |
| uses: ruby/setup-ruby@v1 | |
| with: | |
| ruby-version: '3.2.2' | |
| bundler-cache: true | |
| - name: Install System dependencies and LaTeX | |
| shell: bash | |
| run: | | |
| set -x | |
| echo "Downloading MiKTeX CLI installer" | |
| # We download from a specific mirror. TODO: store this setup package somewhere ourselves | |
| curl -L -O --retry 5 --retry-connrefused https://ctan.math.utah.edu/ctan/tex-archive/systems/win32/miktex/setup/windows-x64/miktexsetup-5.5.0%2B1763023-x64.zip | |
| unzip miktexsetup-5.5.0%2B1763023-x64.zip | |
| echo "Setting up the local package directory via download" | |
| ./miktexsetup_standalone.exe --verbose \ | |
| --local-package-repository=C:/ProgramData/MiKTeX-Repo \ | |
| --remote-package-repository="https://ctan.math.utah.edu/ctan/tex-archive/systems/win32/miktex/tm/packages/" \ | |
| --package-set=essential \ | |
| download | |
| echo "Installing from the local package directory previously set up" | |
| ./miktexsetup_standalone.exe --verbose \ | |
| --local-package-repository=C:/ProgramData/MiKTeX-Repo \ | |
| --package-set=essential \ | |
| --shared \ | |
| install | |
| echo "Adding MiKTeX bin folder to PATH and to GITHUB_PATH" | |
| echo "C:/Program Files/MiKTeX/miktex/bin/x64/" >> $GITHUB_PATH | |
| export PATH="/c/Program Files/MiKTeX/miktex/bin/x64/:$PATH" | |
| echo "Configuring MiKTeX to install missing packages on the fly" | |
| initexmf --admin --verbose --set-config-value='[MPM]AutoInstall=1' | |
| echo "Configure default mirror for packages" | |
| mpm --admin --set-repository="https://ctan.math.utah.edu/ctan/tex-archive/systems/win32/miktex/tm/packages/" | |
| # Avoid annoying warning: "xelatex: major issue: So far, you have not checked for updates as a MiKTeX user." | |
| mpm --find-updates | |
| mpm --admin --find-updates | |
| - name: Create Build Directory | |
| run: cmake -E make_directory ${{ env.OPENSTUDIO_BUILD }} | |
| - name: Configure Conan remotes | |
| run: | | |
| conan remote remove nrel-v2 | |
| if ($LASTEXITCODE -ne 0) { $LASTEXITCODE = 0 } | |
| conan remote add nrel-v2 https://conan.openstudio.net/artifactory/api/conan/conan-v2 | |
| if ($LASTEXITCODE -ne 0) { throw "Failed to add nrel-v2 remote" } | |
| conan remote enable nrel-v2 | |
| conan remote remove conancenter | |
| if ($LASTEXITCODE -ne 0) { $LASTEXITCODE = 0 } | |
| conan remote add conancenter https://center2.conan.io | |
| if ($LASTEXITCODE -ne 0) { throw "Failed to add conancenter remote" } | |
| conan remote enable conancenter | |
| if (-not (Test-Path "$env:USERPROFILE/.conan2/profiles/default")) { | |
| conan profile detect | |
| } | |
| conan config home | |
| - name: Conan install | |
| working-directory: ${{ env.OPENSTUDIO_SOURCE }} | |
| run: | | |
| $env:CMAKE_POLICY_VERSION_MINIMUM="3.5" | |
| conan install . ` | |
| --output-folder="../${{ env.OPENSTUDIO_BUILD }}" ` | |
| --build=missing ` | |
| -c tools.cmake.cmaketoolchain:generator=Ninja ` | |
| -s compiler.cppstd=20 ` | |
| -s build_type=${{ env.BUILD_TYPE }} | |
| - name: Locate Ruby | |
| run: | | |
| $rubyPath = (Get-Command ruby).Source | |
| "SYSTEM_RUBY_PATH=$rubyPath" | Out-File -FilePath $env:GITHUB_ENV -Encoding utf8 -Append | |
| - name: Locate Python | |
| run: | | |
| $pythonPath = (Get-Command python).Source | |
| "SYSTEM_PYTHON_PATH=$pythonPath" | Out-File -FilePath $env:GITHUB_ENV -Encoding utf8 -Append | |
| - name: Configure with CMake | |
| working-directory: ${{ env.OPENSTUDIO_BUILD }} | |
| run: | | |
| $sccacheExe = (Get-Command sccache).Source | |
| & $env:ComSpec /c "call conanbuild.bat && cmake -G Ninja -DCMAKE_C_COMPILER_LAUNCHER=`"$sccacheExe`" -DCMAKE_CXX_COMPILER_LAUNCHER=`"$sccacheExe`" -DCMAKE_TOOLCHAIN_FILE=conan_toolchain.cmake -DCMAKE_BUILD_TYPE:STRING=${{ env.BUILD_TYPE }} -DBUILD_TESTING:BOOL=ON -DCPACK_GENERATOR:STRING=`"NSIS;TGZ`" -DBUILD_PYTHON_BINDINGS:BOOL=ON -DBUILD_PYTHON_PIP_PACKAGE:BOOL=OFF -DPython_EXECUTABLE:FILEPATH=`"$env:SYSTEM_PYTHON_PATH`" -DPYTHON_VERSION:STRING=${{ env.PYTHON_REQUIRED_VERSION }} -DSYSTEM_RUBY_EXECUTABLE=`"%SYSTEM_RUBY_PATH%`" -DCMAKE_JOB_POOL_LINK:STRING=console `"../${{ env.OPENSTUDIO_SOURCE }}"`" | |
| - name: Build with Ninja | |
| working-directory: ${{ env.OPENSTUDIO_BUILD }} | |
| shell: pwsh | |
| run: | | |
| if (Get-Command sccache -ErrorAction SilentlyContinue) { sccache -s } | |
| # Use $env:ComSpec to ensure we call the Windows Command Prompt, not the MSYS2 cmd found in PATH | |
| & $env:ComSpec /c "call conanbuild.bat && cmake --build . --parallel ${{ matrix.max_jobs }} -- -d stats 2>&1" | Tee-Object -FilePath "build.log" | |
| # Check the exit code of the cmd process, not Tee-Object | |
| if ($LASTEXITCODE -ne 0) { | |
| Write-Error "Build failed with exit code $LASTEXITCODE" | |
| exit $LASTEXITCODE | |
| } | |
| if (Get-Command sccache -ErrorAction SilentlyContinue) { sccache -s } | |
| - name: Wait for network stability | |
| if: always() | |
| run: Start-Sleep -Seconds 5 | |
| - name: Upload build diagnostics | |
| if: always() | |
| uses: actions/upload-artifact@v4 | |
| with: | |
| name: build-diag-${{ matrix.platform }}-${{ github.sha }} | |
| path: | | |
| ${{ env.OPENSTUDIO_BUILD }}/build.log | |
| ${{ env.OPENSTUDIO_BUILD }}/.ninja_log | |
| ${{ env.OPENSTUDIO_BUILD }}/CTestTestfile.cmake | |
| if-no-files-found: warn | |
| - name: Run CTest suite | |
| id: ctest | |
| continue-on-error: true | |
| working-directory: ${{ env.OPENSTUDIO_BUILD }} | |
| shell: pwsh | |
| run: | | |
| $env_vars = & $env:ComSpec /c "call conanbuild.bat && set" | |
| foreach ($line in $env_vars) { | |
| if ($line -match '^(.*?)=(.*)$') { | |
| $name = $matches[1] | |
| $value = $matches[2] | |
| if ($name -ne "" -and $name -notmatch "^=") { | |
| [Environment]::SetEnvironmentVariable($name, $value, "Process") | |
| } | |
| } | |
| } | |
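| # The loop above imports the environment produced by conanbuild.bat into this PowerShell session | |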
| # Add build Products directory to Path so Python can find _openstudioairflow.pyd and its dependencies | |
| $products_dir = Join-Path (Get-Location) "Products" | |
| $env:Path = "$products_dir;" + $env:Path | |
| # Conflicting tests that must run sequentially | |
| $resource_locked_tests = "ModelFixture.ScheduleFile|ModelFixture.ScheduleFileAltCtor|ModelFixture.PythonPluginInstance|ModelFixture.PythonPluginInstance_NotPYFile|ModelFixture.PythonPluginInstance_ClassNameValidation|ModelFixture.ChillerElectricASHRAE205_GettersSetters|ModelFixture.ChillerElectricASHRAE205_Loops|ModelFixture.ChillerElectricASHRAE205_NotCBORFile|ModelFixture.ChillerElectricASHRAE205_Clone" | |
| $overall_exit_code = 0 | |
| $exclude_regex = "${{ matrix.exclude_regex }}" | |
| $env:CTEST_OUTPUT_ON_FAILURE = "1" | |
| Write-Host "Running sequential tests..." | |
| if ([string]::IsNullOrEmpty($exclude_regex) -or $exclude_regex -eq '""') { | |
| ctest -C ${{ env.BUILD_TYPE }} -R "^($resource_locked_tests)$" -j 1 -T test | |
| } else { | |
| ctest -C ${{ env.BUILD_TYPE }} -R "^($resource_locked_tests)$" -E "$exclude_regex" -j 1 -T test | |
| } | |
| if ($LASTEXITCODE -ne 0) { $overall_exit_code = 1 } | |
| Write-Host "Running all other tests in parallel..." | |
| if ([string]::IsNullOrEmpty($exclude_regex) -or $exclude_regex -eq '""') { | |
| $final_exclude = "^($resource_locked_tests)$" | |
| } else { | |
| $final_exclude = "($exclude_regex|$resource_locked_tests)" | |
| } | |
| ctest -C ${{ env.BUILD_TYPE }} -E "$final_exclude" -j ${{ matrix.max_jobs }} -T test | |
| if ($LASTEXITCODE -ne 0) { $overall_exit_code = 1 } | |
| if ($overall_exit_code -ne 0) { | |
| Write-Host "Rerunning failing tests..." | |
| ctest -C ${{ env.BUILD_TYPE }} --rerun-failed -T test | |
| if ($LASTEXITCODE -eq 0) { $overall_exit_code = 0 } else { $overall_exit_code = 1 } | |
| } | |
| "exit_code=$overall_exit_code" | Out-File -FilePath $env:GITHUB_OUTPUT -Append | |
| - name: Copy Testing tree with suffix | |
| if: always() | |
| working-directory: ${{ env.OPENSTUDIO_BUILD }} | |
| run: | | |
| if (Test-Path "Testing") { | |
| Copy-Item -Path "Testing" -Destination "Testing-${{ matrix.test_suffix }}" -Recurse -Force | |
| } | |
| - name: Generate test summary | |
| if: always() | |
| working-directory: ${{ env.OPENSTUDIO_BUILD }} | |
| run: | | |
| $dashboardPath = "${{ env.TEST_DASHBOARD_RELATIVE }}" | |
| $dashboardDir = Split-Path -Parent $dashboardPath | |
| if (-not (Test-Path $dashboardDir)) { New-Item -ItemType Directory -Path $dashboardDir -Force } | |
| "# OpenStudio Test Results - ${{ matrix.test_suffix }}" | Out-File -FilePath $dashboardPath -Encoding utf8 | |
| "" | Out-File -FilePath $dashboardPath -Encoding utf8 -Append | |
| "**Build:** ``${{ github.sha }}``" | Out-File -FilePath $dashboardPath -Encoding utf8 -Append | |
| "**Branch:** ``${{ github.ref_name }}``" | Out-File -FilePath $dashboardPath -Encoding utf8 -Append | |
| "**Platform:** ${{ matrix.pretty }}" | Out-File -FilePath $dashboardPath -Encoding utf8 -Append | |
| "**Date:** $(Get-Date -Format u)" | Out-File -FilePath $dashboardPath -Encoding utf8 -Append | |
| "" | Out-File -FilePath $dashboardPath -Encoding utf8 -Append | |
| if (Test-Path "Testing/Temporary/LastTest.log") { | |
| "## Test Log (Last 50 lines)" | Out-File -FilePath $dashboardPath -Encoding utf8 -Append | |
| '```' | Out-File -FilePath $dashboardPath -Encoding utf8 -Append | |
| Get-Content "Testing/Temporary/LastTest.log" -Tail 50 | Out-File -FilePath $dashboardPath -Encoding utf8 -Append | |
| '```' | Out-File -FilePath $dashboardPath -Encoding utf8 -Append | |
| } | |
| continue-on-error: true | |
| - name: Upload Testing artifact | |
| if: always() | |
| uses: actions/upload-artifact@v4 | |
| with: | |
| name: Testing-${{ matrix.platform }}-${{ github.sha }} | |
| path: | | |
| ${{ env.OPENSTUDIO_BUILD }}/Testing-${{ matrix.test_suffix }}/ | |
| ${{ env.OPENSTUDIO_BUILD }}/${{ env.TEST_DASHBOARD_RELATIVE }} | |
| # CODE SIGNING SETUP | |
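| # Signing and publishing steps only run on develop/master, on version tags, or when publish_to_s3 is forced via workflow_dispatch/workflow_call | |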
| - name: Setup Node.js | |
| if: github.ref == 'refs/heads/develop' || github.ref == 'refs/heads/master' || startsWith(github.ref, 'refs/tags/v') || inputs.publish_to_s3 == 'true' || github.event.inputs.publish_to_s3 == 'true' | |
| uses: actions/setup-node@v4 | |
| with: | |
| node-version: "18" | |
| - name: Create .env file for Signing | |
| if: github.ref == 'refs/heads/develop' || github.ref == 'refs/heads/master' || startsWith(github.ref, 'refs/tags/v') || inputs.publish_to_s3 == 'true' || github.event.inputs.publish_to_s3 == 'true' | |
| working-directory: ${{ env.OPENSTUDIO_BUILD }} | |
| run: | | |
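| # These credentials are written to .env in the build directory, where the code-signing.js client invoked below is expected to pick them up | |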
| echo "ACCESS_KEY=${{ secrets.AWS_SIGNING_ACCESS_KEY }}" >> .env | |
| echo "SECRET_KEY=${{ secrets.AWS_SIGNING_SECRET_KEY }}" >> .env | |
| - name: Code sign binaries | |
| if: success() && (github.ref == 'refs/heads/develop' || github.ref == 'refs/heads/master' || startsWith(github.ref, 'refs/tags/v') || inputs.publish_to_s3 == 'true' || github.event.inputs.publish_to_s3 == 'true') | |
| working-directory: ${{ env.OPENSTUDIO_BUILD }} | |
| run: | | |
| # Check if signing client exists | |
| $canSign = $true | |
| if (-not (Test-Path "../${{ env.OPENSTUDIO_SOURCE }}/.github/signing-client/code-signing.js")) { | |
| Write-Host "::warning::Code signing client not found at .github/signing-client/code-signing.js" | |
| Write-Host "::warning::Skipping code signing. Add signing client files to repository." | |
| $canSign = $false | |
| } | |
| # Check if AWS signing credentials are configured | |
| if ([string]::IsNullOrEmpty("${{ secrets.AWS_SIGNING_ACCESS_KEY }}")) { | |
| Write-Host "::warning::AWS_SIGNING_ACCESS_KEY secret not configured" | |
| Write-Host "::warning::Skipping code signing. Configure AWS signing secrets." | |
| $canSign = $false | |
| } | |
| if ($canSign) { | |
| Write-Host "------------------------------------------------------------" | |
| Write-Host "Signing Binaries (Pre-CPack)" | |
| Write-Host "------------------------------------------------------------" | |
| $pathsToSign = @() | |
| if (Test-Path "bin") { $pathsToSign += "bin" } | |
| if (Test-Path "Products") { $pathsToSign += "Products" } | |
| if ($pathsToSign.Count -gt 0) { | |
| $binZip = "binaries_to_sign.zip" | |
| $signedBinZip = "binaries_to_sign.signed.zip" | |
| Write-Host "Archiving binaries from: $pathsToSign" | |
| Compress-Archive -Path $pathsToSign -DestinationPath $binZip -Force | |
| Write-Host "Sending binaries for signing..." | |
| node "../${{ env.OPENSTUDIO_SOURCE }}/.github/signing-client/code-signing.js" $binZip -t 4800000 | |
| if (Test-Path $signedBinZip) { | |
| Write-Host "Extracting and overwriting signed binaries..." | |
| $tempDir = "temp_signed_binaries" | |
| if (Test-Path $tempDir) { Remove-Item $tempDir -Recurse -Force } | |
| Expand-Archive -Path $signedBinZip -DestinationPath $tempDir -Force | |
| # Copy back to overwrite | |
| Copy-Item -Path "$tempDir\*" -Destination . -Recurse -Force | |
| # Cleanup | |
| Remove-Item $tempDir -Recurse -Force | |
| Remove-Item $binZip -Force | |
| Remove-Item $signedBinZip -Force | |
| Write-Host "Binaries signed successfully." | |
| } else { | |
| Write-Host "::error::Signed binaries zip not found!" | |
| exit 1 | |
| } | |
| } else { | |
| Write-Host "::warning::No bin/ or Products/ directories found to sign." | |
| } | |
| } | |
| - name: Create packages | |
| if: ${{ success() && !cancelled() }} | |
| working-directory: ${{ env.OPENSTUDIO_BUILD }} | |
| run: | | |
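| # conanbuild.bat is assumed to restore the Conan-generated environment so cpack packages with the same dependency settings used for the build | |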
| & $env:ComSpec /c "call conanbuild.bat && cpack -C ${{ env.BUILD_TYPE }}" | |
| - name: Code sign installer | |
| if: success() && (github.ref == 'refs/heads/develop' || github.ref == 'refs/heads/master' || startsWith(github.ref, 'refs/tags/v') || inputs.publish_to_s3 == 'true' || github.event.inputs.publish_to_s3 == 'true') | |
| working-directory: ${{ env.OPENSTUDIO_BUILD }} | |
| run: | | |
| # Check if signing client exists | |
| $canSign = $true | |
| if (-not (Test-Path "../${{ env.OPENSTUDIO_SOURCE }}/.github/signing-client/code-signing.js")) { | |
| $canSign = $false | |
| } | |
| # Check if AWS signing credentials are configured | |
| if ([string]::IsNullOrEmpty("${{ secrets.AWS_SIGNING_ACCESS_KEY }}")) { | |
| $canSign = $false | |
| } | |
| if ($canSign) { | |
| Write-Host "------------------------------------------------------------" | |
| Write-Host "Signing Final Installer" | |
| Write-Host "------------------------------------------------------------" | |
| $installerZip = "installer_to_sign_${{ github.sha }}.zip" | |
| $signedInstallerZip = "installer_to_sign_${{ github.sha }}.signed.zip" | |
| $installers = Get-ChildItem -Filter "OpenStudio-*.exe" | |
| if ($installers.Count -gt 0) { | |
| Write-Host "Found installer(s): $($installers.Name)" | |
| # Calculate hash of original installer before compression | |
| $originalHash = (Get-FileHash -Path $installers[0].FullName -Algorithm SHA256).Hash | |
| Write-Host "Original installer hash (SHA256): $originalHash" | |
| Compress-Archive -Path $installers.FullName -DestinationPath $installerZip -Force | |
| Write-Host "Sending installer for signing..." | |
| node "../${{ env.OPENSTUDIO_SOURCE }}/.github/signing-client/code-signing.js" $installerZip -t 4800000 | |
| if (Test-Path $signedInstallerZip) { | |
| Write-Host "Extracting signed installer..." | |
| if (-not (Test-Path signed)) { New-Item -ItemType Directory -Path signed | Out-Null } | |
| Expand-Archive -Path $signedInstallerZip -DestinationPath signed -Force | |
| # Verify the extracted file exists and matches expected name | |
| $extractedFiles = Get-ChildItem -Path signed -Filter "*.exe" | |
| if ($extractedFiles.Count -eq 0) { | |
| Write-Host "::error::No EXE file found in signed archive!" | |
| exit 1 | |
| } | |
| $extractedFile = $extractedFiles[0] | |
| $expectedName = $installers[0].Name | |
| if ($extractedFile.Name -ne $expectedName) { | |
| Write-Host "::warning::Signed file name mismatch!" | |
| Write-Host " Expected: $expectedName" | |
| Write-Host " Got: $($extractedFile.Name)" | |
| # Rename to match the original filename so a misnamed file is not distributed (Rename-Item expects a bare name, not a path) | |
| Rename-Item -Path $extractedFile.FullName -NewName $expectedName -Force | |
| Write-Host " Renamed to match original: $expectedName" | |
| } | |
| # Cleanup | |
| Remove-Item $installerZip -Force | |
| Remove-Item $signedInstallerZip -Force | |
| Write-Host "Installer signed successfully." | |
| } else { | |
| Write-Host "::error::Signed installer zip not found!" | |
| exit 1 | |
| } | |
| } else { | |
| Write-Host "::warning::No OpenStudio installer found to sign." | |
| } | |
| } | |
| - name: Cleanup intermediate files | |
| if: always() | |
| working-directory: ${{ env.OPENSTUDIO_BUILD }} | |
| run: | | |
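| # Drop intermediate object files to free runner disk space before installers are uploaded, then report remaining disk usage on C: | |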
| Get-ChildItem -Path . -Include "*.obj" -Recurse -Force | Remove-Item -Force -ErrorAction SilentlyContinue | |
| Get-PSDrive C | Select-Object Used,Free | |
| - name: Fail job on test failures | |
| if: ${{ steps.ctest.outputs.exit_code != '0' }} | |
| run: | | |
| echo "::error::CTest suite failed with exit code ${{ steps.ctest.outputs.exit_code }}" | |
| exit 1 | |
| - name: Upload Signed EXE installer | |
| uses: actions/upload-artifact@v4 | |
| with: | |
| name: OS-Installers-${{ matrix.platform }}-EXE-${{ github.sha }} | |
| path: ${{ env.OPENSTUDIO_BUILD }}/signed/OpenStudio*.exe | |
| if-no-files-found: ignore | |
| - name: Upload Signed TGZ installer | |
| uses: actions/upload-artifact@v4 | |
| with: | |
| name: OS-Installers-${{ matrix.platform }}-TGZ-${{ github.sha }} | |
| path: ${{ env.OPENSTUDIO_BUILD }}/_CPack_Packages/win64/TGZ/*.tar.gz | |
| if-no-files-found: ignore | |
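| # Publishes the Windows installers built above to S3; gated to develop/master, version tags, or a forced publish_to_s3 run | |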
| windows-publish: | |
| name: Publish Windows Artifacts | |
| needs: [windows-build] | |
| runs-on: ubuntu-latest | |
| if: github.ref == 'refs/heads/develop' || github.ref == 'refs/heads/master' || startsWith(github.ref, 'refs/tags/v') || inputs.publish_to_s3 == 'true' || github.event.inputs.publish_to_s3 == 'true' | |
| steps: | |
| - name: Download all installers | |
| uses: actions/download-artifact@v4 | |
| with: | |
| pattern: OS-Installers-windows-2022-x64-* | |
| path: installers | |
| - name: Configure AWS credentials | |
| uses: aws-actions/configure-aws-credentials@v4 | |
| with: | |
| aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} | |
| aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} | |
| aws-region: ${{ secrets.AWS_REGION || 'us-west-2' }} | |
| - name: Publish to S3 | |
| working-directory: installers | |
| env: | |
| S3_PREFIX: ${{ github.ref_type == 'tag' && format('releases/{0}/signed', github.ref_name) || format('{0}/signed', github.ref_name) }} | |
| AWS_S3_BUCKET: openstudio-ci-builds | |
| run: | | |
| set -euo pipefail | |
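| # S3_PREFIX places tag builds under releases/<tag>/signed and branch builds under <branch>/signed | |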
| echo "Uploading artifacts to s3://${AWS_S3_BUCKET}/${S3_PREFIX}" | |
| # Find installers in the downloaded artifact subdirectories; prefer one whose path is marked SIGNED, otherwise fall back to the first installer found | |
| SIGNED_EXE=$(find . -name "*.exe" | grep "SIGNED" | head -n 1 || true) | |
| UNSIGNED_EXE=$(find . -name "*.exe" | grep -v "SIGNED" | head -n 1 || true) | |
| if [ -n "$SIGNED_EXE" ]; then | |
| echo "Uploading signed installer: $SIGNED_EXE" | |
| filename=$(basename "$SIGNED_EXE") | |
| aws s3 cp "$SIGNED_EXE" "s3://${AWS_S3_BUCKET}/${S3_PREFIX}/${filename}" --acl public-read | |
| elif [ -n "$UNSIGNED_EXE" ]; then | |
| echo "Uploading unsigned installer: $UNSIGNED_EXE" | |
| filename=$(basename "$UNSIGNED_EXE") | |
| aws s3 cp "$UNSIGNED_EXE" "s3://${AWS_S3_BUCKET}/${S3_PREFIX}/${filename}" --acl public-read | |
| fi | |
| # Upload tarballs (stream the file list so paths containing spaces are handled safely) | |
| find . -name "*.tar.gz" | while IFS= read -r file; do | |
| filename=$(basename "$file") | |
| aws s3 cp "$file" "s3://${AWS_S3_BUCKET}/${S3_PREFIX}/${filename}" --acl public-read | |
| done | |