# PR #20: clean up string handling in unit tests; built tests based on MTX_FMT
---
name: Build and Run Unit Tests

on:
  push:
    branches: [ "main", "dane_dev" ]
  pull_request:
    branches: [ "main", "dane_dev" ]

jobs:
  testMPI:
    runs-on: ubuntu-latest
    # Build matrix over compilers; each entry selects which Spack packages
    # are installed below and the TOOLCHAIN value exported to the Makefile.
    strategy:
      matrix:
        compiler: [GCC, ICC, CLANG]
    steps:
      # 1) Check out the repository code
      - name: Checkout repository
        uses: actions/checkout@v4

      # 2) Set up Spack for package management
      - name: Set-up Spack
        uses: spack/setup-spack@v2
        with:
          ref: develop      # Use the 'develop' branch of the spack/setup-spack action
          buildcache: true  # Enable Spack binary cache
          color: true       # Enable colored output
          path: spack       # Install Spack under ./spack directory

      # 3) Install necessary compiler and MPI packages via Spack
      - name: Install Compilers and MPI Wrappers
        run: |
          # Source Spack environment to get spack commands
          . ./spack/share/spack/setup-env.sh
          # Based on matrix.compiler, install the right packages
          case "${{ matrix.compiler }}" in
            GCC)
              # Install OpenMPI for GCC
              spack install -j 4 openmpi;;
            ICC)
              # Install Intel compilers and Intel MPI
              spack install -j 4 intel-oneapi-compilers
              spack install -j 4 intel-oneapi-mpi;;
            CLANG)
              # Install LLVM/Clang and OpenMPI
              spack install -j 4 llvm
              spack install -j 4 openmpi;;
          esac

      # 4) Configure, build, and run tests in one step so the Spack
      #    environment loaded below persists for the whole script.
      - name: Configure and Make SparseBench Tests
        run: |
          # Re-source Spack so we have `spack load` available
          . ./spack/share/spack/setup-env.sh
          # Based on matrix.compiler, load the correct compiler/MPI into this shell
          case "${{ matrix.compiler }}" in
            GCC)
              # Load OpenMPI for GCC
              eval "$(spack load --sh openmpi)";;
            ICC)
              # Load Intel compilers and Intel MPI
              eval "$(spack load --sh intel-oneapi-compilers)"
              eval "$(spack load --sh intel-oneapi-mpi)";;
            CLANG)
              # Load LLVM/Clang and OpenMPI
              eval "$(spack load --sh llvm)"
              eval "$(spack load --sh openmpi)";;
          esac
          # Export TOOLCHAIN for the Makefile
          export TOOLCHAIN="${{ matrix.compiler }}"
          # run_tests FMT: patch config.mk for the given matrix format,
          # rebuild sparseBench and its test suite, then run the tests
          # on a single MPI rank.
          # NOTE(review): switching MTX_FMT between calls relies on the
          # Makefile rebuilding objects when config.mk changes — confirm,
          # or insert a `make clean` at the top of this function.
          run_tests() {
            local FMT=$1
            echo ">>> Building and testing with $FMT matrix format."
            sed -E -i \
              -e 's/^(ENABLE_MPI[[:space:]]*\?=).*/\1true/' \
              -e 's/^(ENABLE_OPENMP[[:space:]]*\?=).*/\1false/' \
              -e "s/^(MTX_FMT[[:space:]]*\?=).*/\1$FMT/" \
              -e "s/^(TOOLCHAIN[[:space:]]*\?=).*/\1${TOOLCHAIN}/" \
              config.mk
            make -j                 # Build sparseBench
            cd tests
            make -j                 # Build tests
            mpirun -n 1 ./runTests  # Run (single rank) tests
            cd ..
          }
          # Run the suite for both supported matrix formats
          run_tests CRS
          run_tests SCS