Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 3 additions & 3 deletions .github/workflows/build-and-test-linux.yml
Original file line number Diff line number Diff line change
Expand Up @@ -361,14 +361,14 @@ jobs:
$MPICC mpi_test.c -o mpi_test
mpirun -np 2 ./mpi_test

- uses: julia-actions/cache@v2
if: needs.filter.outputs.test == 'true'

- uses: julia-actions/setup-julia@v2
if: needs.filter.outputs.test == 'true'
with:
version: '1'

- uses: julia-actions/cache@v2
if: needs.filter.outputs.test == 'true'

- name: Build Palace
if: needs.filter.outputs.test == 'true'
env:
Expand Down
3 changes: 3 additions & 0 deletions .github/workflows/build-and-test-macos.yml
Original file line number Diff line number Diff line change
Expand Up @@ -135,6 +135,9 @@ jobs:
with:
version: '1'

- uses: julia-actions/cache@v2
if: needs.filter.outputs.test == 'true'

- name: Configure Open MPI
if: needs.filter.outputs.test == 'true' && matrix.mpi == 'openmpi'
run: |
Expand Down
27 changes: 23 additions & 4 deletions .github/workflows/docs.yml
Original file line number Diff line number Diff line change
Expand Up @@ -36,18 +36,29 @@ jobs:
buildcache: true
color: true

# Workaround for https://github.com/spack/spack/issues/51505
#
# Currently not needed because we have a full copy of mfem and libceed
# because of https://github.com/spack/spack-packages/pull/2361

# - name: Overwrite builtin Palace package
# if: needs.filter.outputs.test == 'true'
# run: |
# ln -s "$(spack location --repo builtin)/packages/libceed" spack_repo/local/packages/
# ln -s "$(spack location --repo builtin)/packages/mfem" spack_repo/local/packages/

- name: Setup Environment
run: |
# Spack.yaml with most / all settings configured
cat << EOF > spack.yaml
spack:
specs:
- palace # we install this without a cache each time
- local.palace@develop # we install this without a cache each time
view: false
config:
install_tree:
root: /opt/spack
padded_length: False
padded_length: false
concretizer:
reuse: false
unify: true
Expand Down Expand Up @@ -98,7 +109,7 @@ jobs:
- name: Concretize Spack
run: |
# Using `spack develop` in order to have an in-source build
spack -e . develop --path=$(pwd) local.palace@git."${{ github.head_ref || github.ref_name }}"=develop
spack -e . develop --path=$(pwd) palace@git."${{ github.head_ref || github.ref_name }}"=develop
spack -e . concretize -f

# Relies on cache-hit(s)
Expand All @@ -111,7 +122,15 @@ jobs:
run: |
spack -e . install --only-concrete --show-log-on-error --only package --keep-stage --no-cache
# If you want to use the branch-local source instead (should we always do this?)
# spack -e . develop --path=$(pwd) local.palace@git."${{ github.head_ref || github.ref_name }}"=develop
# spack -e . develop --path=$(pwd) palace@git."${{ github.head_ref || github.ref_name }}"=develop

- uses: julia-actions/setup-julia@v2
if: needs.filter.outputs.test == 'true'
with:
version: '1'

- uses: julia-actions/cache@v2
if: needs.filter.outputs.test == 'true'

- name: Build and deploy
env:
Expand Down
89 changes: 89 additions & 0 deletions .github/workflows/generate_test_matrix.py
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Left a comment in spack.yaml, but it's unclear how this is used in relation to the GitHub workflow.

Original file line number Diff line number Diff line change
@@ -0,0 +1,89 @@
#!/usr/bin/env python3

"""Test Matrix Generator

Generates pairwise test combinations used in spack.yml using
[allpairspy](https://github.com/thombashi/allpairspy).

## Usage

```bash
pip install -U pyyaml allpairspy
python generate_test_matrix.py
```

It enforces certain constraints (see `is_valid`).

"""
from allpairspy import AllPairs
import yaml

# Pairwise test dimensions, in positional order. AllPairs selects a small set
# of combinations covering every pair of values across dimensions; the key
# names for each position are assigned when the matrix is built below.
parameters = [
    ["x86", "x86", "arm"],  # architecture; "x86" listed twice to favor x86 in pairing
    ["gcc", "llvm", "intel-oneapi-compilers"],  # compiler
    ["openmpi", "mpich", "intel-oneapi-mpi"],  # MPI implementation
    ["openblas", "amdblis", "armpl-gcc", "intel-oneapi-mkl"],  # math libraries
    ["+shared", "~shared"],  # shared vs. static build
    ["+int64", "~int64"],  # 64-bit integer indices
    ["~openmp", "+openmp"],  # OpenMP threading
    ["+arpack", "+slepc"],  # eigensolver backend
    ["+mumps", "+superlu-dist", "+strumpack"],  # sparse direct solver
    ["~cuda"],  # CUDA is never enabled in this matrix
]

def is_valid(combo):
    """Return True if a (possibly partial) parameter combination is allowed.

    AllPairs invokes the filter with growing prefixes of a candidate
    combination; prefixes shorter than four entries cannot yet be checked
    against any rule and are always accepted.
    """
    # Partial combination: arch/compiler/mpi/math-libs not all chosen yet.
    if len(combo) < 4:
        return True

    arch, compiler, mpi, math_libs = combo[:4]

    # The Intel toolchain pieces (compiler, MPI, MKL) must be used together,
    # and only on x86.
    uses_intel = (compiler == "intel-oneapi-compilers",
                  mpi == "intel-oneapi-mpi",
                  math_libs == "intel-oneapi-mkl")
    if any(uses_intel) and (arch != "x86" or not all(uses_intel)):
        return False

    # ARMPL is only supported with GCC on ARM.
    if math_libs == "armpl-gcc" and not (compiler == "gcc" and arch == "arm"):
        return False

    # ARM builds are limited to the armpl-gcc or openblas math libraries.
    if arch == "arm" and math_libs not in ("armpl-gcc", "openblas"):
        return False

    return True

# Field names corresponding positionally to the rows of `parameters`.
_field_names = ("arch", "compiler", "mpi", "math-libs", "shared",
                "int", "openmp", "eigensolver", "solver", "cuda")

# Expand the filtered pairwise combinations into named matrix entries;
# dict(zip(...)) preserves the field order above (insertion order).
matrix = [dict(zip(_field_names, combo))
          for combo in AllPairs(parameters, filter_func=is_valid)]

# Add one case that turns on multiple direct solvers and both eigensolvers at
# once (to check that there's no problem with compiling several of them
# together).
# NOTE(review): "+sundials" appears here but not in the `parameters` list
# above — presumably intentional extra coverage; confirm.
matrix.append({
    "arch": "x86",
    "compiler": "gcc",
    "mpi": "openmpi",
    "math-libs": "openblas",
    "shared": "~shared",
    "int": "~int64",
    "openmp": "+openmp",
    "eigensolver": "+slepc+arpack",
    "solver": "+superlu-dist+mumps+sundials+strumpack",
    "cuda": "~cuda"
})

# Emit the matrix as block-style YAML on stdout, keeping key insertion order.
print(yaml.dump(matrix, default_flow_style=False, sort_keys=False))
Loading
Loading