Skip to content

Commit 2f5a38f

Browse files
Sébastien Loisel
authored and committed
Bundle MPICH with PETSc for ABI compatibility
- Use --download-mpich in PETSc build to bundle MPICH
- Package both lib/ and bin/ directories in tarball
- CI uses bundled libmpi and mpiexec from tarball
- Fixes MPI ABI mismatch between build and test environments
1 parent f52e487 commit 2f5a38f

3 files changed

Lines changed: 32 additions & 24 deletions

File tree

.github/workflows/CI.yml

Lines changed: 16 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -71,11 +71,11 @@ jobs:
7171
steps:
7272
- uses: actions/checkout@v4
7373

74-
- name: Install MPI and CUDA runtime
74+
- name: Install CUDA runtime
7575
run: |
7676
sudo apt-get update
77-
# Install system MPICH (same as build environment) and CUDA runtime
78-
sudo apt-get install -y mpich libmpich-dev nvidia-cuda-toolkit
77+
# Only install CUDA runtime (MPI is bundled in the tarball)
78+
sudo apt-get install -y nvidia-cuda-toolkit
7979
8080
- uses: julia-actions/setup-julia@v2
8181
with:
@@ -84,7 +84,7 @@ jobs:
8484

8585
# Note: Intentionally NOT using julia-actions/cache@v2 here
8686
# The cache would restore MPI.jl compiled against JLL MPI, conflicting
87-
# with the system MPICH that the pre-built PETSc requires
87+
# with the bundled MPICH in the pre-built PETSc tarball
8888

8989
- name: Download pre-built PETSc STRUMPACK
9090
run: |
@@ -94,20 +94,26 @@ jobs:
9494
mkdir -p /tmp/petsc_strumpack
9595
tar -xzf /tmp/${PETSC_RELEASE_TAG}.tar.gz -C /tmp/petsc_strumpack
9696
echo "Done. Library at /tmp/petsc_strumpack/lib/libpetsc.so"
97+
echo "Contents of lib/:"
9798
ls -la /tmp/petsc_strumpack/lib/
99+
echo "Contents of bin/:"
100+
ls -la /tmp/petsc_strumpack/bin/
98101
env:
99102
GH_TOKEN: ${{ github.token }}
100103

101-
- name: Configure MPI to use system MPICH
104+
- name: Configure MPI to use bundled MPICH
102105
run: |
103-
# Configure MPI.jl to use the system MPICH (same as PETSc build environment)
104-
# CRITICAL: MPICH uses "libmpich", not "libmpi"
106+
# Configure MPI.jl to use the bundled MPICH from the tarball
107+
# This ensures ABI compatibility with the pre-built PETSc
105108
julia --project=. -e "
106109
using Pkg
107110
Pkg.add(\"MPIPreferences\")
108111
using MPIPreferences
109-
# Use libmpich (not libmpi!) - this is how MPICH names its library
110-
MPIPreferences.use_system_binary(library_names=[\"libmpich\"])
112+
# Use bundled libmpi from tarball
113+
MPIPreferences.use_system_binary(
114+
library_names=[\"libmpi\"],
115+
mpiexec=\"/tmp/petsc_strumpack/bin/mpiexec\"
116+
)
111117
# Force MPI to rebuild with new preferences
112118
Pkg.build(\"MPI\"; verbose=true)
113119
"
@@ -127,6 +133,7 @@ jobs:
127133
env:
128134
JULIA_PETSC_LIBRARY: /tmp/petsc_strumpack/lib/libpetsc.so
129135
LD_LIBRARY_PATH: /tmp/petsc_strumpack/lib
136+
MPIEXEC_PATH: /tmp/petsc_strumpack/bin/mpiexec
130137
run: |
131138
# Run tests directly instead of Pkg.test() to preserve LocalPreferences.toml
132139
# (Pkg.test() creates a fresh temp environment that loses MPI preferences)

.github/workflows/build-petsc-strumpack.yml

Lines changed: 12 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -79,13 +79,16 @@ jobs:
7979
INSTALL_DIR="${{ steps.build-info.outputs.install_dir }}"
8080
TAG="${{ steps.build-info.outputs.tag }}"
8181
82-
# Package lib/ directory only (uses system MPI, no bundled mpiexec)
82+
# Package lib/ and bin/ directories (bundled MPICH includes mpiexec)
8383
cd $INSTALL_DIR
8484
echo "Contents of lib/:"
8585
ls -lh lib/
8686
echo ""
87-
echo "Creating tarball of lib/..."
88-
tar -czvf ${TAG}.tar.gz lib/
87+
echo "Contents of bin/:"
88+
ls -lh bin/
89+
echo ""
90+
echo "Creating tarball of lib/ and bin/..."
91+
tar -czvf ${TAG}.tar.gz lib/ bin/
8992
mv ${TAG}.tar.gz $GITHUB_WORKSPACE/
9093
9194
echo "Created: ${TAG}.tar.gz"
@@ -97,26 +100,25 @@ jobs:
97100
tag_name: ${{ steps.build-info.outputs.tag }}
98101
name: PETSc ${{ steps.build-info.outputs.petsc_version }} with STRUMPACK${{ inputs.with_cuda && ' + CUDA' || '' }}
99102
body: |
100-
Pre-built PETSc library with STRUMPACK solver.
103+
Pre-built PETSc library with STRUMPACK solver and bundled MPICH.
101104
102105
**Build Configuration:**
103106
- PETSc version: ${{ steps.build-info.outputs.petsc_version }}
104107
- STRUMPACK: enabled
105108
- CUDA: ${{ inputs.with_cuda && 'enabled' || 'disabled' }}
106109
- Platform: Linux x86_64 (Ubuntu)
107-
- MPI: System MPICH (requires `apt install mpich libmpich-dev`)
110+
- MPI: Bundled MPICH (self-contained, no system MPI required)
108111
109112
**Usage:**
110113
```bash
111-
# Install system MPICH (required)
112-
sudo apt install mpich libmpich-dev
113-
114114
# Download and extract
115115
wget https://github.com/${{ github.repository }}/releases/download/${{ steps.build-info.outputs.tag }}/${{ steps.build-info.outputs.tag }}.tar.gz
116116
tar -xzf ${{ steps.build-info.outputs.tag }}.tar.gz
117117
118-
# Set environment variable before starting Julia
119-
export JULIA_PETSC_LIBRARY=/path/to/petsc_strumpack/lib/libpetsc.so
118+
# Set environment variables before starting Julia
119+
export JULIA_PETSC_LIBRARY=/path/to/lib/libpetsc.so
120+
export LD_LIBRARY_PATH=/path/to/lib:$LD_LIBRARY_PATH
121+
export MPIEXEC_PATH=/path/to/bin/mpiexec
120122
```
121123
files: ${{ steps.build-info.outputs.tag }}.tar.gz
122124
draft: false

src/build_petsc.jl

Lines changed: 4 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -307,17 +307,16 @@ end
307307
function _build_petsc_with_strumpack(src_dir::String, install_dir::String, with_debugging::Bool,
308308
with_cuda::Bool, verbose::Bool)
309309
# Build configuration flags - includes both STRUMPACK and MUMPS
310-
# Use system MPI via compiler wrappers (requires: apt install mpich libmpich-dev OR brew install mpich)
310+
# Bundle MPICH with PETSc for ABI compatibility (avoids mismatch between build/test environments)
311311
configure_flags = [
312312
"--prefix=$install_dir",
313-
"--with-cc=mpicc", # Use MPI C compiler wrapper
314-
"--with-cxx=mpicxx", # Use MPI C++ compiler wrapper
315-
"--with-fc=mpif90", # Use MPI Fortran compiler wrapper (needed for fblaslapack)
313+
"--download-mpich", # Bundle MPICH for guaranteed ABI compatibility
314+
"--with-fc=gfortran", # Fortran needed for fblaslapack
316315
"--with-debugging=$(with_debugging ? 1 : 0)",
317316
"--with-shared-libraries=1",
318317
]
319318
if verbose
320-
@info "Building with system MPI (using mpicc/mpicxx/mpif90 wrappers)"
319+
@info "Building with bundled MPICH (for ABI compatibility)"
321320
end
322321

323322
# Add common dependencies

0 commit comments

Comments (0)