Skip to content

Commit 2405525

Browse files
author
Jeremy E Kozdon
committed
Add a parallel matrix type and test
1 parent aad1ec4 commit 2405525

4 files changed

Lines changed: 273 additions & 12 deletions

File tree

src/mat.jl

Lines changed: 156 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -100,6 +100,109 @@ function MatSeqDense(
100100
return mat
101101
end
102102

103+
"""
    MatAIJ(
        petsclib::PetscLib,
        comm::MPI.Comm,
        loc_num_rows::Integer,
        loc_num_cols::Integer,
        diag_nonzeros::Union{Integer, Vector},
        off_diag_nonzeros::Union{Integer, Vector};
        glo_num_rows = PETSC_DETERMINE,
        glo_num_cols = PETSC_DETERMINE,
        setup = true
    ) where {PetscLib <: PetscLibType}

Create an MPI PETSc sparse array on the `comm` using AIJ format (also known as a
compressed sparse row or CSR format) of size `glo_num_rows X glo_num_cols` with
local size `loc_num_rows X loc_num_cols`.

The diagonal block and off-diagonal block non-zeros are `diag_nonzeros` and
`off_diag_nonzeros` which can be either an integer (same for all rows) or a
Vector of `PetscInt`s with one entry per row.

Memory allocation is handled by PETSc.

If `glo_num_rows isa Integer` or `glo_num_cols isa Integer` then the
corresponding local variable can be `PETSC_DECIDE`.

If `setup == true` then [`setup!`](@ref) is called

# External Links
$(_doc_external("Mat/MatCreateAIJ"))
$(_doc_external("Mat/MatSetUp"))

!!! note

    The user is responsible for calling `destroy(mat)` on the `MatAIJ` since
    this cannot be handled by the garbage collector due to the MPI nature of the
    object.
"""
mutable struct MatAIJ{PetscLib, PetscScalar} <:
               AbstractMat{PetscLib, PetscScalar}
    # Raw pointer to the underlying PETSc Mat object (C_NULL until created)
    ptr::CMat
end
145+
146+
function MatAIJ(
    petsclib::PetscLib,
    comm::MPI.Comm,
    loc_num_rows::Integer,
    loc_num_cols::Integer,
    diag_nonzeros::Union{Integer, Vector},
    off_diag_nonzeros::Union{Integer, Vector};
    glo_num_rows = PETSC_DETERMINE,
    glo_num_cols = PETSC_DETERMINE,
    setup = true,
) where {PetscLib <: PetscLibType}
    @assert initialized(petsclib)
    PetscScalar = petsclib.PetscScalar
    mat = MatAIJ{PetscLib, PetscScalar}(C_NULL)

    # MatCreateAIJ accepts either a single nonzero count per block (with a
    # NULL per-row array) or a per-row array (in which case the scalar count
    # is ignored; -1 is the conventional placeholder).
    if diag_nonzeros isa Integer
        d_nz, d_nnz = diag_nonzeros, C_NULL
    else
        d_nz, d_nnz = -1, diag_nonzeros
    end
    if off_diag_nonzeros isa Integer
        o_nz, o_nnz = off_diag_nonzeros, C_NULL
    else
        o_nz, o_nnz = -1, off_diag_nonzeros
    end

    LibPETSc.MatCreateAIJ(
        petsclib,
        comm,
        loc_num_rows,
        loc_num_cols,
        glo_num_rows,
        glo_num_cols,
        d_nz,
        d_nnz,
        o_nz,
        o_nnz,
        mat,
    )

    setup && setup!(mat)

    return mat
end
191+
192+
"""
    setup!(mat::AbstractMat)

Set up the internal data structures for `mat`.

# External Links
$(_doc_external("Mat/MatSetUp"))
"""
function setup!(mat::AbstractMat{PetscLib}) where {PetscLib}
    @assert initialized(PetscLib)
    LibPETSc.MatSetUp(PetscLib, mat)
    return mat
end
205+
103206
"""
104207
assemble!(M::AbstractMat[, t::MatAssemblyType = MAT_FINAL_ASSEMBLY])
105208
@@ -151,6 +254,35 @@ function assemblyend!(
151254
return M
152255
end
153256

257+
"""
    ownershiprange(mat::AbstractMat, [base_one = true])

The range of row indices owned by this processor, assuming that the `mat` is
laid out with the first `n1` rows on the first processor, the next `n2` rows on
the second, etc. For certain parallel layouts this range may not be well
defined.

If the optional argument `base_one == true` then base-1 indexing is used,
otherwise base-0 indexing is used.

!!! note

    unlike the C function, the range returned is inclusive (`idx_first:idx_last`)

# External Links
$(_doc_external("Mat/MatGetOwnershipRange"))
"""
function ownershiprange(
    mat::AbstractMat{PetscLib},
    base_one::Bool = true,
) where {PetscLib}
    PetscInt = PetscLib.PetscInt
    row_first = Ref{PetscInt}()
    row_last_p1 = Ref{PetscInt}()
    LibPETSc.MatGetOwnershipRange(PetscLib, mat, row_first, row_last_p1)
    # The C API returns a half-open range [lo, hi); convert to an inclusive
    # Julia range in the requested index base.
    if base_one
        return (row_first[] + PetscInt(1)):(row_last_p1[])
    else
        return (row_first[]):(row_last_p1[] - PetscInt(1))
    end
end
285+
154286
function Base.size(A::AbstractMat{PetscLib}) where {PetscLib}
155287
m = Ref{PetscLib.PetscInt}()
156288
n = Ref{PetscLib.PetscInt}()
@@ -342,3 +474,27 @@ function Base.copyto!(M::PETSc.MatSeqAIJ{T}, S::SparseMatrixCSC{T}) where {T}
342474
assemble(M);
343475
end
344476
=#
477+
478+
"""
    createvecs(
        M::AbstractMat{PetscLib},
    )

Returns vectors `V` which are compatible with `M`. A right compatible vector is
`V.right` and a left compatible vector is `V.left`; positionally these are
returned as `(right, left)`

The created vectors are not garbage collected and should be destroyed with
[`destroy`](@ref).

# External Links
$(_doc_external("Mat/MatCreateVecs"))
"""
function createvecs(M::AbstractMat{PetscLib}) where {PetscLib}
    # Raw vector handles filled in by PETSc
    right_ref = Ref{CVec}()
    left_ref = Ref{CVec}()
    LibPETSc.MatCreateVecs(PetscLib, M, right_ref, left_ref)
    return (
        right = VecPtr(PetscLib, right_ref[]),
        left = VecPtr(PetscLib, left_ref[]),
    )
end

src/vec.jl

Lines changed: 25 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -239,7 +239,7 @@ function setvalues!(
239239
idxs0::Vector{PetscInt},
240240
vals::Array{PetscScalar},
241241
insertmode::InsertMode;
242-
num_idxs = length(idxs0)
242+
num_idxs = length(idxs0),
243243
) where {PetscLib, PetscInt, PetscScalar}
244244
@assert length(vals) >= num_idxs
245245
@assert PetscInt == PetscLib.PetscInt
@@ -267,7 +267,7 @@ function getvalues!(
267267
vals::Array{PetscScalar},
268268
v::AbstractVec{PetscLib},
269269
idxs0::Vector{PetscInt};
270-
num_idxs = length(idxs0)
270+
num_idxs = length(idxs0),
271271
) where {PetscLib, PetscInt, PetscScalar}
272272
@assert length(vals) >= num_idxs
273273
@assert PetscInt == PetscLib.PetscInt
@@ -425,7 +425,7 @@ end
425425
!!! note
426426
`Base.finalize` is automatically called on the array.
427427
"""
428-
function with_localarray!(f!, vecs::NTuple{N, AbstractVec}; kwargs...) where N
428+
function with_localarray!(f!, vecs::NTuple{N, AbstractVec}; kwargs...) where {N}
429429
arrays = map(vecs) do v
430430
unsafe_localarray(v; kwargs...)
431431
end
@@ -462,6 +462,23 @@ function assemblyend!(vec::AbstractVec{PetscLib}) where {PetscLib}
462462
return nothing
463463
end
464464

465+
"""
    assemble!(v::AbstractVec)

Assemble the vector `v`.

For overlapping assembly see [`assemblybegin!`](@ref) and [`assemblyend!`](@ref)

# External Links
$(_doc_external("Vec/VecAssemblyBegin"))
$(_doc_external("Vec/VecAssemblyEnd"))
"""
function assemble!(v::AbstractVec)
    # Non-overlapping assembly: begin and immediately complete.
    assemblybegin!(v)
    assemblyend!(v)
    return v
end
481+
465482
mutable struct LocalVec{PetscLib, PetscScalar, GVec} <:
466483
AbstractVec{PetscLib, PetscScalar}
467484
ptr::CVec
@@ -485,7 +502,7 @@ Obtains the local ghosted representation of a [`Vec`](@ref).
485502
# External Links
486503
$(_doc_external("Vec/VecGhostGetLocalForm"))
487504
"""
488-
function getlocalform(gvec::AbstractVec{PetscLib}) where PetscLib
505+
function getlocalform(gvec::AbstractVec{PetscLib}) where {PetscLib}
489506
lvec = LocalVec(gvec)
490507
LibPETSc.VecGhostGetLocalForm(PetscLib, gvec, lvec)
491508
if lvec.ptr == C_NULL
@@ -504,7 +521,7 @@ Restore the `local_vec` to the associated global vector after a call to
504521
# External Links
505522
$(_doc_external("Vec/VecGhostRestoreLocalForm"))
506523
"""
507-
function restorelocalform!(lvec::LocalVec{PetscLib}) where PetscLib
524+
function restorelocalform!(lvec::LocalVec{PetscLib}) where {PetscLib}
508525
LibPETSc.VecGhostRestoreLocalForm(PetscLib, lvec.gvec, lvec)
509526
lvec.ptr = C_NULL
510527
return lvec.gvec
@@ -548,7 +565,7 @@ function ghostupdatebegin!(
548565
vec::AbstractVec{PetscLib},
549566
insertmode = INSERT_VALUES,
550567
scattermode = SCATTER_FORWARD,
551-
) where PetscLib
568+
) where {PetscLib}
552569
LibPETSc.VecGhostUpdateBegin(PetscLib, vec, insertmode, scattermode)
553570
return nothing
554571
end
@@ -569,13 +586,12 @@ function ghostupdateend!(
569586
vec::AbstractVec{PetscLib},
570587
insertmode = INSERT_VALUES,
571588
scattermode = SCATTER_FORWARD,
572-
) where PetscLib
589+
) where {PetscLib}
573590
LibPETSc.VecGhostUpdateEnd(PetscLib, vec, insertmode, scattermode)
574591
return nothing
575592
end
576593

577-
578-
function getpetsctype(vec::AbstractVec{PetscLib}) where PetscLib
594+
function getpetsctype(vec::AbstractVec{PetscLib}) where {PetscLib}
579595
name_r = Ref{LibPETSc.VecType}()
580596
LibPETSc.VecGetType(PetscLib, vec, name_r)
581597
return unsafe_string(name_r[])

test/mpimat.jl

Lines changed: 86 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,86 @@
1+
using Test
using MPI
MPI.Initialized() || MPI.Init()
using PETSc
using LinearAlgebra: mul!, norm

# Build a distributed tridiagonal (1-D Laplacian-like) matrix with MatAIJ,
# verify a parallel mat-vec product entry-by-entry, and check the Frobenius
# norm computed across ranks.
@testset "MatAIJ" begin
    comm = MPI.COMM_WORLD
    mpisize = MPI.Comm_size(comm)
    mpirank = MPI.Comm_rank(comm)

    for petsclib in PETSc.petsclibs
        PETSc.initialize(petsclib)
        PetscScalar = petsclib.PetscScalar
        PetscInt = petsclib.PetscInt

        loc_num_rows = 5
        loc_num_cols = 5
        diag_nonzeros = 3
        off_diag_non_zeros = 3

        mat = PETSc.MatAIJ(
            petsclib,
            comm,
            loc_num_rows,
            loc_num_cols,
            diag_nonzeros,
            off_diag_non_zeros,
        )

        # Get compatible vectors
        (right, left) = PETSc.createvecs(mat)

        # Fill the matrix and right vector using base-0 (PETSc) row indices
        row_rng = PETSc.ownershiprange(mat, false)
        for i in row_rng
            if i == 0
                # first global row: no left neighbor
                vals = [-2, 1]
                row0idxs = [i]
                col0idxs = [i, i + 1]
            elseif i == mpisize * loc_num_rows - 1
                # last global row: no right neighbor
                vals = [-2, 1]
                row0idxs = [i]
                col0idxs = [i, i - 1]
            else
                vals = [1, -2, 1]
                row0idxs = [i]
                col0idxs = [i - 1, i, i + 1]
            end
            PETSc.setvalues!(
                mat,
                PetscInt.(row0idxs),
                PetscInt.(col0idxs),
                PetscScalar.(vals),
            )
            right[i + 1] = i^3
        end
        PETSc.assemble!(mat)
        PETSc.assemble!(right)

        # Do matrix multiply and check result against the stencil applied to i^3
        mul!(left, mat, right)
        for i in row_rng
            if i == 0
                v = -2 * i^3 + (i + 1)^3
            elseif i == mpisize * loc_num_rows - 1
                v = (i - 1)^3 - 2 * i^3
            else
                v = (i - 1)^3 - 2 * i^3 + (i + 1)^3
            end
            @test v == left[i + 1]
        end

        # Check the norm in parallel: Frobenius norm of `sz` diagonal entries
        # of -2 plus 2(sz - 1) off-diagonal entries of 1.
        # FIX: the comparison operator was missing (`@test norm(mat) exact_norm`
        # is invalid syntax); use `≈` for an approximate float comparison.
        sz = loc_num_rows * mpisize
        exact_norm = sqrt(sz * 2^2 + 2 * (sz - 1))
        @test norm(mat) ≈ exact_norm

        PETSc.destroy(mat)
        PETSc.destroy(right)
        PETSc.destroy(left)

        PETSc.finalize(petsclib)
    end
end
nothing

test/runtests.jl

Lines changed: 6 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -3,9 +3,12 @@ using MPI: mpiexec
33

44
# Do the MPI tests first so we do not have mpi running inside MPI
55
@testset "mpi tests" begin
    # Launch each MPI-driven test script in its own `mpiexec` invocation with
    # 4 ranks, so MPI is not initialized inside an already-running MPI session.
    for file in ("mpivec.jl", "mpimat.jl")
        @test mpiexec() do mpi_cmd
            cmd =
                `$mpi_cmd -n 4 $(Base.julia_cmd()) --startup-file=no --project $file`
            success(pipeline(cmd, stderr = stderr))
        end
    end
end
1114

0 commit comments

Comments
 (0)