diff --git a/Project.toml b/Project.toml
index 640e857c..b5d5c2fe 100644
--- a/Project.toml
+++ b/Project.toml
@@ -16,7 +16,8 @@ julia = "1.3"
 
 [extras]
 ForwardDiff = "f6369f11-7733-5829-9624-2563aa707210"
+Printf = "de0858da-6303-5e67-8744-51eddeeeb8d7"
 UnicodePlots = "b8865327-cd53-5732-bb35-84acbb429228"
 
 [targets]
-test = ["ForwardDiff", "UnicodePlots"]
+test = ["ForwardDiff", "UnicodePlots", "Printf"]
diff --git a/examples/ksp/ex50.jl b/examples/ksp/ex50.jl
new file mode 100644
index 00000000..790fb1bd
--- /dev/null
+++ b/examples/ksp/ex50.jl
@@ -0,0 +1,178 @@
+# INCLUDE IN MPI TEST
+
+using PETSc, MPI, Printf
+MPI.Initialized() || MPI.Init()
+PETSc.initialize()
+
+# Set up the RHS for a cell-centered scheme for the PDE
+#
+#    Δu = -cos(π * x) * cos(π * y)
+#
+# with zero Neumann boundary conditions
+function rhs!(
+    ksp::PETSc.AbstractKSP{PetscScalar},
+    b_vec::PETSc.AbstractVec{PetscScalar},
+) where {PetscScalar}
+    dm = PETSc.DMDA(ksp)
+    comm = PETSc.getcomm(ksp)
+    corners = PETSc.getcorners(dm)
+    global_size = PETSc.getinfo(dm).global_size[1:2]
+
+    # Grid spacing in each direction
+    h = PetscScalar(1) ./ global_size
+
+    # Build the RHS vector for the cell-centered grid. Since b_vec is a PETSc
+    # vector, not a Julia vector, we need to convert it before operating on
+    # it
+    PETSc.map_unsafe_localarray!(b_vec; read = false) do b
+        b = reshape(b, Int64(corners.size[1]), Int64(corners.size[2]))
+        for (iy, y) in enumerate(corners.lower[2]:corners.upper[2])
+            y = (2y - 1) * h[2] / 2
+            for (ix, x) in enumerate(corners.lower[1]:corners.upper[1])
+                x = (2x - 1) * h[1] / 2
+                b[ix, iy] = -cos(π * x) * cos(π * y) * h[1] * h[2]
+            end
+        end
+    end
+
+    # Assemble the PETSc vector
+    PETSc.assemblybegin(b_vec)
+    PETSc.assemblyend(b_vec)
+
+    # Hack to remove the constant from the vector since we are using all
+    # Neumann boundary conditions
+    nullspace = PETSc.MatNullSpace{PetscScalar}(comm, PETSc.PETSC_TRUE)
+    PETSc.MatNullSpaceRemove!(nullspace, b_vec)
+    PETSc.destroy(nullspace)
+
+    # zero is the PETSc success code
+    return 0
+end
+
+function jacobian!(
+    ksp::PETSc.AbstractKSP{PetscScalar},
+    J::PETSc.AbstractMat{PetscScalar},
+    jac::PETSc.AbstractMat{PetscScalar},
+) where {PetscScalar}
+    dm = PETSc.DMDA(ksp)
+    corners = PETSc.getcorners(dm)
+    PetscInt = eltype(corners.size)
+
+    global_size = PETSc.getinfo(dm).global_size[1:2]
+
+    # Grid spacing in each direction
+    h = PetscScalar(1) ./ global_size
+
+    # XXX: Would MVectors be better?
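+    # Five-point finite-volume stencil: each interior cell couples to its
+    # west/east neighbors with weight -h[2]/h[1] and to its south/north
+    # neighbors with weight -h[1]/h[2]. The diagonal is minus the sum of the
+    # off-diagonal entries, so every row sums to zero, consistent with the
+    # constant nullspace of the pure-Neumann operator.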
+    Sten = PETSc.MatStencil{PetscInt}
+    col = Vector{Sten}(undef, 5)
+    row = Vector{Sten}(undef, 1)
+    val = Vector{PetscScalar}(undef, 5)
+
+    for j in corners.lower[2]:corners.upper[2]
+        for i in corners.lower[1]:corners.upper[1]
+            row[1] = Sten(i = i, j = j)
+            num = 1
+            fill!(val, 0)
+            if i > 1
+                val[num] = -h[2] / h[1]
+                col[num] = Sten(i = i - 1, j = j)
+                num += 1
+            end
+            if i < global_size[1]
+                val[num] = -h[2] / h[1]
+                col[num] = Sten(i = i + 1, j = j)
+                num += 1
+            end
+            if j > 1
+                val[num] = -h[1] / h[2]
+                col[num] = Sten(i = i, j = j - 1)
+                num += 1
+            end
+            if j < global_size[2]
+                val[num] = -h[1] / h[2]
+                col[num] = Sten(i = i, j = j + 1)
+                num += 1
+            end
+            val[num] = -sum(val)
+            col[num] = Sten(i = i, j = j)
+            PETSc.MatSetValuesStencil!(
+                jac,
+                row,
+                col,
+                val,
+                PETSc.INSERT_VALUES;
+                num_cols = num,
+            )
+        end
+    end
+
+    PETSc.assemblybegin(jac)
+    PETSc.assemblyend(jac)
+
+    # Since we have all-Neumann boundary conditions there is a constant
+    # nullspace
+    comm = PETSc.getcomm(ksp)
+    nullspace = PETSc.MatNullSpace{PetscScalar}(comm, PETSc.PETSC_TRUE)
+    PETSc.MatSetNullSpace!(J, nullspace)
+    PETSc.destroy(nullspace)
+    return 0
+end
+
+function main(petsclib; comm = MPI.COMM_WORLD, options...)
+    boundary_type = (PETSc.DM_BOUNDARY_NONE, PETSc.DM_BOUNDARY_NONE)
+    stencil_type = PETSc.DMDA_STENCIL_STAR
+    global_size = (11, 11)
+    procs = (PETSc.PETSC_DECIDE, PETSc.PETSC_DECIDE)
+    dof_per_node = 1
+    stencil_width = 1
+    points_per_proc = (nothing, nothing)
+
+    opts = if MPI.Comm_size(comm) == 1
+        (
+            ksp_monitor = true,
+            ksp_view = true,
+            da_grid_x = 100,
+            da_grid_y = 100,
+            pc_type = "mg",
+            pc_mg_levels = 1,
+            mg_levels_0_pc_type = "ilu",
+            mg_levels_0_pc_factor_levels = 1,
+        )
+    else
+        (
+            da_grid_x = 3,
+            da_grid_y = 3,
+            pc_type = "mg",
+            da_refine = 10,
+            ksp_monitor = nothing,
+            ksp_view = nothing,
+            log_view = nothing,
+        )
+    end
+
+    da = PETSc.DMDACreate2d(
+        petsclib,
+        comm,
+        boundary_type...,
+        stencil_type,
+        global_size...,
+        procs...,
+        dof_per_node,
+        stencil_width,
+        points_per_proc...;
+        opts...,
+    )
+
+    ksp = PETSc.KSP(da; opts...)
+
+    PETSc.KSPSetComputeRHS!(ksp, rhs!)
+    PETSc.KSPSetComputeOperators!(ksp, jacobian!)
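+
+    # With the DM attached, KSPSolve builds the right-hand side and operator
+    # by calling the callbacks registered above, so solve! needs no explicit
+    # vectors or matrices here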
+    PETSc.setfromoptions!(ksp)
+    PETSc.solve!(ksp)
+end
+
+for petsclib in PETSc.petsclibs
+    if PETSc.scalartype(petsclib) <: Real
+        main(petsclib)
+    end
+end
diff --git a/src/PETSc.jl b/src/PETSc.jl
index be68d74d..e9ef376f 100644
--- a/src/PETSc.jl
+++ b/src/PETSc.jl
@@ -28,6 +28,8 @@ include("options.jl")
 include("vec.jl")
 include("mat.jl")
 include("matshell.jl")
+include("dm.jl")
+include("dmda.jl")
 include("ksp.jl")
 include("pc.jl")
 include("snes.jl")
diff --git a/src/const.jl b/src/const.jl
index 2328e91b..f538bf81 100644
--- a/src/const.jl
+++ b/src/const.jl
@@ -12,6 +12,8 @@ macro chk(expr)
 end
 
 const PETSC_DEFAULT = -2
+const PETSC_DECIDE = -1
+const PETSC_DETERMINE = PETSC_DECIDE
 
 @enum PetscBool PETSC_FALSE PETSC_TRUE
 
@@ -115,4 +117,17 @@ end
     MATOP_SOLVE_ADD=8
     MATOP_SOLVE_TRANSPOSE=9
     MATOP_SOLVE_TRANSPOSE_ADD=10
-end
\ No newline at end of file
+end
+
+@enum DMBoundaryType begin
+    DM_BOUNDARY_NONE=0
+    DM_BOUNDARY_GHOSTED=1
+    DM_BOUNDARY_MIRROR=2
+    DM_BOUNDARY_PERIODIC=3
+    DM_BOUNDARY_TWIST=4
+end
+
+@enum DMDAStencilType begin
+    DMDA_STENCIL_STAR = 0
+    DMDA_STENCIL_BOX = 1
+end
diff --git a/src/dm.jl b/src/dm.jl
new file mode 100644
index 00000000..53d23d46
--- /dev/null
+++ b/src/dm.jl
@@ -0,0 +1,345 @@
+const CDM = Ptr{Cvoid}
+abstract type AbstractDM{PetscLib} end
+
+"""
+    DMLocalVec(v::CVec, dm::AbstractDM)
+
+Container for a PETSc vector we know is "local"
+
+# External Links
+$(_doc_external("Vec/Vec"))
+"""
+mutable struct DMLocalVec{PetscLib, T, T_DM} <: AbstractVec{T}
+    ptr::CVec
+    dm::T_DM
+    function DMLocalVec(ptr, dm::AbstractDM{PetscLib}) where {PetscLib}
+        new{PetscLib, scalartype(PetscLib), typeof(dm)}(ptr, dm)
+    end
+end
+
+"""
+    DMGlobalVec(v::CVec, dm::AbstractDM)
+
+Container for a PETSc vector we know is "global"
+
+# External Links
+$(_doc_external("Vec/Vec"))
+"""
+mutable struct DMGlobalVec{PetscLib, T, T_DM} <: AbstractVec{T}
+    ptr::CVec
+    dm::T_DM
+    function DMGlobalVec(ptr, dm::AbstractDM{PetscLib}) where {PetscLib}
+        new{PetscLib, scalartype(PetscLib), typeof(dm)}(ptr, dm)
+    end
+end
+
+# Mainly for DM we do not know the type of, namely ones returned by PETSc
+# functions such as `KSPGetDM`
+mutable struct PetscDM{PetscLib} <: AbstractDM{PetscLib}
+    ptr::CDM
+end
+
+"""
+    setup!(da::AbstractDM, opts=da.opts)
+
+Set up the internal data structures of the `da`.
+
+# External Links
+$(_doc_external("DM/DMSetUp"))
+"""
+function setup! end
+
+@for_petsc function setup!(da::AbstractDM{$PetscLib}, opts::Options = da.opts)
+    with(opts) do
+        @chk ccall((:DMSetUp, $petsc_library), PetscErrorCode, (CDM,), da)
+    end
+end
+
+"""
+    setfromoptions!(da::AbstractDM, opts=da.opts)
+
+Set parameters in the `da` from the options database.
+
+# External Links
+$(_doc_external("DM/DMSetFromOptions"))
+"""
+function setfromoptions! end
+
+@for_petsc function setfromoptions!(
+    da::AbstractDM{$PetscLib},
+    opts::Options = da.opts,
+)
+    with(opts) do
+        @chk ccall(
+            (:DMSetFromOptions, $petsc_library),
+            PetscErrorCode,
+            (CDM,),
+            da,
+        )
+    end
+end
+
+@for_petsc begin
+    function destroy(da::AbstractDM{$PetscLib})
+        finalized($PetscLib) || @chk ccall(
+            (:DMDestroy, $petsc_library),
+            PetscErrorCode,
+            (Ptr{CDM},),
+            da,
+        )
+        da.ptr = C_NULL
+        return nothing
+    end
+end
+
+"""
+    getdimension(dm::AbstractDM)
+
+Return the topological dimension of the `dm`
+
+# External Links
+$(_doc_external("DM/DMGetDimension"))
+"""
+function getdimension(::AbstractDM) end
+
+@for_petsc function getdimension(dm::AbstractDM{$PetscLib})
+    dim = Ref{$PetscInt}()
+
+    @chk ccall(
+        (:DMGetDimension, $petsc_library),
+        PetscErrorCode,
+        (CDM, Ptr{$PetscInt}),
+        dm,
+        dim,
+    )
+
+    return dim[]
+end
+
+"""
+    gettype(dm::AbstractDM)
+
+Gets the type name of the `dm`
+
+# External Links
+$(_doc_external("DM/DMGetType"))
+"""
+function gettype(::AbstractDM) end
+
+@for_petsc function gettype(dm::AbstractDM{$PetscLib})
+    t_r = Ref{Cstring}()
+    @chk ccall(
+        (:DMGetType, $petsc_library),
+        PetscErrorCode,
+        (CDM, Ptr{Cstring}),
+        dm,
+        t_r,
+    )
+    return unsafe_string(t_r[])
+end
+
+"""
+    view(dm::AbstractDM, viewer::Viewer=ViewerStdout(petsclib, getcomm(dm)))
+
+View the `dm` with the `viewer`
+
+# External Links
+$(_doc_external("DM/DMView"))
+"""
+function view(::AbstractDM) end
+
+@for_petsc function view(
+    dm::AbstractDM{$PetscLib},
+    viewer::AbstractViewer{$PetscLib} = ViewerStdout($petsclib, getcomm(dm)),
+)
+    @chk ccall(
+        (:DMView, $petsc_library),
+        PetscErrorCode,
+        (CDM, CPetscViewer),
+        dm,
+        viewer,
+    )
+    return nothing
+end
+
+"""
+    creatematrix(dm::AbstractDM)
+
+Generates a matrix from the `dm` object.
+
+# External Links
+$(_doc_external("DM/DMCreateMatrix"))
+"""
+function creatematrix end
+
+@for_petsc function creatematrix(dm::AbstractDM{$PetscLib})
+    mat = Mat{$PetscScalar}(C_NULL)
+
+    @chk ccall(
+        (:DMCreateMatrix, $petsc_library),
+        PetscErrorCode,
+        (CDM, Ptr{CMat}),
+        dm,
+        mat,
+    )
+
+    return mat
+end
+
+"""
+    createlocalvector(dm::AbstractDM)
+
+Returns a local vector from the `dm` object.
+
+# External Links
+$(_doc_external("DM/DMCreateLocalVector"))
+"""
+function createlocalvector end
+
+@for_petsc function createlocalvector(dm::AbstractDM{$PetscLib})
+    vec = DMLocalVec(C_NULL, dm)
+
+    @chk ccall(
+        (:DMCreateLocalVector, $petsc_library),
+        PetscErrorCode,
+        (CDM, Ptr{CVec}),
+        dm,
+        vec,
+    )
+
+    return vec
+end
+
+"""
+    createglobalvector(dm::AbstractDM)
+
+Returns a global vector from the `dm` object.
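+
+Use `update!` to move data between this vector and the ghosted local vector
+returned by `createlocalvector`.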
+ +# External Links +$(_doc_external("DM/DMCreateGlobalVector")) +""" +function createglobalvector end + +@for_petsc function createglobalvector(dm::AbstractDM{$PetscLib}) + vec = DMGlobalVec(C_NULL, dm) + + @chk ccall( + (:DMCreateGlobalVector, $petsc_library), + PetscErrorCode, + (CDM, Ptr{CVec}), + dm, + vec, + ) + + return vec +end + +""" + update!( + global_vec::DMGlobalVec, + local_vec::DMLocalVec, + mode::InsertMode, + ) + +Updates `global_vec` from `local_vec` with insert `mode` + +# External Links +$(_doc_external("DM/DMLocalToGlobal")) +""" +update!(::DMGlobalVec, ::DMLocalVec, ::InsertMode) + +@for_petsc function update!( + global_vec::DMGlobalVec{$PetscLib}, + local_vec::DMLocalVec{$PetscLib}, + mode::InsertMode, +) + @assert local_vec.dm === global_vec.dm + @chk ccall( + (:DMLocalToGlobal, $petsc_library), + PetscErrorCode, + (CDM, CVec, InsertMode, CVec), + local_vec.dm, + local_vec, + mode, + global_vec, + ) + + return nothing +end + +""" + update!( + local_vec::DMLocalVec, + global_vec::DMGlobalVec, + mode::InsertMode, + ) + +Updates `local_vec` from `global_vec` with insert `mode` + +# External Links +$(_doc_external("DM/DMGlobalToLocal")) +""" +update!(::DMLocalVec, ::DMGlobalVec, ::InsertMode) + +@for_petsc function update!( + local_vec::DMLocalVec{$PetscLib}, + global_vec::DMGlobalVec{$PetscLib}, + mode::InsertMode, +) + @assert local_vec.dm === global_vec.dm + @chk ccall( + (:DMGlobalToLocal, $petsc_library), + PetscErrorCode, + (CDM, CVec, InsertMode, CVec), + global_vec.dm, + global_vec, + mode, + local_vec, + ) + + return nothing +end + +""" + getcoordinateDM(dm::AbstractDM) + +Create a `coord_dm` for the coordinates of `dm`. + +# External Links +$(_doc_external("DM/DMGetCoordinateDM")) +""" +function getcoordinateDM end + +@for_petsc function getcoordinateDM(dm::AbstractDM{$PetscLib}) + coord_dm = empty(dm) + @chk ccall( + (:DMGetCoordinateDM, $petsc_library), + PetscErrorCode, + (CDM, Ptr{CDM}), + dm, + coord_dm, + ) + # If this fails then the `empty` call above is probably a bad idea! + @assert gettype(dm) == gettype(coord_dm) + + return coord_dm +end + +""" + getcoordinateslocal(dm::AbstractDM) + +Gets a local vector with the coordinates associated with `dm`. + +# External Links +$(_doc_external("DM/DMGetCoordinatesLocal")) +""" +function getcoordinateslocal end + +@for_petsc function getcoordinateslocal(dm::AbstractDM{$PetscLib}) + coord_vec = DMLocalVec(C_NULL, dm) + @chk ccall( + (:DMGetCoordinatesLocal, $petsc_library), + PetscErrorCode, + (CDM, Ptr{CVec}), + dm, + coord_vec, + ) + + return coord_vec +end diff --git a/src/dmda.jl b/src/dmda.jl new file mode 100644 index 00000000..f25332c3 --- /dev/null +++ b/src/dmda.jl @@ -0,0 +1,544 @@ +mutable struct DMDALocalInfo{IT} + dim::IT + dof_per_node::IT + stencil_width::IT + global_size::NTuple{3, IT} + local_start::NTuple{3, IT} + local_size::NTuple{3, IT} + ghosted_local_start::NTuple{3, IT} + ghosted_local_size::NTuple{3, IT} + boundary_type::NTuple{3, DMBoundaryType} + stencil_type::DMDAStencilType + ___padding___::NTuple{5, IT} + DMDALocalInfo{IT}() where {IT} = new{IT}() +end + +mutable struct DMDA{PetscLib} <: AbstractDM{PetscLib} + ptr::CDM + opts::Options{PetscLib} + DMDA{PetscLib}(ptr, opts = Options(PetscLib)) where {PetscLib} = + new{PetscLib}(ptr, opts) +end + +""" + empty(da::DMDA) + +return an uninitialized `DMDA` struct. 
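+This is mainly used to allocate a wrapper that a later PETSc call is expected
+to fill in, as is done in `getcoordinateDM`.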
+"""
+Base.empty(::DMDA{PetscLib}) where {PetscLib} = DMDA{PetscLib}(C_NULL)
+
+"""
+    DMDACreate1d(
+        ::PetscLib,
+        comm::MPI.Comm,
+        boundary_type::DMBoundaryType,
+        global_dim,
+        dof_per_node,
+        stencil_width,
+        points_per_proc::Union{Nothing, Vector{PetscInt}};
+        dmsetfromoptions=true,
+        dmsetup=true,
+        options...
+    )
+
+Creates a 1-D distributed array with the options specified using keyword
+arguments.
+
+If keyword argument `dmsetfromoptions == true` then `setfromoptions!` is
+called. If keyword argument `dmsetup == true` then `setup!` is called.
+
+# External Links
+$(_doc_external("DMDA/DMDACreate1d"))
+"""
+function DMDACreate1d end
+
+@for_petsc function DMDACreate1d(
+    ::$UnionPetscLib,
+    comm::MPI.Comm,
+    boundary_type::DMBoundaryType,
+    global_dim,
+    dof_per_node,
+    stencil_width,
+    points_per_proc::Union{Nothing, Vector{$PetscInt}};
+    dmsetfromoptions = true,
+    dmsetup = true,
+    options...,
+)
+    opts = Options($petsclib, options...)
+    ref_points_per_proc = if isnothing(points_per_proc)
+        C_NULL
+    else
+        @assert length(points_per_proc) == MPI.Comm_size(comm)
+        points_per_proc
+    end
+    da = DMDA{$PetscLib}(C_NULL, opts)
+    with(da.opts) do
+        @chk ccall(
+            (:DMDACreate1d, $petsc_library),
+            PetscErrorCode,
+            (
+                MPI.MPI_Comm,
+                DMBoundaryType,
+                $PetscInt,
+                $PetscInt,
+                $PetscInt,
+                Ptr{$PetscInt},
+                Ptr{CDM},
+            ),
+            comm,
+            boundary_type,
+            global_dim,
+            dof_per_node,
+            stencil_width,
+            ref_points_per_proc,
+            da,
+        )
+    end
+    dmsetfromoptions && setfromoptions!(da)
+    dmsetup && setup!(da)
+    # We can only let the garbage collector finalize when we do not need to
+    # worry about MPI (since garbage collection is asynchronous)
+    if comm == MPI.COMM_SELF
+        finalizer(destroy, da)
+    end
+    return da
+end
+
+"""
+    DMDACreate2d(
+        ::PetscLib,
+        comm::MPI.Comm,
+        boundary_type_x::DMBoundaryType,
+        boundary_type_y::DMBoundaryType,
+        stencil_type::DMDAStencilType,
+        global_dim_x,
+        global_dim_y,
+        procs_x,
+        procs_y,
+        dof_per_node,
+        stencil_width,
+        points_per_proc_x::Union{Nothing, Vector{PetscInt}},
+        points_per_proc_y::Union{Nothing, Vector{PetscInt}};
+        dmsetfromoptions=true,
+        dmsetup=true,
+        options...
+    )
+
+Creates a 2-D distributed array with the options specified using keyword
+arguments.
+
+If keyword argument `dmsetfromoptions == true` then `setfromoptions!` is
+called. If keyword argument `dmsetup == true` then `setup!` is called.
+
+# External Links
+$(_doc_external("DMDA/DMDACreate2d"))
+"""
+function DMDACreate2d end
+
+@for_petsc function DMDACreate2d(
+    ::$UnionPetscLib,
+    comm::MPI.Comm,
+    boundary_type_x::DMBoundaryType,
+    boundary_type_y::DMBoundaryType,
+    stencil_type::DMDAStencilType,
+    global_dim_x,
+    global_dim_y,
+    procs_x,
+    procs_y,
+    dof_per_node,
+    stencil_width,
+    points_per_proc_x::Union{Nothing, Vector{$PetscInt}},
+    points_per_proc_y::Union{Nothing, Vector{$PetscInt}};
+    dmsetfromoptions = true,
+    dmsetup = true,
+    options...,
+)
+    opts = Options($petsclib, options...)
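+    # `nothing` for a points-per-process vector becomes C_NULL so that PETSc
+    # decides the distribution; otherwise its length must match the process
+    # grid in that direction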
+    ref_points_per_proc_x = if isnothing(points_per_proc_x)
+        C_NULL
+    else
+        @assert length(points_per_proc_x) == procs_x
+        points_per_proc_x
+    end
+    ref_points_per_proc_y = if isnothing(points_per_proc_y)
+        C_NULL
+    else
+        @assert length(points_per_proc_y) == procs_y
+        points_per_proc_y
+    end
+    da = DMDA{$PetscLib}(C_NULL, opts)
+    with(da.opts) do
+        @chk ccall(
+            (:DMDACreate2d, $petsc_library),
+            PetscErrorCode,
+            (
+                MPI.MPI_Comm,
+                DMBoundaryType,
+                DMBoundaryType,
+                DMDAStencilType,
+                $PetscInt,
+                $PetscInt,
+                $PetscInt,
+                $PetscInt,
+                $PetscInt,
+                $PetscInt,
+                Ptr{$PetscInt},
+                Ptr{$PetscInt},
+                Ptr{CDM},
+            ),
+            comm,
+            boundary_type_x,
+            boundary_type_y,
+            stencil_type,
+            global_dim_x,
+            global_dim_y,
+            procs_x,
+            procs_y,
+            dof_per_node,
+            stencil_width,
+            ref_points_per_proc_x,
+            ref_points_per_proc_y,
+            da,
+        )
+    end
+    dmsetfromoptions && setfromoptions!(da)
+    dmsetup && setup!(da)
+    # We can only let the garbage collector finalize when we do not need to
+    # worry about MPI (since garbage collection is asynchronous)
+    if comm == MPI.COMM_SELF
+        finalizer(destroy, da)
+    end
+    return da
+end
+
+"""
+    DMDACreate3d(
+        ::PetscLib,
+        comm::MPI.Comm,
+        boundary_type_x::DMBoundaryType,
+        boundary_type_y::DMBoundaryType,
+        boundary_type_z::DMBoundaryType,
+        stencil_type::DMDAStencilType,
+        global_dim_x,
+        global_dim_y,
+        global_dim_z,
+        procs_x,
+        procs_y,
+        procs_z,
+        dof_per_node,
+        stencil_width,
+        points_per_proc_x::Union{Nothing, Vector{PetscInt}},
+        points_per_proc_y::Union{Nothing, Vector{PetscInt}},
+        points_per_proc_z::Union{Nothing, Vector{PetscInt}};
+        dmsetfromoptions=true,
+        dmsetup=true,
+        options...
+    )
+
+Creates a 3-D distributed array with the options specified using keyword
+arguments.
+
+If keyword argument `dmsetfromoptions == true` then `setfromoptions!` is
+called. If keyword argument `dmsetup == true` then `setup!` is called.
+
+# External Links
+$(_doc_external("DMDA/DMDACreate3d"))
+"""
+function DMDACreate3d end
+
+@for_petsc function DMDACreate3d(
+    ::$UnionPetscLib,
+    comm::MPI.Comm,
+    boundary_type_x::DMBoundaryType,
+    boundary_type_y::DMBoundaryType,
+    boundary_type_z::DMBoundaryType,
+    stencil_type::DMDAStencilType,
+    global_dim_x,
+    global_dim_y,
+    global_dim_z,
+    procs_x,
+    procs_y,
+    procs_z,
+    dof_per_node,
+    stencil_width,
+    points_per_proc_x::Union{Nothing, Vector{$PetscInt}},
+    points_per_proc_y::Union{Nothing, Vector{$PetscInt}},
+    points_per_proc_z::Union{Nothing, Vector{$PetscInt}};
+    dmsetfromoptions = true,
+    dmsetup = true,
+    options...,
+)
+    opts = Options($petsclib, options...)
+    ref_points_per_proc_x = if isnothing(points_per_proc_x)
+        C_NULL
+    else
+        @assert length(points_per_proc_x) == procs_x
+        points_per_proc_x
+    end
+    ref_points_per_proc_y = if isnothing(points_per_proc_y)
+        C_NULL
+    else
+        @assert length(points_per_proc_y) == procs_y
+        points_per_proc_y
+    end
+    ref_points_per_proc_z = if isnothing(points_per_proc_z)
+        C_NULL
+    else
+        @assert length(points_per_proc_z) == procs_z
+        points_per_proc_z
+    end
+    da = DMDA{$PetscLib}(C_NULL, opts)
+    with(da.opts) do
+        @chk ccall(
+            (:DMDACreate3d, $petsc_library),
+            PetscErrorCode,
+            (
+                MPI.MPI_Comm,
+                DMBoundaryType,
+                DMBoundaryType,
+                DMBoundaryType,
+                DMDAStencilType,
+                $PetscInt,
+                $PetscInt,
+                $PetscInt,
+                $PetscInt,
+                $PetscInt,
+                $PetscInt,
+                $PetscInt,
+                $PetscInt,
+                Ptr{$PetscInt},
+                Ptr{$PetscInt},
+                Ptr{$PetscInt},
+                Ptr{CDM},
+            ),
+            comm,
+            boundary_type_x,
+            boundary_type_y,
+            boundary_type_z,
+            stencil_type,
+            global_dim_x,
+            global_dim_y,
+            global_dim_z,
+            procs_x,
+            procs_y,
+            procs_z,
+            dof_per_node,
+            stencil_width,
+            ref_points_per_proc_x,
+            ref_points_per_proc_y,
+            ref_points_per_proc_z,
+            da,
+        )
+    end
+    dmsetfromoptions && setfromoptions!(da)
+    dmsetup && setup!(da)
+    # We can only let the garbage collector finalize when we do not need to
+    # worry about MPI (since garbage collection is asynchronous)
+    if comm == MPI.COMM_SELF
+        finalizer(destroy, da)
+    end
+    return da
+end
+
+"""
+    getinfo(da::DMDA)
+
+Get the info associated with the distributed array `da`.
+
+# External Links
+$(_doc_external("DMDA/DMDAGetInfo"))
+"""
+getinfo(::DMDA)
+
+@for_petsc function getinfo(da::DMDA{$PetscLib})
+    dim = [$PetscInt(0)]
+    glo_size = [$PetscInt(0), $PetscInt(0), $PetscInt(0)]
+    procs_per_dim = [$PetscInt(0), $PetscInt(0), $PetscInt(0)]
+    dof_per_node = [$PetscInt(0)]
+    stencil_width = [$PetscInt(0)]
+    boundary_type = [DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE]
+    stencil_type = [DMDA_STENCIL_STAR]
+    @chk ccall(
+        (:DMDAGetInfo, $petsc_library),
+        PetscErrorCode,
+        (
+            CDM,
+            Ref{$PetscInt},
+            Ref{$PetscInt},
+            Ref{$PetscInt},
+            Ref{$PetscInt},
+            Ref{$PetscInt},
+            Ref{$PetscInt},
+            Ref{$PetscInt},
+            Ref{$PetscInt},
+            Ref{$PetscInt},
+            Ref{DMBoundaryType},
+            Ref{DMBoundaryType},
+            Ref{DMBoundaryType},
+            Ref{DMDAStencilType},
+        ),
+        da,
+        dim,
+        Ref(glo_size, 1),
+        Ref(glo_size, 2),
+        Ref(glo_size, 3),
+        Ref(procs_per_dim, 1),
+        Ref(procs_per_dim, 2),
+        Ref(procs_per_dim, 3),
+        dof_per_node,
+        stencil_width,
+        Ref(boundary_type, 1),
+        Ref(boundary_type, 2),
+        Ref(boundary_type, 3),
+        stencil_type,
+    )
+    return (
+        dim = dim[1],
+        global_size = glo_size,
+        procs_per_dim = procs_per_dim,
+        dof_per_node = dof_per_node[1],
+        boundary_type = boundary_type,
+        stencil_width = stencil_width[1],
+        stencil_type = stencil_type[1],
+    )
+end
+
+"""
+    getcorners(da::DMDA)
+
+Returns a `NamedTuple` with the global indices (excluding ghost points) of the
+`lower` and `upper` corners as well as the `size`.
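+
+The returned indices follow Julia's 1-based convention.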
+ +# External Links +$(_doc_external("DMDA/DMDAGetCorners")) +""" +function getcorners end + +@for_petsc function getcorners(da::DMDA{$PetscLib}) + info = DMDALocalInfo{$PetscInt}() + corners = [$PetscInt(0), $PetscInt(0), $PetscInt(0)] + local_size = [$PetscInt(0), $PetscInt(0), $PetscInt(0)] + @chk ccall( + (:DMDAGetCorners, $petsc_library), + PetscErrorCode, + ( + CDM, + Ref{$PetscInt}, + Ref{$PetscInt}, + Ref{$PetscInt}, + Ref{$PetscInt}, + Ref{$PetscInt}, + Ref{$PetscInt}, + ), + da, + Ref(corners, 1), + Ref(corners, 2), + Ref(corners, 3), + Ref(local_size, 1), + Ref(local_size, 2), + Ref(local_size, 3), + ) + corners .+= 1 + return ( + lower = corners, + upper = corners .+ local_size .- $PetscInt(1), + size = local_size, + ) +end + +""" + getghostcorners(da::DMDA) + +Returns a `NamedTuple` with the global indices (including ghost points) of the +`lower` and `upper` corners as well as the `size`. + +# External Links +$(_doc_external("DMDA/DMDAGetGhostCorners")) +""" +function getghostcorners end + +@for_petsc function getghostcorners(da::DMDA{$PetscLib}) + info = DMDALocalInfo{$PetscInt}() + corners = [$PetscInt(0), $PetscInt(0), $PetscInt(0)] + local_size = [$PetscInt(0), $PetscInt(0), $PetscInt(0)] + @chk ccall( + (:DMDAGetGhostCorners, $petsc_library), + PetscErrorCode, + ( + CDM, + Ref{$PetscInt}, + Ref{$PetscInt}, + Ref{$PetscInt}, + Ref{$PetscInt}, + Ref{$PetscInt}, + Ref{$PetscInt}, + ), + da, + Ref(corners, 1), + Ref(corners, 2), + Ref(corners, 3), + Ref(local_size, 1), + Ref(local_size, 2), + Ref(local_size, 3), + ) + corners .+= 1 + return ( + lower = corners, + upper = corners .+ local_size .- 1, + size = local_size, + ) +end + +""" + setuniformcoordinates!( + da::DMDA + xyzmin::NTuple{N, Real}, + xyzmax::NTuple{N, Real}, + ) where {N} + +Set uniform coordinates for the `da` using the lower and upper corners defined +by the `NTuple`s `xyzmin` and `xyzmax`. If `N` is less than the dimension of the +`da` then the value of the trailing coordinates is set to `0`. + +# External Links +$(_doc_external("DMDA/DMDASetUniformCoordinates")) +""" +function setuniformcoordinates! end + +@for_petsc function setuniformcoordinates!( + da::DMDA{$PetscLib}, + xyzmin::NTuple{N, Real}, + xyzmax::NTuple{N, Real}, +) where {N} + xmin = $PetscReal(xyzmin[1]) + xmax = $PetscReal(xyzmax[1]) + + ymin = (N > 1) ? $PetscReal(xyzmin[2]) : $PetscReal(0) + ymax = (N > 1) ? $PetscReal(xyzmax[2]) : $PetscReal(0) + + zmin = (N > 2) ? $PetscReal(xyzmin[3]) : $PetscReal(0) + zmax = (N > 2) ? $PetscReal(xyzmax[3]) : $PetscReal(0) + + @chk ccall( + (:DMDASetUniformCoordinates, $petsc_library), + PetscErrorCode, + ( + CDM, + $PetscReal, + $PetscReal, + $PetscReal, + $PetscReal, + $PetscReal, + $PetscReal, + ), + da, + xmin, + xmax, + ymin, + ymax, + zmin, + zmax, + ) + return nothing +end diff --git a/src/ksp.jl b/src/ksp.jl index 30df464f..f2a5ba13 100644 --- a/src/ksp.jl +++ b/src/ksp.jl @@ -4,27 +4,79 @@ const CKSPType = Cstring abstract type AbstractKSP{T, PetscLib} <: Factorization{T} end -mutable struct KSP{T, PetscLib} <: AbstractKSP{T, PetscLib} - ptr::CKSP - # keep around so that they don't get gc'ed - A # Operator - P # preconditioning operator +Base.@kwdef mutable struct KSP{T, PetscLib} <: AbstractKSP{T, PetscLib} + ptr::CKSP = C_NULL opts::Options{PetscLib} + # Stuff to keep around so that they don't get gc'ed + _A = nothing + _P = nothing + _dm = nothing + # Function pointers + ComputeRHS! = nothing + ComputeOperators! 
= nothing end -scalartype(::KSP{T}) where {T} = T +struct WrappedKSP{T, PetscLib} <: AbstractKSP{T, PetscLib} + ptr::CKSP +end +scalartype(::KSP{T}) where {T} = T Base.eltype(::KSP{T}) where {T} = T + LinearAlgebra.transpose(ksp) = LinearAlgebra.Transpose(ksp) LinearAlgebra.adjoint(ksp) = LinearAlgebra.Adjoint(ksp) +""" + KSPSetComputeRHS!( + ksp::KSP{Number}, + ComputeRHS!, + ctx = C_NULL, + ) + +Set the right-hand side function `ComputeRHS!` for the `ksp` using the user +`ctx`. `ComputeRHS!` should be callable with three arguments of type +`(::KSP{Number}, ::Vec, ::Ptr)`; +see [PETSc manual](https://www.mcs.anl.gov/petsc/petsc-current/docs/manualpages/KSP/KSPSetComputeRHS.html) +""" +function KSPSetComputeRHS! end + +""" + struct Fn_KSPComputeRHS{T} end + +Type used to wrap `ComputeRHS!` functions in KSP +""" +struct Fn_KSPComputeRHS{T} end + +""" + KSPSetComputeOperators!( + ksp::KSP{Number}, + ComputeOperators!, + ctx = C_NULL, + ) + +Set the linear operators function `ComputeOperators!` for the `ksp` using the +user `ctx`. `ComputeOperators!` should be callable with four arguments of type +`(::KSP{Number}, ::Mat, ::Mat, ::Ptr)`; +see [PETSc manual](https://www.mcs.anl.gov/petsc/petsc-current/docs/manualpages/KSP/KSPSetComputeOperators.html) +""" +function KSPSetComputeOperators! end + +""" + struct Fn_KSPComputeOperators{T} end + +Type used to wrap `ComputeOperators!` functions in KSP +""" +struct Fn_KSPComputeOperators{T} end + @for_libpetsc begin function KSP{$PetscScalar}(comm::MPI.Comm; kwargs...) @assert initialized($petsclib) opts = Options($petsclib, kwargs...) - ksp = KSP{$PetscScalar, $PetscLib}(C_NULL, nothing, nothing, opts) - @chk ccall((:KSPCreate, $libpetsc), PetscErrorCode, (MPI.MPI_Comm, Ptr{CKSP}), comm, ksp) + ksp = KSP{$PetscScalar, $PetscLib}(opts=opts) + with(ksp.opts) do + @chk ccall((:KSPCreate, $libpetsc), PetscErrorCode, (MPI.MPI_Comm, Ptr{CKSP}), comm, ksp) + end if comm == MPI.COMM_SELF finalizer(destroy, ksp) end @@ -39,11 +91,92 @@ LinearAlgebra.adjoint(ksp) = LinearAlgebra.Adjoint(ksp) function setoperators!(ksp::KSP{$PetscScalar}, A::AbstractMat{$PetscScalar}, P::AbstractMat{$PetscScalar}) @chk ccall((:KSPSetOperators, $libpetsc), PetscErrorCode, (CKSP, CMat, CMat), ksp, A, P) - ksp.A = A - ksp.P = P + ksp._A = A + ksp._P = P return nothing end + function (::Fn_KSPComputeRHS{$PetscScalar})( + new_ksp_ptr::CKSP, + cb::CVec, + ksp_ptr::Ptr{Cvoid} + )::$PetscInt + ksp = unsafe_pointer_to_objref(ksp_ptr) + new_ksp = WrappedKSP{$PetscScalar, $PetscLib}(new_ksp_ptr) + b = Vec{$PetscScalar}(cb) + ierr = ksp.ComputeRHS!(new_ksp, b) + return $PetscInt(ierr) + end + + function KSPSetComputeRHS!( + ksp::KSP{$PetscScalar}, + ComputeRHS! + ) + ptr_ksp = pointer_from_objref(ksp) + fptr = @cfunction(Fn_KSPComputeRHS{$PetscScalar}(), + $PetscInt, + (CKSP, CVec, Ptr{Cvoid})) + with(ksp.opts) do + @chk ccall((:KSPSetComputeRHS, $libpetsc), PetscErrorCode, + (CKSP, Ptr{Cvoid}, Ptr{Cvoid}), + ksp, fptr, ptr_ksp) + end + ksp.ComputeRHS! = ComputeRHS! + return ksp + end + + function (::Fn_KSPComputeOperators{$PetscScalar})( + new_ksp_ptr::CKSP, + cA::CMat, + cP::CMat, + ksp_ptr::Ptr{Cvoid} + )::$PetscInt + ksp = unsafe_pointer_to_objref(ksp_ptr) + new_ksp = WrappedKSP{$PetscScalar, $PetscLib}(new_ksp_ptr) + A = Mat{$PetscScalar}(cA) + P = Mat{$PetscScalar}(cP) + ierr = ksp.ComputeOperators!(new_ksp, A, P) + return $PetscInt(ierr) + end + + function KSPSetComputeOperators!( + ksp::KSP{$PetscScalar}, + ComputeOperators! 
+ ) + ptr_ksp = pointer_from_objref(ksp) + fptr = @cfunction(Fn_KSPComputeOperators{$PetscScalar}(), + $PetscInt, + (CKSP, CMat, CMat, Ptr{Cvoid})) + with(ksp.opts) do + @chk ccall((:KSPSetComputeOperators, $libpetsc), PetscErrorCode, + (CKSP, Ptr{Cvoid}, Ptr{Cvoid}), + ksp, fptr, ptr_ksp) + end + ksp.ComputeOperators! = ComputeOperators! + return ksp + end + + function KSPSetDM!(ksp::KSP{$PetscScalar}, dm::AbstractDM{$PetscLib}) + with(ksp.opts) do + @chk ccall((:KSPSetDM, $libpetsc), PetscErrorCode, (CKSP, CDM), ksp, dm) + end + ksp._dm = dm + return nothing + end + + function DMDA(ksp::AbstractKSP{$PetscScalar}) + t_dm = Ref{CDM}() + @chk ccall( + (:KSPGetDM, $libpetsc), + PetscErrorCode, + (CKSP, Ptr{CDM}), + ksp, + t_dm, + ) + dm = DMDA{$PetscLib}(t_dm[]) + return dm + end + function settolerances!(ksp::KSP{$PetscScalar}; rtol=PETSC_DEFAULT, atol=PETSC_DEFAULT, divtol=PETSC_DEFAULT, max_it=PETSC_DEFAULT) @chk ccall((:KSPSetTolerances, $libpetsc), PetscErrorCode, (CKSP, $PetscReal, $PetscReal, $PetscReal, $PetscInt), @@ -52,7 +185,9 @@ LinearAlgebra.adjoint(ksp) = LinearAlgebra.Adjoint(ksp) end function setfromoptions!(ksp::KSP{$PetscScalar}) - @chk ccall((:KSPSetFromOptions, $libpetsc), PetscErrorCode, (CKSP,), ksp) + with(ksp.opts) do + @chk ccall((:KSPSetFromOptions, $libpetsc), PetscErrorCode, (CKSP,), ksp) + end end function gettype(ksp::KSP{$PetscScalar}) @@ -90,6 +225,14 @@ LinearAlgebra.adjoint(ksp) = LinearAlgebra.Adjoint(ksp) return x end + function solve!(ksp::KSP{$PetscScalar}) + with(ksp.opts) do + @chk ccall((:KSPSolve, $libpetsc), PetscErrorCode, + (CKSP, CVec, CVec), ksp, C_NULL, C_NULL) + end + return nothing + end + function solve!(x::AbstractVec{$PetscScalar}, tksp::Transpose{T,K}, b::AbstractVec{$PetscScalar}) where {T,K <: KSP{$PetscScalar}} ksp = parent(tksp) with(ksp.opts) do @@ -124,9 +267,24 @@ Any PETSc options prefixed with `ksp_` and `pc_` can be passed as keywords. function KSP(A::AbstractMat{T}, P::AbstractMat{T}=A; kwargs...) where {T} ksp = KSP{T}(getcomm(A); kwargs...) setoperators!(ksp, A, P) - with(ksp.opts) do - setfromoptions!(ksp) - end + setfromoptions!(ksp) + return ksp +end + +""" + KSP(da::AbstractDM; options...) + +Construct a PETSc Krylov subspace solver from the distributed mesh + +Any PETSc options prefixed with `ksp_` and `pc_` can be passed as keywords. + +see [PETSc manual](https://www.mcs.anl.gov/petsc/petsc-current/docs/manualpages/KSP/KSPSetDM.html) +""" +function KSP(dm::AbstractDM{PetscLib}; kwargs...) where {PetscLib} + T = scalartype(PetscLib) + ksp = KSP{T}(getcomm(dm); kwargs...) 
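+    # Attach the DM so PETSc can build the solver's vectors and operator from
+    # the mesh, then consume any remaining options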
+ KSPSetDM!(ksp, dm) + setfromoptions!(ksp) return ksp end diff --git a/src/mat.jl b/src/mat.jl index e4b044ba..d42f916b 100644 --- a/src/mat.jl +++ b/src/mat.jl @@ -1,4 +1,5 @@ const CMat = Ptr{Cvoid} +const CMatNullSpace = Ptr{Cvoid} abstract type AbstractMat{T} <: AbstractMatrix{T} end scalartype(::AbstractMat{T}) where {T} = T @@ -17,6 +18,18 @@ mutable struct MatSeqAIJ{T} <: AbstractMat{T} ptr::CMat end + +""" + Mat{T} + +Container for an abstract PETSc matrix + +See [PETSc manual](https://www.mcs.anl.gov/petsc/petsc-current/docs/manualpages/Mat/Mat.html) +""" +mutable struct Mat{T} <: AbstractMat{T} + ptr::CMat +end + """ MatSeqDense{T} @@ -27,9 +40,160 @@ mutable struct MatSeqDense{T} <: AbstractMat{T} array::Matrix{T} end +""" + MatStencil{PetscInt} + +Equivalent to the `MatStencil` in PETSc + +See [PETSc manual](https://www.mcs.anl.gov/petsc/petsc-current/docs/manualpages/Mat/MatStencil.html) +""" +struct MatStencil{PetscInt} + "third grid index" + k::PetscInt + "second grid index" + j::PetscInt + "first grid index" + i::PetscInt + "degree of freedom" + c::PetscInt + function MatStencil{PetscInt}(;i, j = 1, k = 1, c = 1) where PetscInt + # convert to zero-based indexing + new{PetscInt}(k - 1, j - 1, i - 1, c - 1) + end +end +# Since julia uses 1-based indexing we need to convert on access +function Base.getproperty(obj::MatStencil{PetscInt}, sym::Symbol) where PetscInt + if sym in (:i, :j, :k, :c) + return getfield(obj, sym) + PetscInt(1) + else # fallback to getfield + return getfield(obj, sym) + end +end +# Since julia uses 1-based indexing we need to convert on show +function Base.show(io::IO, m::MatStencil{PetscInt}) where {PetscInt} + print(io, typeof(m)) + print(io, "(i = ", m.i,) + print(io, ", j = ", m.j,) + print(io, ", k = ", m.k,) + print(io, ", c = ", m.c, ")") +end +function Base.show(io::IO, ::MIME"text/plain", m::MatStencil{PetscInt}) where PetscInt + print(io, "(i = ", m.i,) + print(io, ", j = ", m.j,) + print(io, ", k = ", m.k,) + print(io, ", c = ", m.c, ")") +end + + + +""" + MatNullSpace{T} + +Object that removes a null space from a vector, i.e. orthogonalizes the vector +to a subspace; +see [MatNullSpace](https://www.mcs.anl.gov/petsc/petsc-current/docs/manualpages/Mat/MatNullSpace.html) +and [MatNullSpaceCreate](https://www.mcs.anl.gov/petsc/petsc-current/docs/manualpages/Mat/MatNullSpaceCreate.html) + +!!! Note + The caller is responsible for calling `destroy` on this object +""" +mutable struct MatNullSpace{T} + ptr::CMatNullSpace + __comm__::MPI.Comm +end +# allows us to pass XXMat objects directly into CMat ccall signatures +Base.cconvert(::Type{CMatNullSpace}, obj::MatNullSpace) = obj.ptr +# allows us to pass XXMat objects directly into Ptr{CMat} ccall signatures +Base.unsafe_convert(::Type{Ptr{CMatNullSpace}}, obj::MatNullSpace) = + convert(Ptr{CMatNullSpace}, pointer_from_objref(obj)) + +""" + MatNullSpaceRemove!(nullspace, vec) + +Removes all the components of a `nullspace` from `vec` + +see [PETSc manual](https://www.mcs.anl.gov/petsc/petsc-current/docs/manualpages/Mat/MatNullSpaceRemove.html) +""" +function MatNullSpaceRemove! end + +""" + MatSetNullSpace!(mat, nullspace) + +Attach `nullspace` to `mat` + +see [PETSc manual](https://www.mcs.anl.gov/petsc/petsc-current/docs/manualpages/Mat/MatSetNullSpace.html) +""" +function MatSetNullSpace! 
end
+
+"""
+    MatSetValuesStencil!(mat::AbstractMat{PetscScalar},
+        rows::Vector{MatStencil{PetscInt}},
+        cols::Vector{MatStencil{PetscInt}},
+        vals::Vector{PetscScalar},
+        mode;
+        num_rows = length(rows),
+        num_cols = length(cols)
+    )
+
+Insert the `vals` specified by the `rows` and `cols` stencil indices into the
+`mat`. The optional arguments `num_rows` and `num_cols` limit how many entries
+of the `rows` and `cols` vectors are used.
+
+see [PETSc manual](https://www.mcs.anl.gov/petsc/petsc-current/docs/manualpages/Mat/MatSetValuesStencil.html)
+"""
+function MatSetValuesStencil! end
 
 @for_libpetsc begin
+    function MatNullSpace{$PetscScalar}(comm::MPI.Comm, has_constant, n=0, vecs=nothing)
+        @assert initialized($petsclib)
+        @assert n == 0 && isnothing(vecs)
+        nullspace = MatNullSpace{$PetscScalar}(C_NULL, comm)
+        @chk ccall((:MatNullSpaceCreate, $libpetsc), PetscErrorCode,
+                   (MPI.MPI_Comm,
+                    PetscBool,
+                    $PetscInt,
+                    Ptr{CVec},
+                    Ptr{CMatNullSpace}
+                   ),
+                   comm, has_constant, n, C_NULL, nullspace)
+        return nullspace
+    end
+
+    function MatNullSpaceRemove!(
+        nullspace::MatNullSpace{$PetscScalar},
+        vec::AbstractVec{$PetscScalar},
+    )
+        @chk ccall(
+            (:MatNullSpaceRemove, $libpetsc),
+            PetscErrorCode,
+            (CMatNullSpace, CVec),
+            nullspace,
+            vec,
+        )
+        return nothing
+    end
+
+
+    function MatSetNullSpace!(
+        mat::Mat{$PetscScalar},
+        nullspace::MatNullSpace{$PetscScalar},
+    )
+        @chk ccall(
+            (:MatSetNullSpace, $libpetsc),
+            PetscErrorCode,
+            (CMat, CMatNullSpace),
+            mat,
+            nullspace,
+        )
+        return nothing
+    end
+
+    function destroy(nullspace::MatNullSpace{$PetscScalar})
+        finalized($petsclib) ||
+            @chk ccall((:MatNullSpaceDestroy, $libpetsc), PetscErrorCode, (Ptr{CMatNullSpace},), nullspace)
+        return nothing
+    end
+
     function MatSeqAIJ{$PetscScalar}(m::Integer, n::Integer, nnz::Vector{$PetscInt})
         @assert initialized($petsclib)
         comm = MPI.COMM_SELF
@@ -51,6 +215,31 @@ end
         return mat
     end
 
+    function MatSetValuesStencil!(mat::AbstractMat{$PetscScalar},
+                                  rows::Vector{MatStencil{$PetscInt}},
+                                  cols::Vector{MatStencil{$PetscInt}},
+                                  vals::Vector{$PetscScalar},
+                                  mode::InsertMode;
+                                  num_rows = length(rows),
+                                  num_cols = length(cols),
+                                 )
+        @assert length(vals) >= num_cols * num_rows
+        @assert length(cols) >= num_cols
+        @assert length(rows) >= num_rows
+        @chk ccall((:MatSetValuesStencil, $libpetsc), PetscErrorCode,
+                   (CMat,
+                    $PetscInt, Ptr{MatStencil{$PetscInt}},
+                    $PetscInt, Ptr{MatStencil{$PetscInt}},
+                    Ptr{$PetscScalar}, InsertMode),
+                   mat,
+                   num_rows, rows,
+                   num_cols, cols,
+                   vals, mode
+                  )
+        return nothing
+    end
+
+
     function destroy(M::AbstractMat{$PetscScalar})
@@ -74,6 +263,19 @@ end
         return val
     end
 
+    function Base.getindex(M::AbstractMat{$PetscScalar}, i::Integer, j::Integer)
+        val = Ref{$PetscScalar}()
+        @chk ccall((:MatGetValues, $libpetsc), PetscErrorCode,
+                   (CMat,
+                    $PetscInt, Ptr{$PetscInt},
+                    $PetscInt, Ptr{$PetscInt},
+                    Ptr{$PetscScalar}),
+                   M,
+                   1, Ref{$PetscInt}(i-1),
+                   1, Ref{$PetscInt}(j-1),
+                   val)
+        return val[]
+    end
 
     function assemblybegin(M::AbstractMat{$PetscScalar}, t::MatAssemblyType=MAT_FINAL_ASSEMBLY)
         @chk ccall((:MatAssemblyBegin, $libpetsc), PetscErrorCode, (CMat, MatAssemblyType), M, t)
diff --git a/src/pc.jl b/src/pc.jl
index 689929f9..1dd2f715 100644
--- a/src/pc.jl
+++ b/src/pc.jl
@@ -2,7 +2,6 @@
 const CPC = Ptr{Cvoid}
 const CPCType = Cstring
 
-
 abstract type AbstractPC{T} end
 
 mutable struct PC{T} <: AbstractPC{T}
diff --git a/src/sys.jl b/src/sys.jl
index dc3b0729..c897f439 100644
--- a/src/sys.jl
+++ b/src/sys.jl
@@ -7,6 +7,7 @@ const UnionPetscTypes = Union{
     AbstractViewer,
     AbstractSNES,
     AbstractPC,
+    AbstractDM,
     Options,
 }
 
@@ -19,17 +20,18 @@ function Base.unsafe_convert(::Type{Ptr{CPetscObject}}, obj::UnionPetscTypes)
     convert(Ptr{CPetscObject}, pointer_from_objref(obj))
 end
 
-@for_libpetsc begin
+@for_petsc begin
     function getcomm(
         obj::Union{
             AbstractKSP{$PetscScalar},
             AbstractMat{$PetscScalar},
             AbstractVec{$PetscScalar},
+            AbstractDM{$PetscLib},
        },
    )
        comm = MPI.Comm()
        @chk ccall(
-            (:PetscObjectGetComm, $libpetsc),
+            (:PetscObjectGetComm, $petsc_library),
            PetscErrorCode,
            (CPetscObject, Ptr{MPI.MPI_Comm}),
            obj,
@@ -42,7 +44,7 @@ end
 #=
     # Call the PetscCommDuplicate to increase reference count
     @chk ccall(
-        (:PetscCommDuplicate, $libpetsc),
+        (:PetscCommDuplicate, $petsc_library),
         PetscErrorCode,
         (MPI.MPI_Comm, Ptr{MPI.MPI_Comm}, Ptr{Cvoid}),
         comm,
@@ -60,13 +62,13 @@ end
 
 #=
 #XXX Not sure why this doesn't work
-@for_libpetsc begin
+@for_petsc begin
     function PetscCommDestroy(
         comm::MPI.Comm
     )
         @show comm.val
         @chk ccall(
-            (:PetscCommDestroy, $libpetsc),
+            (:PetscCommDestroy, $petsc_library),
             PetscErrorCode,
             (Ptr{MPI.MPI_Comm},),
             comm,
diff --git a/src/vec.jl b/src/vec.jl
index 24d1ebbb..8f503ef9 100644
--- a/src/vec.jl
+++ b/src/vec.jl
@@ -26,6 +26,18 @@ mutable struct VecSeq{T} <: AbstractVec{T}
     array::Vector{T}
 end
 
+"""
+    Vec(v::CVec)
+
+Container for an abstract PETSc vector
+
+# External Links
+$(_doc_external("Vec/Vec"))
+"""
+mutable struct Vec{T} <: AbstractVec{T}
+    ptr::CVec
+end
+
 Base.eltype(::Type{V}) where {V<:AbstractVec{T}} where T = T
 Base.eltype(v::AbstractVec{T}) where {T} = T
 Base.size(v::AbstractVec) = (length(v),)
@@ -127,19 +139,93 @@ Base.parent(v::AbstractVec) = v.array
         end
         return v
     end
+
+    function Base.fill!(v::AbstractVec{$PetscScalar}, x)
+        @chk ccall((:VecSet, $libpetsc),
+                   PetscErrorCode,
+                   (CVec, $PetscScalar),
+                   v, $PetscScalar(x))
+        return v
+    end
+
+    function Base.setindex!(
+        v::AbstractVec{$PetscScalar},
+        val,
+        i::Integer,
+    )
+        @chk ccall(
+            (:VecSetValues, $libpetsc),
+            PetscErrorCode,
+            (CVec, $PetscInt, Ptr{$PetscInt}, Ptr{$PetscScalar}, InsertMode),
+            v,
+            1,
+            Ref{$PetscInt}(i - 1),
+            Ref{$PetscScalar}(val),
+            INSERT_VALUES,
+        )
+
+        return val
+    end
+
+    function Base.getindex(
+        v::AbstractVec{$PetscScalar},
+        i::Integer,
+    )
+        vals = [$PetscScalar(0)]
+        @chk ccall(
+            (:VecGetValues, $libpetsc),
+            PetscErrorCode,
+            (CVec, $PetscInt, Ptr{$PetscInt}, Ptr{$PetscScalar}),
+            v,
+            1,
+            Ref{$PetscInt}(i - 1),
+            vals,
+        )
+
+        return vals[1]
+    end
 end
 
 """
     unsafe_localarray(PetscScalar, ptr:CVec; read=true, write=true)
+    unsafe_localarray(v::AbstractVec; read=true, write=true)
 
 Return an `Array{PetscScalar}` containing local portion of the PETSc data.
 
-`finalize` should be called on the `Array` before the data can be used.
-
 Use `read=false` if the array is write-only; `write=false` if read-only.
+
+!!! note
+    `Base.finalize` should be called on the `Array` before the data can be used.
 """
 unsafe_localarray
 
+unsafe_localarray(v::AbstractVec{T}; kwargs...) where {T} =
+    unsafe_localarray(T, v.ptr; kwargs...)
+
+"""
+    map_unsafe_localarray!(f!, x::AbstractVec{T}; read=true, write=true)
+
+Convert `x` to an `Array{T}` and apply the function `f!`.
+
+Use `read=false` if the array is write-only; `write=false` if read-only.
+
+# Examples
+```julia-repl
+julia> map_unsafe_localarray!(x; write=true) do x
+           x .*= 2
+       end
+```
+
+!!! note
+    `Base.finalize` is automatically called on the array.
+"""
+function map_unsafe_localarray!(f!, v::AbstractVec{T}; kwargs...) where {T}
+    array = unsafe_localarray(T, v.ptr; kwargs...)
+    f!(array)
+    Base.finalize(array)
+end
+
+
 
 function Base.show(io::IO, ::MIME"text/plain", vec::AbstractVec)
     _show(io, vec)
 end
@@ -160,3 +246,85 @@ $(_doc_external("Vec/VecGetOwnershipRange"))
 """
 ownershiprange
 
+"""
+    setvalues!(
+        vector::AbstractVec{PetscScalar},
+        indices::Vector{PetscInt},
+        vals::Vector{PetscScalar},
+        mode::InsertMode;
+        num_vals = length(indices)
+    )
+
+Insert a set of values into the `vector`. Equivalent to one of the following
+depending on the `mode`
+```julia
+vector[indices[1:num_vals]] .= vals[1:num_vals]
+vector[indices[1:num_vals]] .+= vals[1:num_vals]
+```
+
+!!! warning
+    `indices` should use 0-based indexing!
+
+# External Links
+$(_doc_external("Vec/VecSetValues"))
+"""
+function setvalues!(::AbstractVec) end
+
+@for_libpetsc function setvalues!(
+    vec::AbstractVec{$PetscScalar},
+    inds::Vector{$PetscInt},
+    vals::Vector{$PetscScalar},
+    mode::InsertMode;
+    num_vals = length(inds)
+)
+    @chk ccall(
+        (:VecSetValues, $libpetsc),
+        PetscErrorCode,
+        (CVec, $PetscInt, Ptr{$PetscInt}, Ptr{$PetscScalar}, InsertMode),
+        vec,
+        num_vals,
+        inds,
+        vals,
+        mode,
+    )
+    return vals
+end
+
+"""
+    getvalues!(
+        vector::AbstractVec{PetscScalar},
+        indices::Vector{PetscInt},
+        vals::Vector{PetscScalar};
+        num_vals = length(indices)
+    )
+
+Get a set of values from the `vector`. Equivalent to
+```julia
+vals[1:num_vals] .= vector[indices[1:num_vals]]
+```
+
+!!! warning
+    `indices` should use 0-based indexing!
+
+# External Links
+$(_doc_external("Vec/VecGetValues"))
+"""
+function getvalues!(::AbstractVec) end
+
+@for_libpetsc function getvalues!(
+    vec::AbstractVec{$PetscScalar},
+    inds::Vector{$PetscInt},
+    vals::Vector{$PetscScalar};
+    num_vals = length(inds)
+)
+    @chk ccall(
+        (:VecGetValues, $libpetsc),
+        PetscErrorCode,
+        (CVec, $PetscInt, Ptr{$PetscInt}, Ptr{$PetscScalar}),
+        vec,
+        num_vals,
+        inds,
+        vals,
+    )
+    return vals
+end
diff --git a/test/dmda.jl b/test/dmda.jl
new file mode 100644
index 00000000..abdc4899
--- /dev/null
+++ b/test/dmda.jl
@@ -0,0 +1,488 @@
+using Test
+using PETSc, MPI
+MPI.Initialized() || MPI.Init()
+
+@testset "DMDACreate1D" begin
+    comm = MPI.COMM_WORLD
+    mpirank = MPI.Comm_rank(comm)
+    mpisize = MPI.Comm_size(comm)
+    for petsclib in PETSc.petsclibs
+        PETSc.initialize(petsclib)
+        PetscScalar = PETSc.scalartype(petsclib)
+        PetscInt = PETSc.inttype(petsclib)
+        # Loop over all boundary types and try to use them
+        for boundary_type in instances(PETSc.DMBoundaryType)
+            @testset "$boundary_type" begin
+                dof_per_node = 4
+                stencil_width = 5
+
+                # We test both setting and not setting the point distribution
+                points_per_proc = [PetscInt(10 + i) for i in 1:mpisize]
+                proc_global_offsets =
+                    [PetscInt(0), accumulate(+, points_per_proc)...]
+
+                global_size = proc_global_offsets[end]
+
+                # left and right ghost points
+                gl =
+                    boundary_type == PETSc.DM_BOUNDARY_NONE && mpirank == 0 ?
+                    0 : stencil_width
+                gr =
+                    boundary_type == PETSc.DM_BOUNDARY_NONE &&
+                    mpirank == mpisize - 1 ?
0 : stencil_width + + # Set the points + da = PETSc.DMDACreate1d( + petsclib, + comm, + boundary_type, + global_size, + dof_per_node, + stencil_width, + points_per_proc, + ) + + @test PETSc.gettype(da) == "da" + @test PETSc.getdimension(da) == 1 + + da_info = PETSc.getinfo(da) + corners = PETSc.getcorners(da) + ghost_corners = PETSc.getghostcorners(da) + + @test da_info.dim == 1 + @test da_info.global_size == [global_size, 1, 1] + @test da_info.procs_per_dim == [mpisize, 1, 1] + @test da_info.boundary_type == [ + boundary_type, + PETSc.DM_BOUNDARY_NONE, + PETSc.DM_BOUNDARY_NONE, + ] + @test da_info.stencil_type == PETSc.DMDA_STENCIL_BOX + @test da_info.stencil_width == stencil_width + @test corners.lower == + [proc_global_offsets[mpirank + 1] + 1, 1, 1] + @test corners.upper == [proc_global_offsets[mpirank + 2], 1, 1] + @test corners.size == [points_per_proc[mpirank + 1], 1, 1] + @test ghost_corners.lower == + [proc_global_offsets[mpirank + 1] + 1 - gl, 1, 1] + @test ghost_corners.upper == + [proc_global_offsets[mpirank + 2] + gr, 1, 1] + @test ghost_corners.size == + [points_per_proc[mpirank + 1] + gl + gr, 1, 1] + + # Do not set the points and test option parsing + da_refine = 2 + da = PETSc.DMDACreate1d( + petsclib, + comm, + boundary_type, + global_size, + dof_per_node, + stencil_width, + nothing; + da_refine = da_refine, + ) + @test PETSc.gettype(da) == "da" + @test PETSc.getdimension(da) == 1 + + da_info = PETSc.getinfo(da) + + @test da_info.dim == 1 + if boundary_type == PETSc.DM_BOUNDARY_PERIODIC + @test da_info.global_size == + [2^da_refine * global_size, 1, 1] + else + @test da_info.global_size == + [2^da_refine * (global_size - 1) + 1, 1, 1] + end + @test da_info.procs_per_dim == [mpisize, 1, 1] + @test da_info.boundary_type == [ + boundary_type, + PETSc.DM_BOUNDARY_NONE, + PETSc.DM_BOUNDARY_NONE, + ] + @test da_info.stencil_type == PETSc.DMDA_STENCIL_BOX + @test da_info.stencil_width == stencil_width + # In this case we cannot check the numbers locally + + # TODO: Need a better test? 
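+                # Smoke test: a KSP built from the DM should come up with the
+                # default solver type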
+ ksp = PETSc.KSP(da) + @test PETSc.gettype(ksp) == "gmres" + + end + end + PETSc.finalize(petsclib) + end +end + +@testset "DMDACreate2D" begin + comm = MPI.COMM_WORLD + mpirank = MPI.Comm_rank(comm) + mpisize = MPI.Comm_size(comm) + global_size_x = 100 + global_size_y = 45 + for petsclib in PETSc.petsclibs + PetscScalar = PETSc.scalartype(petsclib) + PetscInt = PETSc.inttype(petsclib) + PETSc.initialize(petsclib) + # Loop over all boundary types and stencil types + for stencil_type in instances(PETSc.DMDAStencilType), + boundary_type_y in instances(PETSc.DMBoundaryType), + boundary_type_x in instances(PETSc.DMBoundaryType) + + # skip unsupported stencils + stencil_type == PETSc.DMDA_STENCIL_BOX && + ( + boundary_type_x == PETSc.DM_BOUNDARY_MIRROR || + boundary_type_y == PETSc.DM_BOUNDARY_MIRROR + ) && + continue + + @testset "$boundary_type_x, $boundary_type_y, $stencil_type" begin + dof_per_node = 4 + stencil_width = 5 + + # Set the points + da = PETSc.DMDACreate2d( + petsclib, + comm, + boundary_type_x, + boundary_type_y, + stencil_type, + global_size_x, + global_size_y, + PETSc.PETSC_DECIDE, + PETSc.PETSC_DECIDE, + dof_per_node, + stencil_width, + nothing, + nothing, + ) + @test PETSc.gettype(da) == "da" + @test PETSc.getdimension(da) == 2 + + da_info = PETSc.getinfo(da) + + @test da_info.global_size == [global_size_x, global_size_y, 1] + @test da_info.dim == 2 + @test prod(da_info.procs_per_dim) == mpisize + @test da_info.boundary_type == + [boundary_type_x, boundary_type_y, PETSc.DM_BOUNDARY_NONE] + @test da_info.stencil_type == stencil_type + @test da_info.stencil_width == stencil_width + + # test refinement + da_refine = 2 + da = PETSc.DMDACreate2d( + petsclib, + comm, + boundary_type_x, + boundary_type_y, + stencil_type, + global_size_x, + global_size_y, + PETSc.PETSC_DECIDE, + PETSc.PETSC_DECIDE, + dof_per_node, + stencil_width, + nothing, + nothing; + da_refine = da_refine, + ) + @test PETSc.gettype(da) == "da" + @test PETSc.getdimension(da) == 2 + + da_info = PETSc.getinfo(da) + + # Compute refined global size + ref_global_size_x = + boundary_type_x == PETSc.DM_BOUNDARY_PERIODIC ? + 2^da_refine * global_size_x : + 2^da_refine * (global_size_x - 1) + 1 + ref_global_size_y = + boundary_type_y == PETSc.DM_BOUNDARY_PERIODIC ? + 2^da_refine * global_size_y : + 2^da_refine * (global_size_y - 1) + 1 + + @test da_info.global_size == + [ref_global_size_x, ref_global_size_y, 1] + @test prod(da_info.procs_per_dim) == mpisize + @test da_info.boundary_type == + [boundary_type_x, boundary_type_y, PETSc.DM_BOUNDARY_NONE] + @test da_info.stencil_type == stencil_type + @test da_info.stencil_width == stencil_width + + # TODO: Test with specific distribution of processors and sizes + + # TODO: Need a better test? 
+ ksp = PETSc.KSP(da) + @test PETSc.gettype(ksp) == "gmres" + end + end + PETSc.finalize(petsclib) + end +end + +@testset "DMDACreate3D" begin + comm = MPI.COMM_WORLD + mpirank = MPI.Comm_rank(comm) + mpisize = MPI.Comm_size(comm) + global_size_x = 12 + global_size_y = 13 + global_size_z = 14 + for petsclib in PETSc.petsclibs + PetscScalar = PETSc.scalartype(petsclib) + PetscInt = PETSc.inttype(petsclib) + PETSc.initialize(petsclib) + # Loop over all boundary types and stencil types + for stencil_type in instances(PETSc.DMDAStencilType), + boundary_type_z in instances(PETSc.DMBoundaryType), + boundary_type_y in instances(PETSc.DMBoundaryType), + boundary_type_x in instances(PETSc.DMBoundaryType) + + stencil_type == PETSc.DMDA_STENCIL_BOX && + ( + boundary_type_x == PETSc.DM_BOUNDARY_MIRROR || + boundary_type_y == PETSc.DM_BOUNDARY_MIRROR || + boundary_type_z == PETSc.DM_BOUNDARY_MIRROR + ) && + continue + + @testset "$boundary_type_x, $boundary_type_y, $boundary_type_z, $stencil_type" begin + dof_per_node = 4 + stencil_width = 2 + + # Set the points + da = PETSc.DMDACreate3d( + petsclib, + comm, + boundary_type_x, + boundary_type_y, + boundary_type_z, + stencil_type, + global_size_x, + global_size_y, + global_size_z, + PETSc.PETSC_DECIDE, + PETSc.PETSC_DECIDE, + PETSc.PETSC_DECIDE, + dof_per_node, + stencil_width, + nothing, + nothing, + nothing, + ) + @test PETSc.gettype(da) == "da" + @test PETSc.getdimension(da) == 3 + + da_info = PETSc.getinfo(da) + + @test da_info.global_size == + [global_size_x, global_size_y, global_size_z] + @test da_info.dim == 3 + @test prod(da_info.procs_per_dim) == mpisize + @test da_info.boundary_type == + [boundary_type_x, boundary_type_y, boundary_type_z] + @test da_info.stencil_type == stencil_type + @test da_info.stencil_width == stencil_width + + # test refinement + da_refine = 2 + da = PETSc.DMDACreate3d( + petsclib, + comm, + boundary_type_x, + boundary_type_y, + boundary_type_z, + stencil_type, + global_size_x, + global_size_y, + global_size_z, + PETSc.PETSC_DECIDE, + PETSc.PETSC_DECIDE, + PETSc.PETSC_DECIDE, + dof_per_node, + stencil_width, + nothing, + nothing, + nothing; + da_refine = da_refine, + ) + @test PETSc.gettype(da) == "da" + @test PETSc.getdimension(da) == 3 + + da_info = PETSc.getinfo(da) + + # Compute refined global size + ref_global_size_x = + boundary_type_x == PETSc.DM_BOUNDARY_PERIODIC ? + 2^da_refine * global_size_x : + 2^da_refine * (global_size_x - 1) + 1 + ref_global_size_y = + boundary_type_y == PETSc.DM_BOUNDARY_PERIODIC ? + 2^da_refine * global_size_y : + 2^da_refine * (global_size_y - 1) + 1 + ref_global_size_z = + boundary_type_z == PETSc.DM_BOUNDARY_PERIODIC ? + 2^da_refine * global_size_z : + 2^da_refine * (global_size_z - 1) + 1 + + @test da_info.global_size == + [ref_global_size_x, ref_global_size_y, ref_global_size_z] + @test prod(da_info.procs_per_dim) == mpisize + @test da_info.boundary_type == + [boundary_type_x, boundary_type_y, boundary_type_z] + @test da_info.stencil_type == stencil_type + @test da_info.stencil_width == stencil_width + + # TODO: Test with specific distribution of processors and sizes + + # TODO: Need a better test? 
+ ksp = PETSc.KSP(da) + @test PETSc.gettype(ksp) == "gmres" + end + end + PETSc.finalize(petsclib) + end +end + +@testset "creatematrix" begin + comm = MPI.COMM_WORLD + mpirank = MPI.Comm_rank(comm) + mpisize = MPI.Comm_size(comm) + for petsclib in PETSc.petsclibs + PETSc.initialize(petsclib) + PetscScalar = PETSc.scalartype(petsclib) + PetscInt = PETSc.inttype(petsclib) + boundary_type = PETSc.DM_BOUNDARY_NONE + dof_per_node = 1 + stencil_width = 1 + number_points = 10 + points_per_proc = [PetscInt(10) for i in 1:mpisize] + global_size = sum(points_per_proc) + # Set the points + da = PETSc.DMDACreate1d( + petsclib, + comm, + boundary_type, + global_size, + dof_per_node, + stencil_width, + points_per_proc, + ) + mat = PETSc.creatematrix(da) + + # Build the 1-D Laplacian FD matrix + Sten = PETSc.MatStencil{PetscInt} + col = Vector{Sten}(undef, 2) + row = Vector{Sten}(undef, 2) + val = Vector{PetscScalar}(undef, 4) + corners = PETSc.getcorners(da) + + for i in corners.lower[1]:min(corners.upper[1], global_size - 1) + row[1] = Sten(i = i) + row[2] = Sten(i = i + 1) + col[1] = Sten(i = i) + col[2] = Sten(i = i + 1) + val .= [-1, 1, 1, -1] + PETSc.MatSetValuesStencil!(mat, row, col, val, PETSc.ADD_VALUES) + end + + PETSc.assemblybegin(mat) + PETSc.assemblyend(mat) + + for i in corners.lower[1]:corners.upper[1] + if i == 1 + @test mat[i, i:(i + 1)] == [-1, 1] + elseif i == global_size + @test mat[i, (i - 1):i] == [1, -1] + else + @test mat[i, (i - 1):(i + 1)] == [1, -2, 1] + end + end + PETSc.finalize(petsclib) + end +end + +@testset "DM Vectors and Coordinates" begin + comm = MPI.COMM_WORLD + mpirank = MPI.Comm_rank(comm) + mpisize = MPI.Comm_size(comm) + for petsclib in PETSc.petsclibs + PETSc.initialize(petsclib) + PetscScalar = PETSc.scalartype(petsclib) + PetscInt = PETSc.inttype(petsclib) + boundary_type = PETSc.DM_BOUNDARY_NONE + dof_per_node = 1 + stencil_width = 1 + number_points = 10 + points_per_proc = [PetscInt(10) for i in 1:mpisize] + global_size = sum(points_per_proc) + # Set the points + da = PETSc.DMDACreate1d( + petsclib, + comm, + boundary_type, + global_size, + dof_per_node, + stencil_width, + points_per_proc, + ) + + corners = PETSc.getcorners(da) + + # Create the local and global vectors + local_vec = PETSc.createlocalvector(da) + global_vec = PETSc.createglobalvector(da) + + # Fill everything with some data + fill!(local_vec, mpirank) + fill!(global_vec, mpisize) + + # Add the local values to the global values + PETSc.update!(global_vec, local_vec, PETSc.ADD_VALUES) + + # end points added with neighbor due to ghost of size 1 + bot_val = mpisize + mpirank + (mpirank == 0 ? 0 : mpirank - 1) + top_val = mpisize + mpirank + (mpirank == mpisize - 1 ? 0 : mpirank + 1) + @test global_vec[corners.lower[1]] == bot_val + @test global_vec[corners.upper[1]] == top_val + + # Center is just self plus the global + for i in (corners.lower[1] + 1):(corners.upper[1] - 1) + @test global_vec[i] == mpisize + mpirank + end + + # reset the local values with the global values + PETSc.update!(local_vec, global_vec, PETSc.INSERT_VALUES) + + # My first value and my ghost should be the bot/top values + @test local_vec[1] == bot_val + @test local_vec[2] == bot_val + @test local_vec[end - 1] == top_val + @test local_vec[end] == top_val + + # interior is just self plus the global + for i in 3:(length(local_vec) - 2) + @test local_vec[i] == mpisize + mpirank + end + + # Test DM Coordinates + coord_da = PETSc.getcoordinateDM(da) + # Crank it up to 11! 
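+        # With global_size grid points on [xmin, xmax] the uniform spacing is
+        # Δx = (xmax - xmin) / (global_size - 1), which is checked below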
+ xmin, xmax = 0, 11 + PETSc.setuniformcoordinates!(coord_da, (xmin,), (xmax,)) + coord_vec = PETSc.getcoordinateslocal(coord_da) + Δx = (xmax - xmin) / (global_size - 1) + + # Figure out the values we should have in the coordinate vector + ghost_lower = corners.lower[1] - (mpirank == 0 ? 0 : 1) + ghost_upper = corners.upper[1] + (mpirank == mpisize - 1 ? 0 : 1) + for (loc, glo) in enumerate(ghost_lower:ghost_upper) + @test coord_vec[loc] ≈ (glo - 1) * Δx + end + PETSc.finalize(petsclib) + end +end + +nothing diff --git a/test/mpi_examples.jl b/test/mpi_examples.jl new file mode 100644 index 00000000..6cff2e48 --- /dev/null +++ b/test/mpi_examples.jl @@ -0,0 +1,31 @@ +using Test +using MPI + +function find_sources(path::String, sources=String[]) + if isdir(path) + for entry in readdir(path) + find_sources(joinpath(path, entry), sources) + end + elseif endswith(path, ".jl") + push!(sources, path) + end + sources +end + +@testset "mpi examples" begin + examples_dir = joinpath(@__DIR__, "..", "examples") + examples = find_sources(examples_dir) + filter!(file -> readline(file) == "# INCLUDE IN MPI TEST", examples) + + @testset "$(basename(example))" for example in examples + code = """ + $(Base.load_path_setup_code()) + include($(repr(example))) + """ + @test mpiexec() do mpi_cmd + cmd = `$mpi_cmd -n 4 $(Base.julia_cmd()) --startup-file=no -e $code` + @debug "Testing $example" Text(code) cmd + success(pipeline(cmd, stderr=stderr)) + end + end +end diff --git a/test/old_test.jl b/test/old_test.jl new file mode 100644 index 00000000..d3f9564f --- /dev/null +++ b/test/old_test.jl @@ -0,0 +1,93 @@ +using Test +using PETSc, MPI, LinearAlgebra, SparseArrays +PETSc.initialize() + +@testset "Tests" begin + m,n = 20,20 + x = randn(n) + V = PETSc.VecSeq(x) + + + @test norm(x) ≈ norm(V) rtol=10eps() + + S = sprand(m,n,0.1) + I + M = PETSc.MatSeqAIJ(S) + + @test norm(S) ≈ norm(M) rtol=10eps() + + w = M*x + @test w ≈ S*x + + ksp = PETSc.KSP(M; ksp_rtol=1e-8, pc_type="jacobi", ksp_monitor=nothing) + #PETSc.settolerances!(ksp; rtol=1e-8) + + @test PETSc.gettype(ksp) == "gmres" # default + + pc = PETSc.PC(ksp) + @test PETSc.nrefs(pc) == 2 + @test PETSc.gettype(pc) == "jacobi" + + # create an extra handle, check ref count is incremented + pc_extra = PETSc.PC(ksp) + @test PETSc.nrefs(pc) == 3 + # destroy extra handle, check ptr is set to null, ref count is decremented + PETSc.destroy(pc_extra) + @test pc_extra.ptr == C_NULL + @test PETSc.nrefs(pc) == 2 + + # safe to call destroy on null pointer + PETSc.destroy(pc_extra) + + # set new pc, check ref counts are modified by ksp + pc_new = PETSc.PC{Float64}(MPI.COMM_SELF) + @test PETSc.nrefs(pc_new) == 1 + PETSc.setpc!(ksp, pc_new) + @test PETSc.nrefs(pc_new) == 2 + @test PETSc.nrefs(pc) == 1 + + PETSc.setpc!(ksp, pc) + @test PETSc.nrefs(pc_new) == 1 + @test PETSc.nrefs(pc) == 2 + + + y = ksp \ w + @test S*y ≈ w rtol=1e-8 + + + w = M'*x + @test w ≈ S'*x + + y = ksp' \ w + @test S'*y ≈ w rtol=1e-8 + + + f!(y,x) = y .= 2 .*x + + M = PETSc.MatShell{Float64}(f!,10,10) + + x = rand(10) + + @test M*x ≈ 2x + @test PETSc.KSP(M) \ x ≈ x/2 + + + function F!(fx, x) + fx[1] = x[1]^2 + x[1]*x[2] - 3 + fx[2] = x[1]*x[2] + x[2]^2 - 6 + end + + J = zeros(2,2) + PJ = PETSc.MatSeqDense(J) + function updateJ!(x, args...) 
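+        # Analytic Jacobian of F!: J[i, j] = ∂F_i/∂x_j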
+        J[1,1] = 2x[1] + x[2]
+        J[1,2] = x[1]
+        J[2,1] = x[2]
+        J[2,2] = x[1] + 2x[2]
+    end
+
+    S = PETSc.SNES{Float64}(MPI.COMM_SELF; ksp_rtol=1e-4, pc_type="none")
+    PETSc.setfunction!(S, F!, PETSc.VecSeq(zeros(2)))
+    PETSc.setjacobian!(S, updateJ!, PJ, PJ)
+    @test PETSc.solve!([2.0,3.0], S) ≈ [1.0,2.0] rtol=1e-4
+end
+
diff --git a/test/runtests.jl b/test/runtests.jl
index a1159aae..5cab935f 100644
--- a/test/runtests.jl
+++ b/test/runtests.jl
@@ -1,96 +1,28 @@
-include("options.jl")
-
 using Test
-using PETSc, MPI, LinearAlgebra, SparseArrays
-PETSc.initialize()
-
-@testset "Tests" begin
-    m,n = 20,20
-    x = randn(n)
-    V = PETSc.VecSeq(x)
-
-
-    @test norm(x) ≈ norm(V) rtol=10eps()
-
-    S = sprand(m,n,0.1) + I
-    M = PETSc.MatSeqAIJ(S)
-
-    @test norm(S) ≈ norm(M) rtol=10eps()
-
-    w = M*x
-    @test w ≈ S*x
-
-    ksp = PETSc.KSP(M; ksp_rtol=1e-8, pc_type="jacobi", ksp_monitor=nothing)
-    #PETSc.settolerances!(ksp; rtol=1e-8)
-
-    @test PETSc.gettype(ksp) == "gmres" # default
-
-    pc = PETSc.PC(ksp)
-    @test PETSc.nrefs(pc) == 2
-    @test PETSc.gettype(pc) == "jacobi"
-
-    # create an extra handle, check ref count is incremented
-    pc_extra = PETSc.PC(ksp)
-    @test PETSc.nrefs(pc) == 3
-    # destroy extra handle, check ptr is set to null, ref count is decremented
-    PETSc.destroy(pc_extra)
-    @test pc_extra.ptr == C_NULL
-    @test PETSc.nrefs(pc) == 2
-
-    # safe to call destroy on null pointer
-    PETSc.destroy(pc_extra)
-
-    # set new pc, check ref counts are modified by ksp
-    pc_new = PETSc.PC{Float64}(MPI.COMM_SELF)
-    @test PETSc.nrefs(pc_new) == 1
-    PETSc.setpc!(ksp, pc_new)
-    @test PETSc.nrefs(pc_new) == 2
-    @test PETSc.nrefs(pc) == 1
-
-    PETSc.setpc!(ksp, pc)
-    @test PETSc.nrefs(pc_new) == 1
-    @test PETSc.nrefs(pc) == 2
-
-
-    y = ksp \ w
-    @test S*y ≈ w rtol=1e-8
-
-
-    w = M'*x
-    @test w ≈ S'*x
-
-    y = ksp' \ w
-    @test S'*y ≈ w rtol=1e-8
-
-
-    f!(y,x) = y .= 2 .*x
-
-    M = PETSc.MatShell{Float64}(f!,10,10)
-
-    x = rand(10)
-
-    @test M*x ≈ 2x
-    @test PETSc.KSP(M) \ x ≈ x/2
-
-
-    function F!(fx, x)
-        fx[1] = x[1]^2 + x[1]*x[2] - 3
-        fx[2] = x[1]*x[2] + x[2]^2 - 6
-    end
-
-    J = zeros(2,2)
-    PJ = PETSc.MatSeqDense(J)
-    function updateJ!(x, args...)
-        J[1,1] = 2x[1] + x[2]
-        J[1,2] = x[1]
-        J[2,1] = x[2]
-        J[2,2] = x[1] + 2x[2]
-    end
+using MPI
+
+# Do the MPI tests first so we do not end up running MPI inside MPI
+# XXX: Currently not working on windows, not sure why
+if !Sys.iswindows()
+    @testset "mpi tests" begin
+        @test mpiexec() do mpi_cmd
+            cmd = `$mpi_cmd -n 4 $(Base.julia_cmd()) --project dmda.jl`
+            success(pipeline(cmd, stderr = stderr))
+        end
+    end
+end
 
-    S = PETSc.SNES{Float64}(MPI.COMM_SELF; ksp_rtol=1e-4, pc_type="none")
-    PETSc.setfunction!(S, F!, PETSc.VecSeq(zeros(2)))
-    PETSc.setjacobian!(S, updateJ!, PJ, PJ)
-    @test PETSc.solve!([2.0,3.0], S) ≈ [1.0,2.0] rtol=1e-4
+# Examples with the comment
+#     # INCLUDE IN MPI TEST
+# will be run here
+# XXX: Currently not working on windows reliably, not sure why
+if !Sys.iswindows()
+    include("mpi_examples.jl")
 end
+include("options.jl")
+include("dmda.jl")
+include("old_test.jl")
+
+# Run the examples to make sure they all work
 include("examples.jl")