Skip to content

Commit 4e04b32

Browse files
Sébastien LoiselSébastien Loisel
authored and committed
Add maximum() and minimum() for Vec
- Add _vec_max and _vec_min low-level PETSc wrappers using VecMax/VecMin - Add Base.maximum(v::Vec) and Base.minimum(v::Vec) with docstrings - Add test/test_minmax.jl with 5 test cases - Update user guide and API reference with Reductions section - Bump version to 0.1.18
1 parent 286d36e commit 4e04b32

6 files changed

Lines changed: 224 additions & 1 deletion

File tree

Project.toml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
name = "SafePETSc"
22
uuid = "50acdc01-ce88-4ca7-bd87-6916c254362e"
33
authors = ["Sébastien Loisel <S.Loisel@hw.ac.uk>"]
4-
version = "0.1.17"
4+
version = "0.1.18"
55

66
[deps]
77
LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"

docs/src/api/vectors.md

Lines changed: 22 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -113,6 +113,28 @@ w = v' * A # Adjoint-vector times matrix
113113
LinearAlgebra.mul!(w, v', A) # In-place
114114
```
115115

116+
### Reductions
117+
118+
All reduction operations are **collective** - all ranks must call them.
119+
120+
```@docs
121+
Base.sum(::SafePETSc.Vec)
122+
Base.maximum(::SafePETSc.Vec)
123+
Base.minimum(::SafePETSc.Vec)
124+
```
125+
126+
#### Norms and Dot Products
127+
128+
```julia
129+
# Vector norms
130+
n2 = norm(v) # 2-norm (default)
131+
n1 = norm(v, 1) # 1-norm
132+
ninf = norm(v, Inf) # Infinity norm
133+
134+
# Dot product
135+
d = dot(v, w) # Or: v' * w
136+
```
137+
116138
### Properties
117139

118140
```julia

docs/src/guide/vectors.md

Lines changed: 23 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -99,6 +99,29 @@ x_adj = x'
9999
result = x' * A # Returns adjoint vector
100100
```
101101

102+
### Reductions
103+
104+
Compute scalar values from distributed vectors. All reduction operations are **collective** - all ranks must call them and receive the same result.
105+
106+
```julia
107+
# Sum of all elements
108+
s = sum(v)
109+
110+
# Maximum and minimum values
111+
max_val = maximum(v)
112+
min_val = minimum(v)
113+
114+
# Norms
115+
n2 = norm(v) # 2-norm (Euclidean), default
116+
n1 = norm(v, 1) # 1-norm (sum of absolute values)
117+
ninf = norm(v, Inf) # Infinity norm (maximum absolute value)
118+
119+
# Dot product
120+
d = dot(v, w) # Or equivalently: v' * w
121+
```
122+
123+
These operations use PETSc's efficient parallel implementations internally.
124+
102125
### Concatenation
103126

104127
Vectors can be concatenated to form new vectors or matrices:

src/vec.jl

Lines changed: 56 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -664,6 +664,46 @@ function Base.sum(v::Vec{T}) where {T}
664664
return result
665665
end
666666

667+
"""
668+
Base.maximum(v::Vec{T}) -> Real
669+
670+
**MPI Collective**
671+
672+
Compute the maximum element in a distributed PETSc vector.
673+
674+
This is a collective operation - all ranks must call it and will receive the same result.
675+
Uses PETSc's VecMax function internally.
676+
677+
# Example
678+
```julia
679+
v = Vec_uniform([1.0, 4.0, 2.0, 3.0])
680+
m = maximum(v) # Returns 4.0 on all ranks
681+
```
682+
"""
683+
function Base.maximum(v::Vec{T}) where {T}
684+
return _vec_max(v.obj.v)
685+
end
686+
687+
"""
688+
Base.minimum(v::Vec{T}) -> Real
689+
690+
**MPI Collective**
691+
692+
Compute the minimum element in a distributed PETSc vector.
693+
694+
This is a collective operation - all ranks must call it and will receive the same result.
695+
Uses PETSc's VecMin function internally.
696+
697+
# Example
698+
```julia
699+
v = Vec_uniform([1.0, 4.0, 2.0, 3.0])
700+
m = minimum(v) # Returns 1.0 on all ranks
701+
```
702+
"""
703+
function Base.minimum(v::Vec{T}) where {T}
704+
return _vec_min(v.obj.v)
705+
end
706+
667707
# Norm of a vector: norm(v, p) (returns scalar)
668708
# Implements LinearAlgebra.norm to support standard Julia syntax
669709
"""
@@ -789,6 +829,22 @@ PETSc.@for_libpetsc begin
789829
v, PETSc.NORM_INFINITY, result)
790830
return result[]
791831
end
832+
833+
function _vec_max(v::PETSc.Vec{$PetscScalar})
834+
result = Ref{$PetscReal}()
835+
PETSc.@chk ccall((:VecMax, $libpetsc), PETSc.PetscErrorCode,
836+
(PETSc.CVec, Ptr{Cvoid}, Ptr{$PetscReal}),
837+
v, C_NULL, result)
838+
return result[]
839+
end
840+
841+
function _vec_min(v::PETSc.Vec{$PetscScalar})
842+
result = Ref{$PetscReal}()
843+
PETSc.@chk ccall((:VecMin, $libpetsc), PETSc.PetscErrorCode,
844+
(PETSc.CVec, Ptr{Cvoid}, Ptr{$PetscReal}),
845+
v, C_NULL, result)
846+
return result[]
847+
end
792848
end
793849

794850
# In-place adjoint vector-matrix multiplication: w = v' * A (reuses pre-allocated w)

test/runtests.jl

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -108,6 +108,10 @@ end
108108
run_mpi_test(joinpath(@__DIR__, "test_vec_norm.jl"); nprocs=4, expect_success=true)
109109
end
110110

111+
@testset "Vec min/max Tests" begin
112+
run_mpi_test(joinpath(@__DIR__, "test_minmax.jl"); nprocs=4, expect_success=true)
113+
end
114+
111115
@testset "Mat_uniform Tests" begin
112116
run_mpi_test(joinpath(@__DIR__, "test_mat_uniform.jl"); nprocs=4, expect_success=true)
113117
end

test/test_minmax.jl

Lines changed: 118 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,118 @@
1+
using Test
2+
using MPI
3+
using SafePETSc
4+
SafePETSc.Init()
5+
using PETSc
6+
using SafePETSc.SafeMPI
7+
include(joinpath(@__DIR__, "mpi_test_harness.jl"))
8+
using .MPITestHarness: QuietTestSet
9+
10+
comm = MPI.COMM_WORLD
11+
rank = MPI.Comm_rank(comm)
12+
nranks = MPI.Comm_size(comm)
13+
14+
if rank == 0
15+
println("[DEBUG] Vec min/max test starting")
16+
flush(stdout)
17+
end
18+
19+
ts = @testset QuietTestSet "Vec min/max tests" begin
20+
21+
if rank == 0
22+
println("[DEBUG] Test 1: Basic maximum and minimum")
23+
flush(stdout)
24+
end
25+
26+
# Test 1: Basic maximum and minimum
27+
v = Vec_uniform([1.0, 4.0, 2.0, 3.0])
28+
@test sum(v) ≈ 10.0
29+
@test maximum(v) ≈ 4.0
30+
@test minimum(v) ≈ 1.0
31+
32+
SafeMPI.check_and_destroy!()
33+
MPI.Barrier(comm)
34+
35+
if rank == 0
36+
println("[DEBUG] Test 2: Negative values")
37+
flush(stdout)
38+
end
39+
40+
# Test 2: Negative values
41+
v2 = Vec_uniform([-5.0, 2.0, -1.0, 3.0])
42+
@test maximum(v2) ≈ 3.0
43+
@test minimum(v2) ≈ -5.0
44+
45+
SafeMPI.check_and_destroy!()
46+
MPI.Barrier(comm)
47+
48+
if rank == 0
49+
println("[DEBUG] Test 3: All same values")
50+
flush(stdout)
51+
end
52+
53+
# Test 3: All same values
54+
v3 = Vec_uniform([7.0, 7.0, 7.0, 7.0])
55+
@test maximum(v3) ≈ 7.0
56+
@test minimum(v3) ≈ 7.0
57+
58+
SafeMPI.check_and_destroy!()
59+
MPI.Barrier(comm)
60+
61+
if rank == 0
62+
println("[DEBUG] Test 4: Single element")
63+
flush(stdout)
64+
end
65+
66+
# Test 4: Single element
67+
v4 = Vec_uniform([42.0])
68+
@test maximum(v4) ≈ 42.0
69+
@test minimum(v4) ≈ 42.0
70+
71+
SafeMPI.check_and_destroy!()
72+
MPI.Barrier(comm)
73+
74+
if rank == 0
75+
println("[DEBUG] Test 5: Larger vector")
76+
flush(stdout)
77+
end
78+
79+
# Test 5: Larger vector distributed across ranks
80+
v5 = Vec_uniform(collect(1.0:16.0))
81+
@test maximum(v5) ≈ 16.0
82+
@test minimum(v5) ≈ 1.0
83+
@test sum(v5) ≈ 136.0 # 16*17/2
84+
85+
SafeMPI.check_and_destroy!()
86+
MPI.Barrier(comm)
87+
88+
end # End of QuietTestSet
89+
90+
# Aggregate per-rank counts and print a single summary on root
91+
local_counts = [
92+
get(ts.counts, :pass, 0),
93+
get(ts.counts, :fail, 0),
94+
get(ts.counts, :error, 0),
95+
get(ts.counts, :broken, 0),
96+
get(ts.counts, :skip, 0),
97+
]
98+
99+
global_counts = similar(local_counts)
100+
MPI.Allreduce!(local_counts, global_counts, +, comm)
101+
102+
if rank == 0
103+
println("Test Summary: Vec min/max tests (aggregated across $(nranks) ranks)")
104+
println(" Pass: $(global_counts[1]) Fail: $(global_counts[2]) Error: $(global_counts[3]) Broken: $(global_counts[4]) Skip: $(global_counts[5])")
105+
end
106+
107+
MPI.Barrier(comm)
108+
109+
if global_counts[2] > 0 || global_counts[3] > 0
110+
Base.exit(1)
111+
end
112+
113+
MPI.Barrier(comm)
114+
115+
if rank == 0
116+
println("[DEBUG] Vec min/max test file completed successfully")
117+
flush(stdout)
118+
end

0 commit comments

Comments
 (0)