Skip to content

Commit 3bb7528

Browse files
authored
Algorithms constructors (#22)
* initial commit * add all solver types * update docstrings * polish references * update readme * update readme * add iterators properties * add verbosity test * fix default maxit * add assertions * add assertion
1 parent 0446f08 commit 3bb7528

17 files changed

Lines changed: 757 additions & 348 deletions

README.md

Lines changed: 14 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -9,24 +9,28 @@ This package can be used in combination with [ProximalOperators.jl](https://gith
99

1010
[StructuredOptimization.jl](https://github.com/kul-forbes/StructuredOptimization.jl) provides a higher-level interface to formulate and solve problems using (some of) the algorithms here included.
1111

12-
### Installation
12+
### Quick start
13+
14+
To install the package, simply issue the following command in the Julia REPL:
1315

1416
```julia
15-
julia> Pkg.add("ProximalAlgorithms")
17+
] add ProximalAlgorithms
1618
```
1719

20+
Check out [these test scripts](test/problems) for examples on how to apply
21+
the provided algorithms to problems.
22+
1823
### Implemented Algorithms
1924

2025
Algorithm | Function | Reference
2126
--------------------------------------|---------------|-----------
22-
Douglas-Rachford splitting algorithm | [`douglasrachford`](src/algorithms/douglasrachford.jl) | [[1]][eckstein_1989]
23-
Forward-backward splitting (i.e. proximal gradient) algorithm | [`forwardbackward`](src/algorithms/forwardbackward.jl) | [[2]][tseng_2008], [[3]][beck_2009]
24-
Chambolle-Pock primal dual algorithm | [`chambollepock`](src/algorithms/primaldual.jl) | [[4]][chambolle_2011]
25-
Vũ-Condat primal-dual algorithm | [`vucondat`](src/algorithms/primaldual.jl) | [[6]][vu_2013], [[7]][condat_2013]
26-
Davis-Yin splitting algorithm | [`davisyin`](src/algorithms/davisyin.jl) | [[9]][davis_2017]
27-
Asymmetric forward-backward-adjoint algorithm | [`afba`](src/algorithms/primaldual.jl) | [[10]][latafat_2017]
28-
PANOC (L-BFGS) | [`panoc`](src/algorithms/panoc.jl) | [[11]][stella_2017]
29-
ZeroFPR (L-BFGS) | [`zerofpr`](src/algorithms/zerofpr.jl) | [[12]][themelis_2018]
27+
Douglas-Rachford splitting algorithm | [`DouglasRachford`](src/algorithms/douglasrachford.jl) | [[1]][eckstein_1989]
28+
Forward-backward splitting (i.e. proximal gradient) algorithm | [`ForwardBackward`](src/algorithms/forwardbackward.jl) | [[2]][tseng_2008], [[3]][beck_2009]
29+
Vũ-Condat primal-dual algorithm | [`VuCondat`](src/algorithms/primaldual.jl) | [[4]][chambolle_2011], [[6]][vu_2013], [[7]][condat_2013]
30+
Davis-Yin splitting algorithm | [`DavisYin`](src/algorithms/davisyin.jl) | [[9]][davis_2017]
31+
Asymmetric forward-backward-adjoint algorithm | [`AFBA`](src/algorithms/primaldual.jl) | [[10]][latafat_2017]
32+
PANOC (L-BFGS) | [`PANOC`](src/algorithms/panoc.jl) | [[11]][stella_2017]
33+
ZeroFPR (L-BFGS) | [`ZeroFPR`](src/algorithms/zerofpr.jl) | [[12]][themelis_2018]
3034

3135
### Contributing
3236

src/algorithms/davisyin.jl

Lines changed: 70 additions & 44 deletions
Original file line numberDiff line numberDiff line change
@@ -1,10 +1,6 @@
1-
################################################################################
2-
# Davis-Yin splitting iterable
3-
#
4-
# See:
5-
# [1] Davis, Yin "A Three-Operator Splitting Scheme and its Optimization Applications",
6-
# Set-Valued and Variational Analysis, vol. 25, no. 4, pp 829–858 (2017).
7-
#
1+
# Davis, Yin. "A Three-Operator Splitting Scheme and its Optimization
2+
# Applications", Set-Valued and Variational Analysis, vol. 25, no. 4,
3+
# pp. 829–858 (2017).
84

95
using Base.Iterators
106
using ProximalAlgorithms.IterationTools
@@ -64,60 +60,90 @@ function Base.iterate(iter::DYS_iterable, state::DYS_state)
6460
return state, state
6561
end
6662

63+
# Solver
64+
65+
struct DavisYin{R}
66+
gamma::Maybe{R}
67+
lambda::R
68+
maxit::Int
69+
tol::R
70+
verbose::Bool
71+
freq::Int
72+
73+
function DavisYin{R}(; gamma::Maybe{R}=nothing, lambda::R=R(1.0),
74+
maxit::Int=10000, tol::R=R(1e-8), verbose::Bool=false, freq::Int=100
75+
) where R
76+
@assert gamma === nothing || gamma > 0
77+
@assert lambda > 0
78+
@assert maxit > 0
79+
@assert tol > 0
80+
@assert freq > 0
81+
new(gamma, lambda, maxit, tol, verbose, freq)
82+
end
83+
end
84+
85+
function (solver::DavisYin{R})(x0::AbstractArray{C};
86+
f=Zero(), g=Zero(), h=Zero(), A=I, L::Maybe{R}=nothing
87+
) where {R, C <: Union{R, Complex{R}}}
88+
89+
stop(state::DYS_state) = norm(state.res, Inf) <= solver.tol
90+
disp((it, state)) = @printf("%5d | %.3e\n", it, norm(state.res, Inf))
91+
92+
if solver.gamma === nothing
93+
if L !== nothing
94+
gamma = R(1)/L
95+
else
96+
error("You must specify either L or gamma")
97+
end
98+
else
99+
gamma = solver.gamma
100+
end
101+
102+
iter = DYS_iterable(f, g, h, A, x0, gamma, solver.lambda)
103+
iter = take(halt(iter, stop), solver.maxit)
104+
iter = enumerate(iter)
105+
if solver.verbose iter = tee(sample(iter, solver.freq), disp) end
106+
107+
num_iters, state_final = loop(iter)
108+
109+
return state_final.xf, state_final.xg, num_iters
110+
111+
end
112+
113+
# Outer constructors
114+
67115
"""
68-
davisyin(x0; f, g, h, A, [...])
116+
DavisYin([gamma, lambda, maxit, tol, verbose, freq])
69117
70-
Solves convex optimization problems of the form
118+
Instantiate the Davis-Yin splitting algorithm (see [1]) for solving
119+
convex optimization problems of the form
71120
72121
minimize f(x) + g(x) + h(A x),
73122
74-
where `h` is smooth and `A` is a linear mapping, using the Davis-Yin splitting
75-
algorithm, see [1].
76-
77-
Either of the following arguments must be specified:
123+
where `h` is smooth and `A` is a linear mapping (for example, a matrix).
124+
If `solver = DavisYin(args...)`, then the above problem is solved with
78125
79-
* `L::Real`, Lipschitz constant of the gradient of `h(A x)`.
80-
* `gamma:Real`, stepsize parameter.
126+
solver(x0; [f, g, h, A])
81127
82-
Other optional keyword arguments:
128+
Optional keyword arguments:
83129
130+
* `gamma::Real` (default: `nothing`), stepsize parameter.
84131
* `lambda::Real` (default: `1.0`), relaxation parameter, see [1].
85132
* `maxit::Integer` (default: `10000`), maximum number of iterations to perform.
86133
* `tol::Real` (default: `1e-8`), absolute tolerance on the fixed-point residual.
87134
* `verbose::Bool` (default: `false`), whether or not to print information during the iterations.
88135
* `freq::Integer` (default: `100`), frequency of verbosity.
89136
137+
If `gamma` is not specified at construction time, the following keyword
138+
argument must be specified at solve time:
139+
140+
* `L::Real`, Lipschitz constant of the gradient of `h(A x)`.
141+
90142
References:
91143
92144
[1] Davis, Yin. "A Three-Operator Splitting Scheme and its Optimization
93145
Applications", Set-Valued and Variational Analysis, vol. 25, no. 4,
94146
pp. 829–858 (2017).
95147
"""
96-
function davisyin(x0;
97-
f=Zero(), g=Zero(), h=Zero(), A=I,
98-
lambda=1.0, L=nothing, gamma=nothing,
99-
maxit=10_000, tol=1e-8,
100-
verbose=false, freq=100)
101-
102-
R = real(eltype(x0))
103-
104-
stop(state::DYS_state) = norm(state.res, Inf) <= R(tol)
105-
disp((it, state)) = @printf("%5d | %.3e\n", it, norm(state.res, Inf))
106-
107-
if gamma === nothing
108-
if L !== nothing
109-
gamma = R(1)/R(L)
110-
else
111-
error("You must specify either L or gamma")
112-
end
113-
end
114-
115-
iter = DYS_iterable(f, g, h, A, x0, R(gamma), R(lambda))
116-
iter = take(halt(iter, stop), maxit)
117-
iter = enumerate(iter)
118-
if verbose iter = tee(sample(iter, freq), disp) end
119-
120-
num_iters, state_final = loop(iter)
121-
122-
return state_final.xf, state_final.xg, num_iters
123-
end
148+
DavisYin(::Type{R}; kwargs...) where R = DavisYin{R}(; kwargs...)
149+
DavisYin(; kwargs...) = DavisYin(Float64; kwargs...)

src/algorithms/douglasrachford.jl

Lines changed: 55 additions & 32 deletions
Original file line numberDiff line numberDiff line change
@@ -1,10 +1,6 @@
1-
################################################################################
2-
# Douglas-Rachford splitting iterable
3-
#
4-
# [1] Eckstein, Bertsekas "On the Douglas-Rachford Splitting Method and the
5-
# Proximal Point Algorithm for Maximal Monotone Operators*",
1+
# Eckstein, Bertsekas, "On the Douglas-Rachford Splitting Method and the
2+
# Proximal Point Algorithm for Maximal Monotone Operators",
63
# Mathematical Programming, vol. 55, no. 1, pp. 293-318 (1989).
7-
#
84

95
using Base.Iterators
106
using ProximalAlgorithms: LBFGS
@@ -39,16 +35,61 @@ function Base.iterate(iter::DRS_iterable, state::DRS_state=DRS_state(iter))
3935
return state, state
4036
end
4137

38+
# Solver
39+
40+
struct DouglasRachford{R}
41+
gamma::R
42+
maxit::Int
43+
tol::R
44+
verbose::Bool
45+
freq::Int
46+
47+
function DouglasRachford{R}(; gamma::R, maxit::Int=1000, tol::R=R(1e-8),
48+
verbose::Bool=false, freq::Int=100
49+
) where R
50+
@assert gamma > 0
51+
@assert maxit > 0
52+
@assert tol > 0
53+
@assert freq > 0
54+
new(gamma, maxit, tol, verbose, freq)
55+
end
56+
end
57+
58+
function (solver::DouglasRachford{R})(
59+
x0::AbstractArray{C}; f=Zero(), g=Zero()
60+
) where {R, C <: Union{R, Complex{R}}}
61+
62+
stop(state::DRS_state) = norm(state.res, Inf) <= solver.tol
63+
disp((it, state)) = @printf("%5d | %.3e\n", it, norm(state.res, Inf))
64+
65+
iter = DRS_iterable(f, g, x0, solver.gamma)
66+
iter = take(halt(iter, stop), solver.maxit)
67+
iter = enumerate(iter)
68+
if solver.verbose iter = tee(sample(iter, solver.freq), disp) end
69+
70+
num_iters, state_final = loop(iter)
71+
72+
return state_final.y, state_final.z, num_iters
73+
74+
end
75+
76+
# Outer constructors
77+
4278
"""
43-
douglasrachford(x0; f, g, gamma, [...])
79+
DouglasRachford([gamma, maxit, tol, verbose, freq])
80+
81+
Instantiate the Douglas-Rachford splitting algorithm (see [1]) for solving
82+
convex optimization problems of the form
83+
84+
minimize f(x) + g(x).
85+
86+
If `solver = DouglasRachford(args...)`, then the above problem is solved with
4487
45-
Minimizes `f(x) + g(x)` with respect to `x`, using the Douglas-Rachfor splitting
46-
algorithm starting from `x0`, with stepsize `gamma`.
47-
If unspecified, `f` and `g` default to the identically zero function,
48-
while `gamma` defaults to one.
88+
solver(x0; [f, g])
4989
50-
Other optional keyword arguments:
90+
Optional keyword arguments:
5191
92+
* `gamma::Real` (required), stepsize parameter.
5293
* `maxit::Integer` (default: `1000`), maximum number of iterations to perform.
5394
* `tol::Real` (default: `1e-8`), absolute tolerance on the fixed-point residual.
5495
* `verbose::Bool` (default: `false`), whether or not to print information during the iterations.
@@ -60,23 +101,5 @@ References:
60101
Proximal Point Algorithm for Maximal Monotone Operators",
61102
Mathematical Programming, vol. 55, no. 1, pp. 293-318 (1989).
62103
"""
63-
function douglasrachford(x0;
64-
f=Zero(), g=Zero(),
65-
gamma=1.0,
66-
maxit=1000, tol=1e-8,
67-
verbose=false, freq=100)
68-
69-
R = real(eltype(x0))
70-
71-
stop(state::DRS_state) = norm(state.res, Inf) <= R(tol)
72-
disp((it, state)) = @printf("%5d | %.3e\n", it, norm(state.res, Inf))
73-
74-
iter = DRS_iterable(f, g, x0, R(gamma))
75-
iter = take(halt(iter, stop), maxit)
76-
iter = enumerate(iter)
77-
if verbose iter = tee(sample(iter, freq), disp) end
78-
79-
num_iters, state_final = loop(iter)
80-
81-
return state_final.y, state_final.z, num_iters
82-
end
104+
DouglasRachford(::Type{R}; kwargs...) where R = DouglasRachford{R}(; kwargs...)
105+
DouglasRachford(; kwargs...) = DouglasRachford(Float64; kwargs...)

0 commit comments

Comments
 (0)