-
-
Notifications
You must be signed in to change notification settings - Fork 58
/
Copy pathadm.jl
139 lines (107 loc) · 3.1 KB
/
adm.jl
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
"""
$(TYPEDEF)
Optimizer for finding a sparse basis vector in a subspace based on [this paper](https://arxiv.org/pdf/1412.4659.pdf).
It solves the following problem
```math
\\argmin_{x} \\|x\\|_0 ~s.t.~Ax= 0
```
# Fields
$(FIELDS)
# Example
```julia
ADM()
ADM(λ = 0.1)
```
## Note
While useable for implicit problems, a better choice in general is given by the
`ImplicitOptimizer` which tends to be more robust.
"""
mutable struct ADM{T} <: AbstractSubspaceOptimizer{T}
"""Sparsity threshold"""
λ::T
function ADM(threshold::T = 1e-1) where T
@assert all(one(eltype(T)) .> threshold .> zero(eltype(T))) "Threshold must be positive definite and less than 1"
return new{T}(threshold)
end
end
Base.summary(::ADM) = "ADM"
# Functor call: runs the alternating directions method (ADM) on the nullspace
# of `A`, overwriting `X` in place with the recovered sparse vectors via
# `evaluate_pareto!`. Returns `nothing`; the result is the mutated `X`.
#
# Positional arguments:
#   X - output coefficient matrix, written column-by-column
#   A - matrix whose (approximate) nullspace is searched for sparse vectors
#   Y - only its second dimension `my` is read here, bounding how many
#       columns of `X` are filled
#   λ - sparsity threshold for the soft-thresholding step (defaults to the
#       optimizer's own threshold)
# Keyword arguments:
#   maxiter  - iteration cap, defaults to the larger dimension of `A`
#   abstol   - convergence tolerance on `norm(Q .- Q_, 2)`
#   rtol     - tolerance used when reducing `Q` to linearly independent columns
#   atol     - absolute tolerance forwarded to `nullspace`
#              (NOTE(review): the 0.99 default is unusually large for a
#              nullspace tolerance — confirm this is intended)
#   progress - optional `ProgressMeter.Progress`; enables live reporting
#   f, g     - objective components; `g∘f` forms the pareto evaluation closure
function (opt::ADM{T})(X, A, Y, λ::V = first(opt.λ);
    maxiter::Int64 = maximum(size(A)), abstol::V = eps(eltype(T)), progress = nothing,
    rtol::V = zero(eltype(T)),
    atol::V = convert(eltype(T), 0.99),
    f::Function = F(opt), g::Function = G(opt)) where {T, V}

    n, m = size(A)
    ny, my = size(Y)
    nx, mx = size(X)
    nq, mq = 0, 0  # NOTE(review): n, m, ny, nx, mx, nq, mq are never read below

    # Closure for the pareto function
    fg(x, A, y) = (g∘f)(x, A, y)
    fg(x, A) = (g∘f)(x, A)

    # Init all variables
    R = SoftThreshold()              # proximal operator applied each iteration
    xzero = zero(eltype(T))
    obj = xzero                      # objective value; only reported via progress
    sparsity = xzero                 # sparsity measure; only reported via progress
    conv_measure = xzero
    iters = 0
    converged = false
    max_ind = 0                      # NOTE(review): never used after initialization
    _progress = isa(progress, Progress)
    initial_prog = 0

    # Basis of the (numerical) nullspace of A; sparse vectors are sought inside it.
    N = nullspace(A[:,:], atol = atol)
    Q = deepcopy(N)                  # current iterate, rows kept at unit norm
    map(x->normalize!(x), eachrow(Q))
    Q_ = deepcopy(N)                 # previous iterate, for the convergence check
    x = N'Q                          # coefficients of Q in the nullspace basis

    # Alternating directions loop: soft-threshold the coefficients, project back
    # onto the nullspace, renormalize the rows, and stop once the change in Q
    # drops below abstol or maxiter is reached.
    @views while (iters < maxiter) && !converged
        iters += 1

        R(x, N'Q, λ)                 # x <- soft-threshold(N'Q, λ)
        mul!(Q, N, x)                # Q <- N*x (project back into the nullspace)
        map(x->normalize!(x), eachrow(Q))

        conv_measure = norm(Q .- Q_, 2)

        if _progress
            sparsity, obj = f(Q, A, λ)

            ProgressMeter.next!(
                progress;
                showvalues = [
                    (:Threshold, λ), (:Objective, obj), (:Sparsity, sparsity),
                    (:Convergence, conv_measure),
                ]
            )
        end

        if conv_measure < abstol
            converged = true
        else
            Q_ .= Q                  # remember this iterate for the next comparison
        end
    end

    # Zero out entries below the threshold, then drop redundant columns.
    clip_by_threshold!(Q, λ)
    # Reduce the solution size to linear independent columns
    @views Q = linear_independent_columns(Q, rtol)

    # Indicate if already used
    _included = zeros(Bool, my, size(Q, 2))

    @views for i in 1:my, j in 1:size(Q, 2)
        # Check, if already included
        any(_included[:, j]) && continue
        # evaluate_pareto! writes Q[:, j] into X[:, i] when it improves the
        # pareto front; mark the column as consumed only on success.
        if @views evaluate_pareto!(X[:, i], Q[:,j] , fg, A, λ)
            _included[i,j] = true
        end
    end

    if _progress
        sparsity, obj = f(X, A, λ)

        # Jump the progress bar to its end, then emit a final summary line.
        ProgressMeter.update!(
            progress,
            initial_prog + maxiter -1
        )

        ProgressMeter.next!(
            progress;
            showvalues = [
                (:Threshold, λ), (:Objective, obj), (:Sparsity, sparsity),
            ]
        )
    end

    # Warn when the selected columns of X are (numerically) rank deficient.
    if rank(X'X) < my
        @warn "$opt @ $λ has found illconditioned equations. Vary the threshold or relative tolerance."
    end

    return
end