"""
-    NewtonRaphson(; concrete_jac = nothing, linsolve = nothing,
-        precs = DEFAULT_PRECS, adkwargs...)
+    NewtonRaphson(; concrete_jac = nothing, linsolve = nothing, linesearch = LineSearch(),
+        precs = DEFAULT_PRECS, reuse = true, reusetol = 1e-6, adkwargs...)

An advanced NewtonRaphson implementation with support for efficient handling of sparse
matrices via colored automatic differentiation and preconditioned linear solvers. Designed
@@ -29,31 +29,49 @@ for large-scale and numerically-difficult nonlinear systems.
  - `linesearch`: the line search algorithm to use. Defaults to [`LineSearch()`](@ref),
    which means that no line search is performed. Algorithms from `LineSearches.jl` can be
    used here directly, and they will be converted to the correct `LineSearch`.
+  - `reuse`: Determines whether the Jacobian is reused between (quasi-)Newton steps. Defaults
+    to `true`. If `true`, we track how far we have stepped with the same Jacobian and
+    automatically compute a new Jacobian once the accumulated step exceeds `reusetol`, or if
+    convergence slows or starts to diverge. If `false`, the Jacobian is updated at each step.
"""
@concrete struct NewtonRaphson{CJ, AD} <:
                 AbstractNewtonAlgorithm{CJ, AD}
    ad::AD
    linsolve
    precs
    linesearch
+    reusetol
+    reuse::Bool
end

function set_ad(alg::NewtonRaphson{CJ}, ad) where {CJ}
-    return NewtonRaphson{CJ}(ad, alg.linsolve, alg.precs, alg.linesearch)
+    return NewtonRaphson{CJ}(ad,
+        alg.linsolve,
+        alg.precs,
+        alg.linesearch,
+        alg.reusetol,
+        alg.reuse)
end

function NewtonRaphson(; concrete_jac = nothing, linsolve = nothing,
-    linesearch = LineSearch(), precs = DEFAULT_PRECS, adkwargs...)
+    linesearch = LineSearch(), precs = DEFAULT_PRECS, reuse = true, reusetol = 1e-6,
+    adkwargs...)
    ad = default_adargs_to_adtype(; adkwargs...)
    linesearch = linesearch isa LineSearch ? linesearch : LineSearch(; method = linesearch)
-    return NewtonRaphson{_unwrap_val(concrete_jac)}(ad, linsolve, precs, linesearch)
+    return NewtonRaphson{_unwrap_val(concrete_jac)}(ad,
+        linsolve,
+        precs,
+        linesearch,
+        reusetol,
+        reuse)
end

@concrete mutable struct NewtonRaphsonCache{iip} <: AbstractNonlinearSolveCache{iip}
    f
    alg
    u
-    u_prev
+    uprev
+    Δu
    fu1
    fu2
    du
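
For orientation, here is a minimal sketch of how the keyword arguments introduced above would be passed to the constructor. It assumes only the `NewtonRaphson` interface shown in this diff; the `reusetol` value is the proposed default, not a recommendation:

using NonlinearSolve

# Reuse the Jacobian across steps until the accumulated step exceeds `reusetol`
# (the proposed default behaviour of this change).
alg_reuse = NewtonRaphson(; reuse = true, reusetol = 1e-6)

# Opt out and recompute the Jacobian at every (quasi-)Newton step.
alg_fresh = NewtonRaphson(; reuse = false)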
@@ -81,22 +99,40 @@ function SciMLBase.__init(prob::NonlinearProblem{uType, iip}, alg_::NewtonRaphso
    alg = get_concrete_algorithm(alg_, prob)
    @unpack f, u0, p = prob
    u = alias_u0 ? u0 : deepcopy(u0)
+    uprev = deepcopy(u0)
+    Δu = zero(u0)
+
    fu1 = evaluate_f(prob, u)
    uf, linsolve, J, fu2, jac_cache, du = jacobian_caches(alg, f, u, p, Val(iip);
        linsolve_kwargs)

    abstol, reltol, tc_cache = init_termination_cache(abstol, reltol, fu1, u,
        termination_condition)

-    return NewtonRaphsonCache{iip}(f, alg, u, copy(u), fu1, fu2, du, p, uf, linsolve, J,
+    return NewtonRaphsonCache{iip}(f, alg, u, uprev, Δu, fu1, fu2, du, p, uf, linsolve, J,
        jac_cache, false, maxiters, internalnorm, ReturnCode.Default, abstol, reltol, prob,
        NLStats(1, 0, 0, 0, 0),
        init_linesearch_cache(alg.linesearch, f, u, p, fu1, Val(iip)), tc_cache)
end

function perform_step!(cache::NewtonRaphsonCache{true})
-    @unpack u, u_prev, fu1, f, p, alg, J, linsolve, du = cache
-    jacobian!!(J, cache)
+    @unpack u, uprev, Δu, fu1, f, p, alg, J, linsolve, du = cache
+    @unpack reuse = alg
+
+    if reuse
+        # check how far we have stepped with the current Jacobian
+        @. Δu += u - uprev
+        update = cache.internalnorm(Δu) > alg.reusetol
+        if update || cache.stats.njacs == 0
+            jacobian!!(J, cache)
+            cache.stats.njacs += 1
+            Δu .*= false  # multiplying by `false` zeroes the accumulated step in place
+        end
+    else
+        jacobian!!(J, cache)
+        cache.stats.njacs += 1
+    end
+    cache.uprev .= u

    # u = u - J \ fu
    linres = dolinsolve(alg.precs, linsolve; A = J, b = _vec(fu1), linu = _vec(du),
@@ -112,16 +148,32 @@ function perform_step!(cache::NewtonRaphsonCache{true})

-    @. u_prev = u
    cache.stats.nf += 1
-    cache.stats.njacs += 1
    cache.stats.nsolve += 1
    cache.stats.nfactors += 1
    return nothing
end

function perform_step!(cache::NewtonRaphsonCache{false})
-    @unpack u, u_prev, fu1, f, p, alg, linsolve = cache
+    @unpack u, uprev, Δu, fu1, f, p, alg, linsolve = cache
+    @unpack reuse = alg
+
+    if reuse
+        # check how far we have stepped with the current Jacobian
+        cache.Δu += u - uprev
+        update = cache.internalnorm(cache.Δu) > alg.reusetol
+        if update || cache.stats.njacs == 0
+            cache.J = jacobian!!(cache.J, cache)
+            cache.stats.njacs += 1
+            cache.Δu *= false  # reset the accumulated step
+        end
+    else
+        cache.J = jacobian!!(cache.J, cache)
+        cache.stats.njacs += 1
+    end
+
+    cache.uprev = u

-    cache.J = jacobian!!(cache.J, cache)
    # u = u - J \ fu
    if linsolve === nothing
        cache.du = fu1 / cache.J
@@ -140,7 +192,6 @@ function perform_step!(cache::NewtonRaphsonCache{false})

-    cache.u_prev = cache.u
    cache.stats.nf += 1
-    cache.stats.njacs += 1
    cache.stats.nsolve += 1
    cache.stats.nfactors += 1
    return nothing
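
To sanity-check the heuristic end to end, a small sketch along these lines could compare Jacobian counts with and without reuse. It assumes the standard `NonlinearProblem`/`solve` interface and that the returned solution exposes the `NLStats` counters set up in `__init` above (`njacs` in particular); the loose `reusetol` is chosen purely so that reuse actually triggers on this toy problem:

using NonlinearSolve

f(u, p) = u .* u .- p                       # simple out-of-place residual, roots at sqrt(p)
prob = NonlinearProblem(f, [1.0, 1.0], 2.0)

sol_reuse = solve(prob, NewtonRaphson(; reuse = true, reusetol = 1e-1))
sol_fresh = solve(prob, NewtonRaphson(; reuse = false))

# With reuse enabled, `njacs` may be smaller than the number of steps taken,
# while `reuse = false` recomputes the Jacobian on every step.
@show sol_reuse.stats.njacs sol_fresh.stats.njacs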