local.go (forked from gonum/optimize)

// Copyright ©2014 The gonum Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package optimize

import (
	"fmt"
	"math"
	"time"

	"github.com/gonum/floats"
	"github.com/gonum/matrix/mat64"
)

// Local finds a local minimum of a function using a sequential algorithm.
// In order to maximize a function, multiply the output by -1.
//
// The first argument is a Problem describing the function to be minimized.
// The Func field must be non-nil; the Grad, Hess and Status fields are used
// when they are set.
//
// The second argument is the initial location at which to start the minimization.
// The initial location must be supplied, and must have a length equal to the
// problem dimension.
//
// The third argument contains the settings for the minimization. It is here that
// gradient tolerance, etc. are specified. The DefaultSettings() function
// can be called for a Settings struct with the default values initialized.
// If settings == nil, the default settings are used. Please see the documentation
// for the Settings structure for more information. The optimization Method used
// may also contain settings, see documentation for the appropriate optimizer.
//
// The final argument is the optimization method to use. If method == nil, then
// an appropriate default is chosen based on the properties of the other arguments
// (dimension, gradient-free or gradient-based, etc.). The optimization
// methods in this package are designed such that reasonable defaults occur
// if options are not specified explicitly. For example, the code
//	method := &BFGS{}
// creates a pointer to a new BFGS struct. When Local is called, the settings
// in the method will be populated with default values. The methods are also
// designed such that they can be reused in future calls to Local.
//
// Local returns a Result struct and any error that occurred. Please see the
// documentation of Result for more information.
//
// Please be aware that the default behavior of Local is to find the minimum.
// For certain functions and optimization methods, this process can take many
// function evaluations. If you would like to put limits on this, for example
// maximum runtime or maximum function evaluations, please modify the Settings
// input struct.
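//
// A minimal usage sketch (hedged; the Rosenbrock objective and the variable
// names below are illustrative, not part of this package):
//
//	p := Problem{
//		Func: func(x []float64) float64 {
//			a, b := 1-x[0], x[1]-x[0]*x[0]
//			return a*a + 100*b*b
//		},
//	}
//	result, err := Local(p, []float64{-1.2, 1}, nil, nil)
//	if err != nil {
//		log.Fatal(err)
//	}
//	fmt.Println(result.Location.F, result.Location.X)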
func Local(p Problem, initX []float64, settings *Settings, method Method) (*Result, error) {
if p.Func == nil {
panic("optimize: objective function is undefined")
}
if len(initX) == 0 {
panic("optimize: initial X has zero length")
}
startTime := time.Now()
if method == nil {
method = getDefaultMethod(&p)
}
if err := p.satisfies(method); err != nil {
return nil, err
}
if p.Status != nil {
_, err := p.Status()
if err != nil {
return nil, err
}
}
if settings == nil {
settings = DefaultSettings()
}
if settings.Recorder != nil {
// Initialize Recorder first. If it fails, we avoid the (possibly
// time-consuming) evaluation of Func() and Grad() at the starting location.
err := settings.Recorder.Init()
if err != nil {
return nil, err
}
}
stats := &Stats{}
optLoc, evalType, err := getStartingLocation(&p, method, initX, stats, settings)
if err != nil {
return nil, err
}
if settings.FunctionConverge != nil {
settings.FunctionConverge.Init(optLoc.F)
}
// Runtime is the only Stats field that needs to be updated here.
stats.Runtime = time.Since(startTime)
// Send optLoc to Recorder before checking it for convergence.
if settings.Recorder != nil {
err = settings.Recorder.Record(optLoc, evalType, InitIteration, stats)
}
// Check if the starting location satisfies the convergence criteria.
status := checkConvergence(optLoc, InitIteration, stats, settings)
if status == NotTerminated && err == nil {
// The starting location does not satisfy the convergence criteria, so
// perform a minimization. The optimal location will be stored in-place
// in optLoc.
status, err = minimize(settings, method, &p, stats, optLoc, startTime)
}
if settings.Recorder != nil && err == nil {
// Send the optimal location to Recorder.
err = settings.Recorder.Record(optLoc, NoEvaluation, PostIteration, stats)
}
stats.Runtime = time.Since(startTime)
return &Result{
Location: *optLoc,
Stats: *stats,
Status: status,
}, err
}
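
// minimize runs the main optimization loop. It repeatedly evaluates the
// problem at the locations proposed by method, updates stats and optLoc, and
// checks the convergence criteria. The best location found is stored
// in-place in optLoc, and the loop exits only by returning a Status.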
func minimize(settings *Settings, method Method, p *Problem, stats *Stats, optLoc *Location, startTime time.Time) (status Status, err error) {
loc := &Location{}
copyLocation(loc, optLoc)
xNext := make([]float64, len(loc.X))
methodStatus, methodIsStatuser := method.(Statuser)
evalType, iterType, err := method.Init(loc, xNext)
if err != nil {
return Failure, err
}
for {
if p.Status != nil {
// Check the function status before evaluating.
status, err = p.Status()
if err != nil || status != NotTerminated {
return
}
}
// Perform an evalType evaluation of the problem at xNext and store the
// result in loc.
evaluate(p, evalType, xNext, loc, stats)
// Update the stats and optLoc.
update(loc, optLoc, stats, iterType, startTime)
// Get the convergence status before recording the new location.
status = checkConvergence(optLoc, iterType, stats, settings)
if settings.Recorder != nil {
err = settings.Recorder.Record(loc, evalType, iterType, stats)
if err != nil {
if status == NotTerminated {
status = Failure
}
return
}
}
if status != NotTerminated {
return
}
if methodIsStatuser {
status, err = methodStatus.Status()
if err != nil || status != NotTerminated {
return
}
}
// Find the next location (stored in-place into xNext).
evalType, iterType, err = method.Iterate(loc, xNext)
if err != nil {
status = Failure
return
}
}
panic("optimize: unreachable")
}
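
// copyLocation copies the contents of src into dst, reusing dst's existing
// storage where it is already the right size.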
func copyLocation(dst, src *Location) {
dst.X = resize(dst.X, len(src.X))
copy(dst.X, src.X)
dst.F = src.F
dst.Gradient = resize(dst.Gradient, len(src.Gradient))
copy(dst.Gradient, src.Gradient)
if src.Hessian != nil {
if dst.Hessian == nil || dst.Hessian.Symmetric() != len(src.X) {
dst.Hessian = mat64.NewSymDense(len(src.X), nil)
}
dst.Hessian.CopySym(src.Hessian)
}
}
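
// getDefaultMethod returns a default optimization Method: BFGS when the
// gradient is available, Nelder-Mead otherwise.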
func getDefaultMethod(p *Problem) Method {
if p.Grad != nil {
return &BFGS{}
}
return &NelderMead{}
}

// getStartingLocation allocates and initializes the starting location for the minimization.
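// If settings.UseInitialData is true, the values stored in settings are
// copied into the location instead of evaluating the problem. A hedged
// caller-side sketch (f0 and g0 stand for previously computed values and
// are assumptions, not package identifiers):
//
//	settings := DefaultSettings()
//	settings.UseInitialData = true
//	settings.InitialValue = f0    // Func value already known at initX
//	settings.InitialGradient = g0 // gradient already known at initX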
func getStartingLocation(p *Problem, method Method, initX []float64, stats *Stats, settings *Settings) (*Location, EvaluationType, error) {
dim := len(initX)
loc := &Location{
X: make([]float64, dim),
}
copy(loc.X, initX)
if method.Needs().Gradient {
loc.Gradient = make([]float64, dim)
}
if method.Needs().Hessian {
loc.Hessian = mat64.NewSymDense(dim, nil)
}
evalType := NoEvaluation
if settings.UseInitialData {
loc.F = settings.InitialValue
if loc.Gradient != nil {
initG := settings.InitialGradient
if initG == nil {
panic("optimize: initial gradient is nil")
}
if len(initG) != dim {
panic("optimize: initial gradient size mismatch")
}
copy(loc.Gradient, initG)
}
if loc.Hessian != nil {
initH := settings.InitialHessian
if initH == nil {
panic("optimize: initial Hessian is nil")
}
if initH.Symmetric() != dim {
panic("optimize: initial Hessian size mismatch")
}
loc.Hessian.CopySym(initH)
}
} else {
evalType = FuncEvaluation
if loc.Gradient != nil {
evalType |= GradEvaluation
}
if loc.Hessian != nil {
evalType |= HessEvaluation
}
evaluate(p, evalType, loc.X, loc, stats)
}
if math.IsNaN(loc.F) {
return loc, evalType, ErrNaN
}
if math.IsInf(loc.F, 1) {
return loc, evalType, ErrInf
}
for _, v := range loc.Gradient {
if math.IsInf(v, 0) {
return loc, evalType, ErrGradInf
}
if math.IsNaN(v) {
return loc, evalType, ErrGradNaN
}
}
return loc, evalType, nil
}
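
// checkConvergence returns the Status of the optimization at loc. The
// gradient and function thresholds are tested only at initial and major
// iterations, while the evaluation and runtime limits are tested on every
// call. It returns NotTerminated if no criterion is met.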
func checkConvergence(loc *Location, iterType IterationType, stats *Stats, settings *Settings) Status {
if iterType == MajorIteration || iterType == InitIteration {
if loc.Gradient != nil {
norm := floats.Norm(loc.Gradient, math.Inf(1))
if norm < settings.GradientThreshold {
return GradientThreshold
}
}
if loc.F < settings.FunctionThreshold {
return FunctionThreshold
}
}
if iterType == MajorIteration && settings.FunctionConverge != nil {
status := settings.FunctionConverge.FunctionConverged(loc.F)
if status != NotTerminated {
return status
}
}
// Check every step for negative infinity because it could break the
// linesearches and -inf is the best you can do anyway.
if math.IsInf(loc.F, -1) {
return FunctionNegativeInfinity
}
if settings.FuncEvaluations > 0 {
if stats.FuncEvaluations >= settings.FuncEvaluations {
return FunctionEvaluationLimit
}
}
if settings.GradEvaluations > 0 {
if stats.GradEvaluations >= settings.GradEvaluations {
return GradientEvaluationLimit
}
}
if settings.HessEvaluations > 0 {
if stats.HessEvaluations >= settings.HessEvaluations {
return HessianEvaluationLimit
}
}
if settings.Runtime > 0 {
// TODO(vladimir-ch): It would be nice to update Runtime here.
if stats.Runtime >= settings.Runtime {
return RuntimeLimit
}
}
if iterType == MajorIteration && settings.MajorIterations > 0 {
if stats.MajorIterations >= settings.MajorIterations {
return IterationLimit
}
}
return NotTerminated
}

// invalidate marks the fields of Location as invalid by writing NaN markers
// into them. It is the responsibility of the Method to issue EvaluationTypes
// that make the Location valid again. Invalidation is done internally to help
// implementers detect silent bugs in Methods that use an inconsistent
// Location, e.g., reading Gradient without having requested a GradEvaluation.
func invalidate(loc *Location) {
loc.F = math.NaN()
if loc.Gradient != nil {
loc.Gradient[0] = math.NaN()
}
if loc.Hessian != nil {
loc.Hessian.SetSym(0, 0, math.NaN())
}
}

// evaluate evaluates the problem given by p at xNext, stores the answer into
// loc and updates stats. If loc.X is not equal to xNext, then unused fields of
// loc are set to NaN.
// evaluate panics if the function does not support the requested evalType.
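//
// EvaluationType is a bit set, so a Method may request several quantities in
// a single call, for example:
//
//	evalType := FuncEvaluation | GradEvaluation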
func evaluate(p *Problem, evalType EvaluationType, xNext []float64, loc *Location, stats *Stats) {
if !floats.Equal(loc.X, xNext) {
if evalType == NoEvaluation {
// Optimizers should not request NoEvaluation at a new location.
// The intent and therefore an appropriate action are both unclear.
panic("optimize: no evaluation requested at new location")
}
invalidate(loc)
copy(loc.X, xNext)
}
toEval := evalType
if evalType&FuncEvaluation != 0 {
loc.F = p.Func(loc.X)
stats.FuncEvaluations++
toEval &= ^FuncEvaluation
}
if evalType&GradEvaluation != 0 {
p.Grad(loc.X, loc.Gradient)
stats.GradEvaluations++
toEval &= ^GradEvaluation
}
if evalType&HessEvaluation != 0 {
p.Hess(loc.X, loc.Hessian)
stats.HessEvaluations++
toEval &= ^HessEvaluation
}
if toEval != NoEvaluation {
panic(fmt.Sprintf("optimize: unknown evaluation type %v", evalType))
}
}

// update updates stats and optLoc given a new evaluation at loc: it counts
// major iterations, records the best location found so far, and refreshes
// the measured runtime.
func update(loc *Location, optLoc *Location, stats *Stats, iterType IterationType, startTime time.Time) {
if iterType == MajorIteration {
stats.MajorIterations++
}
if loc.F <= optLoc.F {
copyLocation(optLoc, loc)
}
stats.Runtime = time.Since(startTime)
}