func (u *uniGradStruct) Initialize() error {
	initLoc := u.loc.Init()
	initObj := u.obj.Init()
	initGrad := u.grad.Init()
	// The initial objective and gradient must either both be NaN (unset)
	// or both be non-NaN (set).
	if math.IsNaN(initObj) {
		if !math.IsNaN(initGrad) {
			return errors.New("gofunopter: cubic: initial function value and gradient must either both be set or neither set")
		}
		// Both are NaN, so compute the initial function value and gradient.
		initObj, initGrad, err := u.fun.ObjGrad(initLoc)
		if err != nil {
			return errors.New("gofunopter: cubic: error calling function during optimization: " + err.Error())
		}
		u.obj.SetInit(initObj)
		u.grad.SetInit(initGrad)
	} else {
		if math.IsNaN(initGrad) {
			return errors.New("gofunopter: cubic: initial function value and gradient must either both be set or neither set")
		}
	}
	err := optimize.Initialize(u.loc, u.obj, u.grad)
	if err != nil {
		return err
	}
	err = u.optimizer.Initialize(u.loc, u.obj, u.grad)
	if err != nil {
		return err
	}
	return nil
}
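// A minimal sketch of the NaN-as-unset convention Initialize relies on: the
// scalar objective and gradient count as unset while they are NaN, and a
// single ObjGrad call fills in both. UniObjGrad, Quadratic, and
// exampleUniInit are hypothetical names for illustration, not this
// package's confirmed API.
type UniObjGrad interface {
	ObjGrad(x float64) (obj, grad float64, err error)
}

// Quadratic is a toy objective: f(x) = (x-2)^2, with derivative 2(x-2).
type Quadratic struct{}

func (Quadratic) ObjGrad(x float64) (float64, float64, error) {
	return (x - 2) * (x - 2), 2 * (x - 2), nil
}

func exampleUniInit(fun UniObjGrad) (obj, grad float64, err error) {
	obj, grad = math.NaN(), math.NaN() // both start unset
	if math.IsNaN(obj) != math.IsNaN(grad) {
		return 0, 0, errors.New("objective and gradient must both be set or both unset")
	}
	if math.IsNaN(obj) {
		// Both unset: evaluate the function once to seed the initial values.
		obj, grad, err = fun.ObjGrad(0)
	}
	return obj, grad, err
}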
func (lbfgs *Lbfgs) Initialize(loc *multi.Location, obj *uni.Objective, grad *multi.Gradient) error {
	lbfgs.nDim = len(loc.Init())
	// Initialize the step so that its initial value becomes its current value.
	err := optimize.Initialize(lbfgs.step)
	if err != nil {
		return errors.New("lbfgs: error initializing: " + err.Error())
	}
	// Allocate the rest of the memory.
	// TODO: Replace these allocations with overwriting existing memory?
	lbfgs.q = make([]float64, lbfgs.nDim)
	lbfgs.a = make([]float64, lbfgs.NumStore)
	lbfgs.b = make([]float64, lbfgs.NumStore)
	lbfgs.sHist = make([][]float64, lbfgs.NumStore)
	lbfgs.yHist = make([][]float64, lbfgs.NumStore)
	lbfgs.rhoHist = make([]float64, lbfgs.NumStore)
	for i := range lbfgs.sHist {
		lbfgs.sHist[i] = make([]float64, lbfgs.nDim)
		lbfgs.yHist[i] = make([]float64, lbfgs.nDim)
	}
	lbfgs.gamma_k = 1.0
	lbfgs.tmp = make([]float64, lbfgs.nDim)
	lbfgs.p_k = make([]float64, lbfgs.nDim)
	lbfgs.s_k = make([]float64, lbfgs.nDim)
	lbfgs.y_k = make([]float64, lbfgs.nDim)
	lbfgs.z = make([]float64, lbfgs.nDim)
	return nil
}
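// The buffers allocated above feed the standard L-BFGS two-loop recursion,
// which combines the NumStore stored (s, y, rho) history pairs with the
// scaling gamma_k to produce a search direction z without ever forming the
// Hessian. The sketch below follows the textbook algorithm (Nocedal &
// Wright); it is an assumption about how this package uses the memory, not
// its confirmed update code, and it assumes history slot 0 holds the oldest
// pair.
func twoLoop(q, a, rhoHist []float64, sHist, yHist [][]float64, gammaK float64, z []float64) {
	// q enters holding a copy of the current gradient.
	for i := len(sHist) - 1; i >= 0; i-- {
		a[i] = rhoHist[i] * dot(sHist[i], q)
		axpy(-a[i], yHist[i], q) // q -= a[i] * yHist[i]
	}
	for j := range q {
		z[j] = gammaK * q[j] // initial Hessian approximation gamma_k * I
	}
	for i := 0; i < len(sHist); i++ {
		b := rhoHist[i] * dot(yHist[i], z)
		axpy(a[i]-b, sHist[i], z) // z += (a[i] - b) * sHist[i]
	}
}

func dot(x, y []float64) (s float64) {
	for i := range x {
		s += x[i] * y[i]
	}
	return s
}

// axpy computes y += alpha * x in place.
func axpy(alpha float64, x, y []float64) {
	for i := range x {
		y[i] += alpha * x[i]
	}
}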
func (m *multiGradStruct) Initialize() error {
	initLoc := m.loc.Init()
	initObj := m.obj.Init()
	initGrad := m.grad.Init()
	// The initial objective and gradient must either both be set or both
	// be unset (a NaN objective and an empty gradient).
	if math.IsNaN(initObj) {
		if len(initGrad) != 0 {
			return errors.New("initial function value and gradient must either both be set or neither set")
		}
		// Both are unset, so compute the initial function value and gradient.
		initObj, initGrad, err := m.fun.ObjGrad(initLoc)
		if err != nil {
			return errors.New("error calling function during optimization: \n" + err.Error())
		}
		m.obj.SetInit(initObj)
		m.grad.SetInit(initGrad)
	} else {
		if len(initGrad) == 0 {
			return errors.New("initial function value and gradient must either both be set or neither set")
		}
	}
	err := optimize.Initialize(m.loc, m.obj, m.grad)
	if err != nil {
		return err
	}
	err = m.optimizer.Initialize(m.loc, m.obj, m.grad)
	if err != nil {
		return err
	}
	return nil
}
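// In the multi-dimensional case the unset marker differs from the scalar
// one: the objective is unset while NaN, but the gradient is unset while its
// slice is empty, which is why Initialize checks len(initGrad) rather than
// math.IsNaN. A toy sketch of the assumed multi-dimensional interface;
// MultiObjGrad, Rosenbrock, and exampleMultiInit are hypothetical names, not
// the confirmed package API.
type MultiObjGrad interface {
	ObjGrad(x []float64) (obj float64, grad []float64, err error)
}

// Rosenbrock is a toy 2-D objective: f = 100(y-x^2)^2 + (1-x)^2.
type Rosenbrock struct{}

func (Rosenbrock) ObjGrad(x []float64) (float64, []float64, error) {
	if len(x) != 2 {
		return 0, nil, errors.New("rosenbrock: need exactly two dimensions")
	}
	t := x[1] - x[0]*x[0]
	obj := 100*t*t + (1-x[0])*(1-x[0])
	grad := []float64{-400*x[0]*t - 2*(1-x[0]), 200 * t}
	return obj, grad, nil
}

func exampleMultiInit(fun MultiObjGrad, initLoc []float64) (float64, []float64, error) {
	obj, grad := math.NaN(), []float64(nil) // both start unset
	if math.IsNaN(obj) != (len(grad) == 0) {
		return 0, nil, errors.New("objective and gradient must both be set or both unset")
	}
	if math.IsNaN(obj) {
		return fun.ObjGrad(initLoc) // both unset: evaluate once to seed them
	}
	return obj, grad, nil
}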