func (this *Scatter) Insert(x *matrix.DenseMatrix) {
	/*
		Reference (Java):
		count++;
		Matrix delta = x.minus(mean);
		if (useSampleMean) mean = mean.plus(delta.times(1.0/count));
		scatter = scatter.plus(delta.times(x.minus(mean).transpose()));
	*/
	this.Count++
	// delta = x - mean, using the mean from before this observation
	delta, _ := x.MinusDense(this.Mean)
	// mean += delta / count
	deltaOverC := delta.Copy()
	deltaOverC.Scale(1 / float64(this.Count))
	this.Mean.Add(deltaOverC)
	// S += delta * (x - updated mean)^T
	xMinusMean, _ := x.MinusDense(this.Mean)
	xMinusMeanT := xMinusMean.Transpose()
	deltaTimesXMinusMeanT, _ := delta.TimesDense(xMinusMeanT)
	this.S.Add(deltaTimesXMinusMeanT)
}
func (this *Scatter) Remove(x *matrix.DenseMatrix) {
	/*
		Reference (Java):
		int new_count = count-1;
		Matrix new_mu = mean;
		if (useSampleMean) new_mu = mean.plus(mean.minus(x).times(1.0/new_count));
		scatter = scatter.minus((x.minus(new_mu)).times(x.minus(mean).transpose()));
		count = new_count;
		mean = new_mu;
		if (count == 0) {
			if (useSampleMean) mean = new Matrix(p, 1, 0);
			scatter = new Matrix(p, p, 0);
		}
	*/
	newCount := this.Count - 1
	// Removing the last observation resets the statistics to zero.
	if newCount == 0 {
		p := this.S.Rows()
		this.S = matrix.Zeros(p, p)
		this.Mean = matrix.Zeros(p, 1)
		this.Count = newCount
		return
	}
	// newMean = mean + (mean - x) / newCount
	newMean, _ := this.Mean.MinusDense(x)
	newMean.Scale(1 / float64(newCount))
	newMean.Add(this.Mean)
	// S -= (x - newMean) * (x - mean)^T
	xMinusNewMean, _ := x.MinusDense(newMean)
	xMinusMean, _ := x.MinusDense(this.Mean)
	xMinusMeanT := xMinusMean.Transpose()
	xCross, _ := xMinusNewMean.TimesDense(xMinusMeanT)
	this.S.Subtract(xCross)
	this.Count = newCount
	this.Mean = newMean
}
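// Usage sketch for the online update/downdate above. This is illustrative only:
// it assumes NewScatter(p), as called in NewIWPosterior below, initializes a
// p-dimensional Scatter with zero Mean, zero S, and Count == 0, and it assumes
// the matrix package (the file's existing alias) provides MakeDenseMatrix as in
// go.matrix. The observations are made up for illustration.
func exampleScatterUsage() *Scatter {
	s := NewScatter(2)

	x1 := matrix.MakeDenseMatrix([]float64{1, 2}, 2, 1)
	x2 := matrix.MakeDenseMatrix([]float64{3, 4}, 2, 1)

	// Online update: Mean and S track the set of inserted observations.
	s.Insert(x1)
	s.Insert(x2)

	// Downdate: removing x2 restores the statistics for {x1} alone.
	s.Remove(x2)

	return s
}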
func NewIWPosterior(M int, Psi *matrix.DenseMatrix) (this *IWPosterior) {
	this = new(IWPosterior)
	this.M, this.Psi = M, Psi.Copy()
	this.S = NewScatter(Psi.Rows())
	return
}
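// Usage sketch for NewIWPosterior. Assumptions, not confirmed by this file: M is
// the prior degrees-of-freedom count and Psi the prior scale matrix of an
// inverse-Wishart, observations are folded in through the embedded Scatter in
// the S field, and matrix.Eye/matrix.MakeDenseMatrix are available as in
// go.matrix. The data are made up.
func exampleIWPosterior() *IWPosterior {
	Psi := matrix.Eye(2)           // 2 x 2 prior scale
	post := NewIWPosterior(3, Psi) // hypothetical prior with 3 degrees of freedom

	x := matrix.MakeDenseMatrix([]float64{0.5, -0.5}, 2, 1)
	post.S.Insert(x) // fold one observation into the scatter statistics

	return post
}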
func (this *KnownVarianceLRPosterior) Remove(x, y *mx.DenseMatrix) {
	// Downdate the sufficient statistics: XXt -= x*x^T, YXt -= y*x^T.
	xxt, _ := x.TimesDense(x.Transpose())
	this.XXt.Subtract(xxt)
	yxt, _ := y.TimesDense(x.Transpose())
	this.YXt.Subtract(yxt)
}
func (this *KnownVarianceLRPosterior) Insert(x, y *mx.DenseMatrix) {
	// Update the sufficient statistics: XXt += x*x^T, YXt += y*x^T.
	xxt, _ := x.TimesDense(x.Transpose())
	this.XXt.Add(xxt)
	yxt, _ := y.TimesDense(x.Transpose())
	this.YXt.Add(yxt)
}
/*
	M is r x c, o x i
	Sigma is r x r, o x o
	Phi is c x c, i x i
	Sigma matches Y's o x 1 output dimension
	Phi matches X's i x 1 input dimension
*/
func NewKnownVarianceLRPosterior(M, Sigma, Phi *mx.DenseMatrix) (this *KnownVarianceLRPosterior) {
	if M.Rows() != Sigma.Rows() {
		panic("M.Rows != Sigma.Rows")
	}
	if M.Cols() != Phi.Cols() {
		panic("M.Cols != Phi.Cols")
	}
	if Sigma.Rows() != Sigma.Cols() {
		panic("Sigma is not square")
	}
	if Phi.Rows() != Phi.Cols() {
		panic("Phi is not square")
	}
	this = &KnownVarianceLRPosterior{
		M:     M,
		Sigma: Sigma,
		Phi:   Phi,
		XXt:   mx.Zeros(Phi.Cols(), Phi.Cols()),
		YXt:   mx.Zeros(Sigma.Cols(), Phi.Cols()),
	}
	return
}
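// Usage sketch tying the constructor to the Insert/Remove sufficient-statistic
// interface above. The dimensions (o = 2 outputs, i = 3 inputs) and data are
// hypothetical, and the sketch assumes the mx alias provides Eye and Ones
// constructors alongside the Zeros already used in this file (as in go.matrix).
func exampleKnownVarianceLR() *KnownVarianceLRPosterior {
	o, i := 2, 3
	M := mx.Zeros(o, i) // prior mean for A
	Sigma := mx.Eye(o)  // known output covariance
	Phi := mx.Eye(i)    // prior column covariance

	post := NewKnownVarianceLRPosterior(M, Sigma, Phi)

	x := mx.Ones(i, 1)
	y := mx.Ones(o, 1)
	post.Insert(x, y) // XXt += x*x^T, YXt += y*x^T
	post.Remove(x, y) // undoes the insertion

	return post
}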
/*
	If Y ~ N(AX, Sigma, I) and A ~ N(M, Sigma, Phi),
	this returns a sampler for P(A|X,Y,Sigma,M,Phi).
*/
func KnownVariancePosterior(Y, X, Sigma, M, Phi *mx.DenseMatrix) func() (A *mx.DenseMatrix) {
	o := Y.Rows()
	i := X.Rows()
	n := Y.Cols()
	if n != X.Cols() {
		panic("X and Y don't have the same number of columns")
	}
	if o != M.Rows() {
		panic("Y.Rows != M.Rows")
	}
	if i != M.Cols() {
		panic("X.Rows != M.Cols")
	}
	if o != Sigma.Rows() {
		panic("Y.Rows != Sigma.Rows")
	}
	if Sigma.Cols() != Sigma.Rows() {
		panic("Sigma is not square")
	}
	if i != Phi.Rows() {
		panic("X.Rows != Phi.Rows")
	}
	if Phi.Cols() != Phi.Rows() {
		panic("Phi is not square")
	}

	Xt := X.Transpose()

	// Posterior column covariance: Omega = (X*X^T + Phi^-1)^-1
	PhiInv, err := Phi.Inverse()
	if err != nil {
		panic(err)
	}
	XXt, err := X.TimesDense(Xt)
	if err != nil {
		panic(err)
	}
	XXtpPhiInv, err := XXt.PlusDense(PhiInv)
	if err != nil {
		panic(err)
	}
	Omega, err := XXtpPhiInv.Inverse()
	if err != nil {
		panic(err)
	}

	// Posterior mean: Mxy = (Y*X^T + M*Phi^-1) * Omega
	YXtpMPhiInv, err := Y.TimesDense(Xt)
	if err != nil {
		panic(err)
	}
	MPhiInv, err := M.TimesDense(PhiInv)
	if err != nil {
		panic(err)
	}
	err = YXtpMPhiInv.AddDense(MPhiInv)
	if err != nil {
		panic(err)
	}
	Mxy, err := YXtpMPhiInv.TimesDense(Omega)
	if err != nil {
		panic(err)
	}

	if false {
		// Debugging output, disabled.
		fmt.Printf("Mxy:\n%v\n", Mxy)
		fmt.Printf("Sigma:\n%v\n", Sigma)
		fmt.Printf("Omega:\n%v\n", Omega)
	}

	return stat.MatrixNormal(Mxy, Sigma, Omega)
}
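// Usage sketch for the closed-form sampler. The dimensions and data are
// hypothetical, and the sketch assumes the mx alias provides Eye and Ones
// alongside the Zeros used above (as in go.matrix); fmt is already imported by
// this file. Each call to the returned closure draws an independent A from
// P(A|X,Y,Sigma,M,Phi).
func exampleKnownVariancePosterior() {
	o, i, n := 2, 3, 5

	Y := mx.Ones(o, n)  // o x n observed outputs
	X := mx.Ones(i, n)  // i x n observed inputs
	Sigma := mx.Eye(o)  // known output covariance
	M := mx.Zeros(o, i) // prior mean for A
	Phi := mx.Eye(i)    // prior column covariance

	sampleA := KnownVariancePosterior(Y, X, Sigma, M, Phi)
	A := sampleA() // one posterior draw, o x i
	fmt.Printf("A:\n%v\n", A)
}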