Package: gov.sandia.cognition.math.matrix

Examples of gov.sandia.cognition.math.matrix.Matrix.times()


      /*
       * Construct the measurement prior predictive likelihood
       * t_n (H*m^psi, d*C^psi/n)
       */
      final Vector mPriorPredMean = H.times(priorPsi.getMean());
      final Matrix mPriorPredCov = H.times(priorPsi.getCovariance()).times(H.transpose())
          .plus(Iy.scale(2d));

      // TODO FIXME
      final Matrix stPriorPredPrec = mPriorPredCov.inverse().scale(
View Full Code Here


      final Vector Hv = H.convertToVector();
      /*
       * TODO FIXME inverse!  ewww.
       */
      final Matrix postAInv = priorAInv.plus(Hv.outerProduct(Hv)).inverse();
      final Vector postPsiMean = postAInv.times(priorAInv.times(psiPriorSmpl).plus(
          H.transpose().times(postStateSample)));
      final MultivariateGaussian postPsi = predState.getPsiSS().clone();
      postPsi.setMean(postPsiMean);
      postPsi.setCovariance(postAInv);
     
View Full Code Here

      H.setSubMatrix(0, 0, Ij);
      H.setSubMatrix(0, xDim, MatrixFactory.getDefault().createDiagonal(predState.getStateSample()));
      final Vector postStateSample = posteriorState.sample(this.rng);
      final MultivariateGaussian priorPhi = predState.getPsiSS();
      final Vector phiPriorSmpl = priorPhi.sample(this.rng);
      final Vector xHdiff = postStateSample.minus(H.times(phiPriorSmpl));

      final double newN = scaleSS.getShape() + 1d;
      final double d = scaleSS.getScale() + xHdiff.dotProduct(xHdiff);
     
      scaleSS.setScale(d);
View Full Code Here

       * supporting the 1d case for now.
       */
      final Vector Hv = H.convertToVector();
      final Matrix postAInv = priorAInv.plus(Hv.outerProduct(Hv)).inverse();
      // TODO FIXME: ewww.  inverse.
      final Vector postPhiMean = postAInv.times(priorAInv.times(phiPriorSmpl).plus(
          H.transpose().times(postStateSample)));
      final MultivariateGaussian postPhi = systemOffsetsSS;
      postPhi.setMean(postPhiMean);
      postPhi.setCovariance(postAInv.scale(newScaleSmpl));
     
View Full Code Here

      for (int i = 0; i < this.numCategories; i++) {
  //        final Vector betaSample = particle.getLinearState().sample(this.random);
        final MultivariateGaussian predictivePrior = particle.getLinearState().clone();
        KalmanFilter kf = particle.getRegressionFilter(i);
        final Matrix G = kf.getModel().getA();
        predictivePrior.setMean(G.times(predictivePrior.getMean()));
        predictivePrior.setCovariance(
            G.times(predictivePrior.getCovariance()).times(G.transpose())
              .plus(kf.getModelCovariance()));
 
        // X * beta
 
View Full Code Here

        final MultivariateGaussian predictivePrior = particle.getLinearState().clone();
        KalmanFilter kf = particle.getRegressionFilter(i);
        final Matrix G = kf.getModel().getA();
        predictivePrior.setMean(G.times(predictivePrior.getMean()));
        predictivePrior.setCovariance(
            G.times(predictivePrior.getCovariance()).times(G.transpose())
              .plus(kf.getModelCovariance()));
 
        // X * beta
        final double lambda = Math.exp(data.getObservedData().times(
            predictivePrior.getMean()).getElement(0));
View Full Code Here

      final Vector betaMeanError = postBetaSmoothedSample.minus(priorBetaSmoothedSample);
      final ScaledInverseGammaCovDistribution updatedBetaCov = particle.getPriorBetaCov().clone();
      updateCovariancePrior(updatedBetaCov, betaMeanError);
      final Matrix betaCovSmpl = updatedBetaCov.sample(random);
      Preconditions.checkState(betaCovSmpl.getElement(0, 0) >= 0d);
      updatedBetaMean.setCovariance(lambdaSamplesMatrix.times(betaCovSmpl
          .times(updatedBetaMean.getCovariance())));

      /*
       * Now, do the above for the global mean term.
       */
 
View Full Code Here

    final Matrix FG = F.times(G);
    final Matrix A = FG.times(R).times(FG.transpose()).plus(W);
    final Matrix Wtil =
        A.transpose().solve(FG.times(R.transpose())).transpose();

    final Vector aSmooth = a.plus(Wtil.times(y.minus(FG.times(a))));
    final Matrix RSmooth =
        R.minus(Wtil.times(A).times(Wtil.transpose()));
   
    return new MultivariateGaussian(aSmooth, RSmooth);
  }
View Full Code Here

    final Matrix FG = F.times(G);
    final Matrix A = FG.times(C).times(FG.transpose()).plus(W);
    final Matrix Wtil =
        A.transpose().solve(FG.times(C.transpose())).transpose();

    final Vector mSmooth = m.plus(Wtil.times(y.minus(FG.times(m))));
    final Matrix CSmooth =
        C.minus(Wtil.times(A).times(Wtil.transpose()));
    return new MultivariateGaussian(mSmooth, CSmooth);
  }
View Full Code Here

    final Matrix c2inv = Xt.times(obsDist.getCovarianceInverse()).times(X);

    final Matrix Cinv = c1inv.plus(c2inv);
    final Matrix C = Cinv.inverse();

    final Vector m = C.times(c1inv.times(m1).plus(
      Xt.times(obsDist.getCovarianceInverse()).times(m2)));

    prior.setMean(m);
    prior.setCovariance(C);
  }
View Full Code Here

TOP
Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc., which is owned by Oracle Inc. Contact coftware#gmail.com.