Package org.jblas

Examples of org.jblas.DoubleMatrix


     * @param M input double matrix
     * @return sigmoid = 1 ./ (1 + exp(-M))
     */
    @Override
    public DoubleMatrix valueAt(DoubleMatrix M) {
        // 1 + exp(-M); rdivi(1) then computes 1 ./ Denom in place
        DoubleMatrix Denom = MatrixFunctions.exp(M.mul(-1)).addi(1);
        return Denom.rdivi(1);
    }
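
For reference, a minimal standalone sketch (not from the project above; the class and variable names are made up) that evaluates the same element-wise sigmoid on a small matrix. Since sigmoid(0) = 0.5, the first entry of the result should print as 0.5.

import org.jblas.DoubleMatrix;
import org.jblas.MatrixFunctions;

public class SigmoidSketch {
    public static void main(String[] args) {
        // 2x2 matrix; the varargs constructor fills column by column
        DoubleMatrix m = new DoubleMatrix(2, 2, 0.0, -2.0, 2.0, 10.0);
        // 1 ./ (1 + exp(-m)), using the same jblas calls as valueAt above
        DoubleMatrix sigmoid = MatrixFunctions.exp(m.mul(-1)).addi(1.0).rdivi(1.0);
        System.out.println(sigmoid); // entry (0, 0) is 0.5
    }
}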


     * @param X input double matrix
     * @return sigmoid_prime = M .* (1 - M), where M = sigmoid(X)
     */
    @Override
    public DoubleMatrix derivativeAt(DoubleMatrix X) {
        DoubleMatrix M = valueAt(X);
        // element-wise M .* (1 - M)
        return M.mul(M.mul(-1).addi(1));
    }
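
A quick sanity check of the derivative formula, again as a hedged standalone sketch with illustrative names: at x = 0 the sigmoid is 0.5, so sigmoid'(0) = 0.5 * (1 - 0.5) = 0.25.

import org.jblas.DoubleMatrix;
import org.jblas.MatrixFunctions;

public class SigmoidDerivativeSketch {
    public static void main(String[] args) {
        DoubleMatrix x = DoubleMatrix.zeros(1);
        // sigmoid(0) = 0.5
        DoubleMatrix s = MatrixFunctions.exp(x.mul(-1)).addi(1.0).rdivi(1.0);
        // element-wise s .* (1 - s)
        DoubleMatrix sPrime = s.mul(s.mul(-1).addi(1.0));
        System.out.println(sPrime.get(0)); // 0.25
    }
}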

    System.clearProperty("spark.driver.port");
  }

  static void validatePrediction(MatrixFactorizationModel model, int users, int products, int features,
      DoubleMatrix trueRatings, double matchThreshold, boolean implicitPrefs, DoubleMatrix truePrefs) {
    // assemble the predicted user-feature matrix from the model's userFeatures
    DoubleMatrix predictedU = new DoubleMatrix(users, features);
    List<scala.Tuple2<Object, double[]>> userFeatures = model.userFeatures().toJavaRDD().collect();
    for (int i = 0; i < features; ++i) {
      for (scala.Tuple2<Object, double[]> userFeature : userFeatures) {
        predictedU.put((Integer)userFeature._1(), i, userFeature._2()[i]);
      }
    }
    // assemble the predicted product-feature matrix the same way
    DoubleMatrix predictedP = new DoubleMatrix(products, features);

    List<scala.Tuple2<Object, double[]>> productFeatures =
      model.productFeatures().toJavaRDD().collect();
    for (int i = 0; i < features; ++i) {
      for (scala.Tuple2<Object, double[]> productFeature : productFeatures) {
        predictedP.put((Integer)productFeature._1(), i, productFeature._2()[i]);
      }
    }

    // predicted rating matrix = U * P^T
    DoubleMatrix predictedRatings = predictedU.mmul(predictedP.transpose());

    if (!implicitPrefs) {
      for (int u = 0; u < users; ++u) {
        for (int p = 0; p < products; ++p) {
          double prediction = predictedRatings.get(u, p);
          double correct = trueRatings.get(u, p);
          Assert.assertTrue(String.format("Prediction=%2.4f not below match threshold of %2.2f",
                  prediction, matchThreshold), Math.abs(prediction - correct) < matchThreshold);
        }
      }
    } else {
      // For implicit prefs we use the confidence-weighted RMSE to test (ref Mahout's implicit ALS tests)
      double sqErr = 0.0;
      double denom = 0.0;
      for (int u = 0; u < users; ++u) {
        for (int p = 0; p < products; ++p) {
          double prediction = predictedRatings.get(u, p);
          double truePref = truePrefs.get(u, p);
          double confidence = 1.0 + /* alpha = */ 1.0 * Math.abs(trueRatings.get(u, p));
          double err = confidence * (truePref - prediction) * (truePref - prediction);
          sqErr += err;
          denom += confidence;
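
The implicit-feedback branch accumulates a confidence-weighted squared error and divides by the total confidence. A minimal sketch of that accumulation on small, made-up DoubleMatrix inputs (the names and values here are illustrative, not taken from the test):

import org.jblas.DoubleMatrix;

public class WeightedRmseSketch {
    public static void main(String[] args) {
        // hypothetical 2x2 true preferences, predictions, and raw ratings (column-major fill)
        DoubleMatrix truePrefs   = new DoubleMatrix(2, 2, 1.0, 0.0, 0.0, 1.0);
        DoubleMatrix predictions = new DoubleMatrix(2, 2, 0.9, 0.1, 0.2, 0.8);
        DoubleMatrix ratings     = new DoubleMatrix(2, 2, 4.0, 0.0, 0.0, 2.0);

        double alpha = 1.0; // confidence weight, matching the test above
        double sqErr = 0.0, denom = 0.0;
        for (int i = 0; i < truePrefs.length; ++i) {
            double confidence = 1.0 + alpha * Math.abs(ratings.get(i));
            double diff = truePrefs.get(i) - predictions.get(i);
            sqErr += confidence * diff * diff;
            denom += confidence;
        }
        System.out.println("confidence-weighted RMSE = " + Math.sqrt(sqErr / denom));
    }
}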

  }

  List<LabeledPoint> generateRidgeData(int numPoints, int numFeatures, double std) {
    org.jblas.util.Random.seed(42);
    // Pick weights as random values distributed uniformly in [-0.5, 0.5]
    DoubleMatrix w = DoubleMatrix.rand(numFeatures, 1).subi(0.5);
    return LinearDataGenerator.generateLinearInputAsList(0.0, w.data, numPoints, 42, std);
  }
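
The weight-generation line can be tried in isolation (a sketch assuming only jblas on the classpath; LinearDataGenerator belongs to Spark and is not needed here): seeding org.jblas.util.Random makes rand reproducible, and subi(0.5) shifts the uniform [0, 1) samples to [-0.5, 0.5).

import org.jblas.DoubleMatrix;

public class UniformWeightsSketch {
    public static void main(String[] args) {
        org.jblas.util.Random.seed(42);
        // uniform in [0, 1), shifted in place to [-0.5, 0.5)
        DoubleMatrix w = DoubleMatrix.rand(5, 1).subi(0.5);
        System.out.println(w);
        System.out.println("min = " + w.min() + ", max = " + w.max());
    }
}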

  public Boolean classify(double[] features) {
    if (this.weights == null || this.variance == null) {
      this.init(features.length);
    }

    // margin = w . x; a positive margin means the positive class
    double margin = new DoubleMatrix(this.weights).dot(new DoubleMatrix(features));
    Boolean prediction = margin > 0 ? Boolean.TRUE : Boolean.FALSE;

    return prediction;
  }

  public void update(Boolean label, double[] features) {
    if (this.weights == null || this.variance == null) {
      this.init(features.length);
    }

    DoubleMatrix weightsVector = new DoubleMatrix(1, this.weights.length, this.weights);
    DoubleMatrix varianceMatrix = new DoubleMatrix(this.variance);
    DoubleMatrix featuresVector = new DoubleMatrix(1, features.length, features);

    double margin = weightsVector.dot(featuresVector);

    double labelAsDouble = label ? 1.0 : -1.0;
    // update only when the signed margin violates the unit-margin condition
    if (margin * labelAsDouble < 1) {

      // confidence = x * Sigma * x^T under the current variance matrix
      double confidence = featuresVector.dot(featuresVector.mmul(varianceMatrix));

      double beta = 1 / (confidence + this.r);
      double alpha = Math.max(0, beta * (1 - labelAsDouble * margin));
      DoubleMatrix delta = featuresVector.mmul(varianceMatrix).mul(alpha * labelAsDouble);

      boolean zeroVector = MathUtil.isZeros(delta);

      if (!zeroVector) {
        this.weights = weightsVector.add(delta).toArray();
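
The margin and confidence terms above are plain jblas operations: dot for the inner product w . x and mmul against the variance matrix for x * Sigma * x^T. A standalone sketch with made-up values (the identity variance matrix and all numbers are illustrative):

import org.jblas.DoubleMatrix;

public class ConfidenceSketch {
    public static void main(String[] args) {
        // row vectors (1 x 3) to match the shapes used in update() above
        DoubleMatrix w = new DoubleMatrix(1, 3, 0.1, -0.2, 0.3);
        DoubleMatrix x = new DoubleMatrix(1, 3, 1.0, 0.5, -1.0);
        DoubleMatrix sigma = DoubleMatrix.eye(3); // variance matrix, identity for illustration

        double margin = w.dot(x);                 // w . x
        double confidence = x.dot(x.mmul(sigma)); // x * Sigma * x^T as a scalar
        System.out.println("margin = " + margin + ", confidence = " + confidence);
    }
}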

public class BenchmarkElementwise extends TestCase {
  public void testMuli() {
    int SIZE = 1000;
    int ITERS = 1000000;
    DoubleMatrix x = rand(SIZE);
    DoubleMatrix y = rand(SIZE);
    DoubleMatrix z = zeros(SIZE);
   
    tic("muli():");
    for (int i = 0; i < ITERS; i++)
      x.muli(y, z);
    toc();
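
The three-argument muli(y, z) writes the element-wise product into the preallocated z rather than allocating a new matrix each iteration, which is what this benchmark times. A rough standalone sketch of the same loop using System.nanoTime (the tic/toc helpers above belong to the benchmark harness):

import org.jblas.DoubleMatrix;

public class MuliSketch {
    public static void main(String[] args) {
        DoubleMatrix x = DoubleMatrix.rand(1000);
        DoubleMatrix y = DoubleMatrix.rand(1000);
        DoubleMatrix z = DoubleMatrix.zeros(1000);

        long start = System.nanoTime();
        for (int i = 0; i < 100000; i++) {
            x.muli(y, z); // element-wise product written into z, no new allocation
        }
        System.out.printf("muli into preallocated result: %.1f ms%n",
                (System.nanoTime() - start) / 1e6);
    }
}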

        super(testName);
    }
   
    @Override
    public void setUp() {
            A = new DoubleMatrix(4, 3,
                    1.0, 2.0, 3.0, 4.0,
                    5.0, 6.0, 7.0, 8.0,
                    9.0, 10.0, 11.0, 12.0).transpose();
    }
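
Note that the varargs constructor fills the matrix column by column, so the 4x3 literal above holds 1..4 in its first column and only reads as the intended 3x4 row layout after transpose(). A small sketch of that behaviour (values copied from the test, class name made up):

import org.jblas.DoubleMatrix;

public class ColumnMajorSketch {
    public static void main(String[] args) {
        DoubleMatrix a = new DoubleMatrix(4, 3,
                1.0, 2.0, 3.0, 4.0,
                5.0, 6.0, 7.0, 8.0,
                9.0, 10.0, 11.0, 12.0);
        System.out.println(a);             // columns are (1..4), (5..8), (9..12)
        System.out.println(a.get(0, 1));   // 5.0: row 0, column 1 before transposing
        System.out.println(a.transpose()); // rows are (1..4), (5..8), (9..12)
    }
}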

                    5.0, 6.0, 7.0, 8.0,
                    9.0, 10.0, 11.0, 12.0).transpose();
    }

    public void testGetRows() {
        DoubleMatrix ARows1to2 = new DoubleMatrix(2, 4,
                5.0, 9.0,
                6.0, 10.0,
                7.0, 11.0,
                8.0, 12.0);
        assertEquals(ARows1to2, A.getRows(new IntervalRange(1, 3)));

                                                 10.0, 11.0, 12.0);

  @Test
  public void allRange() {
    assertEquals(A, A.get(all(), all()));
    assertEquals(new DoubleMatrix(3, 1, 1.0, 2.0, 3.0), A.get(all(), 0));
    assertEquals(new DoubleMatrix(3, 1, 7.0, 8.0, 9.0), A.get(all(), 2));
  }
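
all() here presumably comes from a static import of org.jblas.ranges.RangeUtils; get(all(), c) selects every row of column c as a column vector. A brief sketch under that assumption, with a 3x3 matrix made up to match the assertions above:

import org.jblas.DoubleMatrix;
import org.jblas.ranges.IntervalRange;
import static org.jblas.ranges.RangeUtils.all;

public class RangeSketch {
    public static void main(String[] args) {
        // 3x3, filled column by column: columns are (1,2,3), (4,5,6), (7,8,9)
        DoubleMatrix a = new DoubleMatrix(3, 3,
                1.0, 2.0, 3.0,
                4.0, 5.0, 6.0,
                7.0, 8.0, 9.0);
        System.out.println(a.get(all(), 0));                        // column vector (1, 2, 3)
        System.out.println(a.get(all(), 2));                        // column vector (7, 8, 9)
        System.out.println(a.get(new IntervalRange(0, 2), all()));  // rows 0 and 1 (end-exclusive)
    }
}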
