Package org.jblas

Examples of org.jblas.DoubleMatrix$RowsAsListView


            return -loss;
        }

        @Override
        public void getValueGradient(double[] arg) {
            DoubleMatrix delta_b = my_y_samples.sub(curr_predict_y);
            DoubleMatrix delta_w = delta_b.transpose().mmul(my_x_samples);
            delta_b = delta_b.columnSums().divi(nbr_samples);
            delta_w.divi(nbr_samples);

            if (my_config.isUseRegularization()) {
                if (0 != my_config.getLamada1()) {
                    delta_w.addi(MatrixFunctions.signum(my_w).mmuli(my_config.getLamada1()));
                    delta_b.addi(MatrixFunctions.signum(my_b).transpose().mmuli(my_config.getLamada1()));
                }
                if (0 != my_config.getLamada2()) {
                    delta_w.addi(my_w.mmul(my_config.getLamada2()));
                    delta_b.addi(my_b.transpose().mmul(my_config.getLamada2()));
                }
            }

            int idx = 0;
            for (int i = 0; i < y_num; i++) {
                for (int j = 0; j < x_num; j++) {
                    arg[idx++] = delta_w.get(i, j);
                }
            }
            for (int i = 0; i < y_num; i++) {
                arg[idx++] = delta_b.get(0, i);
            }
View Full Code Here


    }
  }

  @Override
  protected double loss(List<SampleVector> samples) {
    DoubleMatrix x_samples = MathUtil.convertX2Matrix(samples);
        DoubleMatrix y_samples = MathUtil.convertY2Matrix(samples);
        DoubleMatrix sigmod_output = sigmod_output(x_samples);
    return MatrixFunctions.powi(sigmod_output.sub(y_samples), 2).sum();
  }
View Full Code Here

      /**
       * backward
       */
      // 1 last layer
      DoubleMatrix ai = activation[my_bpparam.nl - 1];
      l_bias[my_bpparam.nl - 1] = ai.sub(my_y_samples).muli(ai).muli(ai.neg().addi(1));
     
      //2 back(no layer0 error need)
      for(int i = my_bpparam.nl - 2; i >= 1; i--) {
        ai = activation[i];
        l_bias[i] = l_bias[i + 1].mmul(my_bpparam.w[i]).muli(ai).muli(ai.neg().addi(1));
      }
     
      /**
       * delta
       */
      int idx = 0;
      for(int i = 0; i < my_bpparam.w.length; i++) {
        DoubleMatrix delta_wi = l_bias[i + 1].transpose().mmul(activation[i]).divi(nbr_samples);
        if(my_config.isUseRegularization()) {
          //for bp, only use L2
          if(0 != my_config.getLamada2()) {
              delta_wi.addi(my_bpparam.w[i].mul(my_config.getLamada2()));
          }
        }
        for(int row = 0; row < delta_wi.rows; row++) {
          for(int col = 0; col < delta_wi.columns; col++) {
            arg[idx++] = -delta_wi.get(row, col);
          }
        }
      }
      for(int i = 0; i < my_bpparam.b.length; i++) {
        DoubleMatrix delta_bi = l_bias[i + 1].columnSums().divi(nbr_samples);
        for(int row = 0; row < delta_bi.rows; row++) {
          for(int col = 0; col < delta_bi.columns; col++) {
            arg[idx++] = -delta_bi.get(row, col);
          }
        }
      }
    }
View Full Code Here

    }

    /**
     * Performs one contrastive-divergence parameter update for this RBM on a
     * mini-batch. Runs one Gibbs step (v1 -> h1 -> v2 -> h2), forms the
     * positive-minus-negative phase gradients, optionally applies L2
     * regularization, and moves the parameters in place by learningRate.
     *
     * The two config branches differ in which quantities feed the gradient:
     * with isUseHintonCD1() the hidden/visible probabilities are used
     * (Hinton's CD-1 recipe), otherwise the binary samples are used.
     */
    @Override
    protected void gradientUpdateMiniBatch(SGDTrainConfig config, DoubleMatrix x_samples, DoubleMatrix y_samples, SGDParam curr_param) {
      int nbr_sample = x_samples.getRows();
      // Aliases into curr_param: updating these updates the model in place.
      DoubleMatrix curr_w = ((HiddenLayerParam)curr_param).w;
      DoubleMatrix curr_hbias = ((HiddenLayerParam)curr_param).hbias;
      DoubleMatrix curr_vbias = ((HiddenLayerParam)curr_param).vbias;
     
      // Gibbs-chain buffers, one row per sample in the batch.
      DoubleMatrix v1_sample = x_samples;
      DoubleMatrix h1_probability = new DoubleMatrix(nbr_sample, n_hidden);
      DoubleMatrix h1_sample = new DoubleMatrix(nbr_sample, n_hidden);
      DoubleMatrix v2_probability = new DoubleMatrix(nbr_sample, n_visible);
      DoubleMatrix v2_sample = new DoubleMatrix(nbr_sample, n_visible);
      DoubleMatrix h2_probability = new DoubleMatrix(nbr_sample, n_hidden);
      //DoubleMatrix nh_samples = new DoubleMatrix(nbr_sample, n_hidden);
     
      // One Gibbs step. A null output argument tells the sampler to skip
      // producing that buffer (the branch below does not need it).
      sample_h_given_v(v1_sample, h1_probability, h1_sample, curr_w, curr_hbias);
      if(config.isUseHintonCD1()) {
          sample_v_given_h(h1_sample, v2_probability, null, curr_w, curr_vbias);
          sample_h_given_v(v2_probability, h2_probability, null, curr_w, curr_hbias);
        }
        else {
            sample_v_given_h(h1_sample, v2_probability, v2_sample, curr_w, curr_vbias);
            sample_h_given_v(v2_sample, h2_probability, null, curr_w, curr_hbias);
        }
     
      DoubleMatrix delta_w = null;
      DoubleMatrix delta_hbias = null;
      DoubleMatrix delta_vbias = null;
     
      // Positive phase minus negative phase. Bias deltas are averaged over
      // the batch here; delta_w is averaged later, after regularization.
      if(config.isUseHintonCD1()) {
        delta_w = h1_probability.transpose().mmul(v1_sample).subi(h2_probability.transpose().mmul(v2_probability));
        delta_hbias = h1_probability.sub(h2_probability).columnSums().divi(nbr_sample);
        delta_vbias = v1_sample.sub(v2_probability).columnSums().divi(nbr_sample);
      }
      else {
        delta_w = h1_sample.transpose().mmul(v1_sample).subi(h2_probability.transpose().mmul(v2_sample));
        delta_hbias = h1_sample.sub(h2_probability).columnSums().divi(nbr_sample);
        delta_vbias = v1_sample.sub(v2_sample).columnSums().divi(nbr_sample);
      }

        // NOTE(review): subi is the correct sign here because delta_w is an
        // ascent direction that gets addi'd into curr_w below — the L2 term
        // pulls the weights toward zero. Also note the weight-decay term is
        // applied BEFORE the divi(nbr_sample) averaging, so its effective
        // strength is lamada2 / nbr_sample — confirm this is intended.
        if (config.isUseRegularization()) {
      //only L2 for RBM
      if (0 != config.getLamada2()) {
        delta_w.subi(curr_w.mul(config.getLamada2()));
            }
    }
        delta_w.divi(nbr_sample);
       
        // In-place gradient-ascent step on the shared parameter matrices.
        // The bias deltas are 1-row columnSums, transposed to match the
        // column-vector bias layout.
        curr_w.addi(delta_w.muli(config.getLearningRate()));
        curr_hbias.addi(delta_hbias.transpose().muli(config.getLearningRate()));
        curr_vbias.addi(delta_vbias.transpose().muli(config.getLearningRate()));
    }
View Full Code Here

      this(x_feature_num, y_class_num, null, null);
    }
   
    public LRParam(int x_feature_num, int y_class_num, double[][] _w, double[] _b) {
      if (null == _w) {
          w = new DoubleMatrix(y_class_num, x_feature_num);
        } else {
            w = new DoubleMatrix(_w);
        }
        if (null == _b) {
            b = new DoubleMatrix(y_class_num);
        } else {
            b = new DoubleMatrix(_b);
        }
    }
View Full Code Here

        curr_vbias.addi(delta_vbias.transpose().muli(config.getLearningRate()));
    }

    @Override
    protected void gradientUpdateCG(SGDTrainConfig config, DoubleMatrix x_samples, DoubleMatrix y_samples, SGDParam curr_param) {
      DoubleMatrix curr_w = ((HiddenLayerParam)curr_param).w;
      DoubleMatrix curr_hbias = ((HiddenLayerParam)curr_param).hbias;
      DoubleMatrix curr_vbias = ((HiddenLayerParam)curr_param).vbias;
     
      RBMOptimizer rbmopt = new RBMOptimizer(config, x_samples, n_visible, n_hidden, curr_w, curr_hbias, curr_vbias);
        MyConjugateGradient cg = new MyConjugateGradient(rbmopt, config.getCgInitStepSize());
        cg.setTolerance(config.getCgTolerance());
        try {
View Full Code Here

        }
    }

    @Override
    public DoubleMatrix reconstruct(DoubleMatrix input) {
        DoubleMatrix ret = input.mmul(hlparam.w.transpose()).addiRowVector(hlparam.hbias);
        MathUtil.sigmod(ret);
       
        //sample in hidden layer
        for (int i = 0; i < ret.rows; i++) {
      for (int j = 0; j < ret.columns; j++) {
        ret.put(i, j, MathUtil.binomial(1, ret.get(i, j)));
      }
    }
       
        ret = ret.mmul(hlparam.w).addiRowVector(hlparam.vbias);
        MathUtil.sigmod(ret);
        return ret;
    }
View Full Code Here

      int features,
      DoubleMatrix trueRatings,
      double matchThreshold,
      boolean implicitPrefs,
      DoubleMatrix truePrefs) {
    DoubleMatrix predictedU = new DoubleMatrix(users, features);
    List<Tuple2<Object, double[]>> userFeatures = model.userFeatures().toJavaRDD().collect();
    for (int i = 0; i < features; ++i) {
      for (Tuple2<Object, double[]> userFeature : userFeatures) {
        predictedU.put((Integer)userFeature._1(), i, userFeature._2()[i]);
      }
    }
    DoubleMatrix predictedP = new DoubleMatrix(products, features);

    List<Tuple2<Object, double[]>> productFeatures =
      model.productFeatures().toJavaRDD().collect();
    for (int i = 0; i < features; ++i) {
      for (Tuple2<Object, double[]> productFeature : productFeatures) {
        predictedP.put((Integer)productFeature._1(), i, productFeature._2()[i]);
      }
    }

    DoubleMatrix predictedRatings = predictedU.mmul(predictedP.transpose());

    if (!implicitPrefs) {
      for (int u = 0; u < users; ++u) {
        for (int p = 0; p < products; ++p) {
          double prediction = predictedRatings.get(u, p);
          double correct = trueRatings.get(u, p);
          Assert.assertTrue(String.format("Prediction=%2.4f not below match threshold of %2.2f",
                  prediction, matchThreshold), Math.abs(prediction - correct) < matchThreshold);
        }
      }
    } else {
      // For implicit prefs we use the confidence-weighted RMSE to test
      // (ref Mahout's implicit ALS tests)
      double sqErr = 0.0;
      double denom = 0.0;
      for (int u = 0; u < users; ++u) {
        for (int p = 0; p < products; ++p) {
          double prediction = predictedRatings.get(u, p);
          double truePref = truePrefs.get(u, p);
          double confidence = 1.0 + /* alpha = */ 1.0 * Math.abs(trueRatings.get(u, p));
          double err = confidence * (truePref - prediction) * (truePref - prediction);
          sqErr += err;
          denom += confidence;
View Full Code Here

  }

  List<LabeledPoint> generateRidgeData(int numPoints, int numFeatures, double std) {
    org.jblas.util.Random.seed(42);
    // Pick weights as random values distributed uniformly in [-0.5, 0.5]
    DoubleMatrix w = DoubleMatrix.rand(numFeatures, 1).subi(0.5);
    return LinearDataGenerator.generateLinearInputAsList(0.0, w.data, numPoints, 42, std);
  }
View Full Code Here

        print_perf("rand_mat_stat", tmin);

        tmin = Long.MAX_VALUE;
        for (int i=0; i<NITER; ++i) {
            t = System.nanoTime();
            DoubleMatrix C = randmatmul_JBLAS(1000);
            assert(0 <= C.get(0));
            t = System.nanoTime()-t;
            if (t < tmin) tmin = t;
        }
        print_perf("rand_mat_mul", tmin);
View Full Code Here

TOP

Related Classes of org.jblas.DoubleMatrix$RowsAsListView

Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc. and is owned by Oracle, Inc. Contact coftware#gmail.com.