Package: org.apache.mahout.math

Examples of org.apache.mahout.math.Matrix.times()


   * @return The intermediate results of the current layer.
   */
  protected Vector forward(int fromLayer, Vector intermediateOutput) {
    // Weight matrix connecting layer `fromLayer` to the next layer.
    Matrix weightMatrix = weightMatrixList.get(fromLayer);

    // Weighted sum: matrix-vector product of this layer's weights and the previous layer's output.
    Vector vec = weightMatrix.times(intermediateOutput);
    // Apply the layer's squashing (activation) function element-wise.
    vec = vec.assign(NeuralNetworkFunctions.getDoubleFunction(squashingFunctionList.get(fromLayer)));

    // add bias
    // Prepend a constant bias entry of 1 at index 0; the remaining entries are
    // presumably copied from vec below — fragment truncated here, confirm in full source.
    Vector vecWithBias = new DenseVector(vec.size() + 1);
    vecWithBias.set(0, 1);
View Full Code Here


    // All-ones vector used as the multiplicand for both matrices.
    v.assign(1.0);
    // In-memory reference matrix and a distributed counterpart built with the same
    // parameters (numRows=100, nonNullRows=90, numCols=50, entriesPerRow=20, entryMean=1.0).
    // NOTE(review): parameter order inferred from the builder signature elsewhere in this file — confirm.
    Matrix m = SolverTest.randomSequentialAccessSparseMatrix(100, 90, 50, 20, 1.0);
    DistributedRowMatrix dm = randomDistributedMatrix(100, 90, 50, 20, 1.0, false);
    dm.setConf(getConfiguration());

    // The distributed times() must agree with the in-memory result to within EPSILON
    // (squared distance between the two result vectors must be ~0).
    Vector expected = m.times(v);
    Vector actual = dm.times(v);
    assertEquals(0.0, expected.getDistanceSquared(actual), EPSILON);
  }

  @Test
View Full Code Here

                                                      boolean isSymmetric,
                                                      String baseTmpDirSuffix) throws IOException {
    // Temp directory on the test filesystem where the matrix will be persisted.
    Path baseTmpDirPath = getTestTempDirPath(baseTmpDirSuffix);
    // Random sparse matrix built from the (truncated) parameters of this method.
    Matrix c = SolverTest.randomSequentialAccessSparseMatrix(numRows, nonNullRows, numCols, entriesPerRow, entryMean);
    if (isSymmetric) {
      // c * c^T is symmetric by construction.
      c = c.times(c.transpose());
    }
    // Persist the matrix and wrap it as a DistributedRowMatrix.
    return saveToFs(c, baseTmpDirPath);
  }

  private DistributedRowMatrix saveToFs(final Matrix m, Path baseTmpDirPath) throws IOException {
View Full Code Here

    // Populate the first two entries of v with 0.1 (third entry intentionally left out).
    v.set(0, 0.1);
    v.set(1, 0.1);
    //v.set(2, 0.1);
   
    // returns the dot product for each row in the matrix as a vector
    Vector v2 = m.times(v);
   
    // Debug output of selected matrix entries; commented-out lines are disabled probes.
    //System.out.println(">>>>>> " + v.size());
    System.out.println("> " + m.get(0, 0) );
    System.out.println("> " + m.get(0, 1) );
//    System.out.println("> " + m.get(1, 0) );
View Full Code Here

    // Two-element dense vector, both entries 0.1.
    Vector v = new DenseVector(2);
    v.set(0, 0.1);
    v.set(1, 0.1);
   
    // returns the dot product for each row in the matrix as a vector
    Vector v2 = m.times(v);
   
    // Debug output of the first row of m, then of the product vector (printed below — fragment truncated).
    System.out.println("> " + m.get(0, 0) );
    System.out.println("> " + m.get(0, 1) );
   
    System.out.println("> Vector out ------ " );
View Full Code Here

   
    if (momentum != 0) {
   
      //wGradient.muli( 1 - momentum);
     
      // Scale the raw gradient by (1 - momentum); the momentum-weighted history term
      // is presumably combined elsewhere — fragment truncated, TODO confirm in full source.
      // (The commented line above is the in-place jblas original this was ported from.)
      wGradient = wGradient.times( 1 - momentum );
     
    }

   
    // This is added normalization for mini-batching
View Full Code Here

    //Matrix dy = y.sub(p_y_given_x);
    // Error signal: true labels minus predicted label probabilities.
    Matrix dy = labels.minus(p_LabelsGivenInput);

    //connectionWeights = connectionWeights.add(x.transpose().mmul(dy).mul(lr));   
    // Weight update: input^T * dy, scaled by learning rate lr, added to the weights.
    // (Commented lines are the jblas originals this Mahout port was translated from.)
    Matrix baseConnectionUpdate = input.transpose().times(dy);
    this.connectionWeights = this.connectionWeights.plus( baseConnectionUpdate.times(lr) );
   
    //biasTerms = biasTerms.add(dy.columnMeans().mul(lr));
    // Bias update: per-column mean of the error, scaled by lr.
    this.biasTerms = this.biasTerms.plus( MatrixUtils.columnMeans(dy).times(lr) );

  }
View Full Code Here

    //Matrix dy = y.sub(p_y_given_x);
    // Error signal: true labels minus predicted label probabilities.
    Matrix dy = labels.minus(p_LabelsGivenInput);

    //connectionWeights = connectionWeights.add(x.transpose().mmul(dy).mul(lr));   
    // Weight update: input^T * dy, scaled by learning rate lr, added to the weights.
    // (Commented lines are the jblas originals this Mahout port was translated from.)
    Matrix baseConnectionUpdate = input.transpose().times(dy);
    this.connectionWeights = this.connectionWeights.plus( baseConnectionUpdate.times(lr) );
   
    //biasTerms = biasTerms.add(dy.columnMeans().mul(lr));
    // Bias update: per-column mean of the error, scaled by lr.
    this.biasTerms = this.biasTerms.plus( MatrixUtils.columnMeans(dy).times(lr) );

  }
View Full Code Here

                    // Looks like L2 weight decay: the update is scaled element-wise by
                    // (weights * l2) — NOTE(review): confirm against the full method.
                    add = MatrixUtils.elementWiseMultiplication(add, this.preTrainingLayers[ l ].getConnectionWeights().times( l2 ));

                }


                // Gradient-descent step: subtract the lr-scaled update from layer l's weights.
                this.preTrainingLayers[ l ].setConnectionWeights( this.preTrainingLayers[ l ].getConnectionWeights().minus( add.times( lr ) ) );

                // Keep the fine-tuning (hidden) layer's weights in sync with the pre-training layer.
                this.hiddenLayers[ l ].connectionWeights = this.preTrainingLayers[l].getConnectionWeights();
                // Column sums of the next layer's deltas; presumably feeds the bias update below — fragment truncated.
                Matrix deltaColumnSums = MatrixUtils.columnSums( deltas.get( l + 1 ).getSecond() );

                // TODO: check this, needs to happen in place?
View Full Code Here

                // AdaGrad branch: scale the update element-wise by per-parameter adaptive learning rates.
                add = MatrixUtils.elementWiseMultiplication(add,this.preTrainingLayers[ l ].getAdaGrad().getLearningRates( add ));

            } else {

                //add.muli(lr);
                // Plain-SGD branch: scale the update by the fixed learning rate.
                // (Commented line is the in-place jblas original this was ported from.)
                add = add.times( lr );

            }

            //add.divi(input.rows);
            // Optionally normalize the update by the number of input rows (mini-batch size) — body truncated below.
            if(normalizeByInputRows)
View Full Code Here

TOP
Copyright © 2018 www.massapi.com. All rights reserved.
All source code are property of their respective owners. Java is a trademark of Sun Microsystems, Inc and owned by ORACLE Inc. Contact coftware#gmail.com.