Package de.jungblut.math.minimize

Examples of de.jungblut.math.minimize.CostFunction


  /**
   * Trains the network on the given features and outcomes by minimizing the
   * multilayer perceptron cost function with the supplied minimizer.
   *
   * @param features the training features.
   * @param outcome the outcomes for the given features.
   * @param minimizer the minimizer to use for training.
   * @param maxIterations the maximum number of iterations to train.
   * @param lambda the regularization parameter.
   * @param verbose output the progress of training.
   * @return the cost of the training.
   */
  public final double train(DoubleVector[] features, DoubleVector[] outcome,
      Minimizer minimizer, int maxIterations, double lambda, boolean verbose) {
    CostFunction costFunction = new MultilayerPerceptronCostFunction(this,
        features, outcome);
    return trainInternal(minimizer, maxIterations, verbose, costFunction,
        getFoldedThetaVector());
  }
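The snippet above hands the cost function to a Minimizer, which performs the actual optimization. As an illustration of what such a minimizer does with a CostFunction, here is a minimal hand-rolled gradient descent sketch over the convex bowl f(x,y) = x^2 + y^2 (the same function as in the test below). The vector operations subtract and multiply(double), the CostGradientTuple.getCost() accessor, and the import paths outside de.jungblut.math.minimize are assumptions about the library's API, not taken from this page.

import de.jungblut.math.DoubleVector;
import de.jungblut.math.dense.DenseDoubleVector;
import de.jungblut.math.minimize.CostFunction;
import de.jungblut.math.minimize.CostGradientTuple;

public class GradientDescentSketch {

  public static void main(String[] args) {
    // f(x,y) = x^2 + y^2 with its analytic gradient (2x, 2y)
    CostFunction f = new CostFunction() {
      @Override
      public CostGradientTuple evaluateCost(DoubleVector input) {
        double cost = Math.pow(input.get(0), 2) + Math.pow(input.get(1), 2);
        DoubleVector gradient = new DenseDoubleVector(new double[] {
            2 * input.get(0), 2 * input.get(1) });
        return new CostGradientTuple(cost, gradient);
      }
    };

    DoubleVector theta = new DenseDoubleVector(new double[] { -15, 100 });
    double alpha = 0.1; // learning rate, chosen for illustration only
    for (int i = 0; i < 100; i++) {
      CostGradientTuple tuple = f.evaluateCost(theta);
      // step downhill: theta = theta - alpha * gradient
      // (subtract/multiply are assumed DoubleVector operations)
      theta = theta.subtract(tuple.getGradient().multiply(alpha));
    }
    System.out.println("minimum near: " + theta); // converges towards (0, 0)
  }
}

With the per-coordinate update x := x - alpha * 2x = 0.8x, the iterate shrinks geometrically towards the minimum at the origin; a real Minimizer implementation would additionally handle step-size selection and convergence checks.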


  @Test
  public void testNumericalGradient() {
    // our function is f(x,y) = x^2 + y^2
    // its gradient is grad f(x,y) = (2x, 2y)
    final CostFunction inlineFunction = new CostFunction() {
      @Override
      public CostGradientTuple evaluateCost(DoubleVector input) {

        double cost = Math.pow(input.get(0), 2) + Math.pow(input.get(1), 2);
        DenseDoubleVector gradient = new DenseDoubleVector(new double[] {
            input.get(0) * 2, input.get(1) * 2 });

        return new CostGradientTuple(cost, gradient);
      }
    };
    DenseDoubleVector v = new DenseDoubleVector(new double[] { 0, 1 });
    CostGradientTuple cost = inlineFunction.evaluateCost(v);
    DoubleVector numericalGradient = MathUtils.numericalGradient(v,
        inlineFunction);
    assertSmallDiff(numericalGradient, cost.getGradient());

    v = new DenseDoubleVector(new double[] { -15, 100 });
    cost = inlineFunction.evaluateCost(v);

    numericalGradient = MathUtils.numericalGradient(v, inlineFunction);
    assertSmallDiff(numericalGradient, cost.getGradient());
  }
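The test checks the analytic gradient against MathUtils.numericalGradient. Here is a minimal sketch of the central-difference idea behind such a check (not necessarily how MathUtils implements it; DoubleVector.getLength, deepCopy and set, and CostGradientTuple.getCost are assumed accessors):

import de.jungblut.math.DoubleVector;
import de.jungblut.math.dense.DenseDoubleVector;
import de.jungblut.math.minimize.CostFunction;

public class NumericalGradientSketch {

  // Central differences: df/dx_i ~ (f(x + h*e_i) - f(x - h*e_i)) / (2h),
  // where e_i is the i-th unit vector and h is a small step width.
  static DoubleVector numericalGradient(DoubleVector x, CostFunction f) {
    double h = 1e-4;
    double[] grad = new double[x.getLength()];
    for (int i = 0; i < x.getLength(); i++) {
      DoubleVector plus = x.deepCopy();
      DoubleVector minus = x.deepCopy();
      plus.set(i, x.get(i) + h);
      minus.set(i, x.get(i) - h);
      grad[i] = (f.evaluateCost(plus).getCost()
          - f.evaluateCost(minus).getCost()) / (2d * h);
    }
    return new DenseDoubleVector(grad);
  }
}

Comparing this approximation component-wise to the analytic gradient, as assertSmallDiff does above, catches sign errors and miscomputed derivatives in hand-written cost functions.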


