Package org.encog.neural.networks.training.propagation.resilient

Examples of org.encog.neural.networks.training.propagation.resilient.ResilientPropagation


            System.out.print(e + "Problem opening file");
        }
    }

    private int trainNeuralNetwork() {
        final Train train = new ResilientPropagation(network, trainingData);

        int epoch = 1;
        do {
            train.iteration();
            //System.out.println("Epoch #" + epoch + " Error: " + train.getError());
            epoch++;
            if (epoch > 500) {
                return 1;
            }
        } while (train.getError() > 0.01);
        return 0;
    }
View Full Code Here


  // Verifies that pausing RPROP training and resuming it in a brand-new
  // trainer yields exactly the same weight updates as uninterrupted training.
  public void testRPROPCont() {
    MLDataSet trainingSet = XOR.createXORDataSet();
    BasicNetwork net1 = XOR.createUnTrainedXOR();
    BasicNetwork net2 = XOR.createUnTrainedXOR();

    ResilientPropagation rprop1 = new ResilientPropagation(net1,trainingSet);
    ResilientPropagation rprop2 = new ResilientPropagation(net2,trainingSet);

    // Advance both trainers by the same two iterations.
    rprop1.iteration();
    rprop1.iteration();

    rprop2.iteration();
    rprop2.iteration();

    // Snapshot rprop2's internal training state for later resumption.
    TrainingContinuation cont = rprop2.pause();

    // A fresh trainer on net2 restores the paused state via resume().
    ResilientPropagation rprop3 = new ResilientPropagation(net2,trainingSet);
    rprop3.resume(cont);

    // One more iteration each: rprop1 uninterrupted, rprop3 resumed.
    rprop1.iteration();
    rprop3.iteration();


    // rprop3 trains net2 in place, so after three equivalent iterations
    // net1 and net2 should match weight-for-weight if resume() worked.
    for(int i=0;i<net1.getFlat().getWeights().length;i++) {
      Assert.assertEquals(net1.getFlat().getWeights()[i], net2.getFlat().getWeights()[i],0.0001);
    }
View Full Code Here

  // Same as testRPROPCont, but round-trips the continuation object through
  // EG-file persistence before resuming, proving the state survives save/load.
  public void testRPROPContPersistEG() {
    MLDataSet trainingSet = XOR.createXORDataSet();
    BasicNetwork net1 = XOR.createUnTrainedXOR();
    BasicNetwork net2 = XOR.createUnTrainedXOR();

    ResilientPropagation rprop1 = new ResilientPropagation(net1,trainingSet);
    ResilientPropagation rprop2 = new ResilientPropagation(net2,trainingSet);

    // Advance both trainers by the same two iterations.
    rprop1.iteration();
    rprop1.iteration();

    rprop2.iteration();
    rprop2.iteration();

    // Snapshot rprop2's internal training state.
    TrainingContinuation cont = rprop2.pause();

    // Persist the continuation to disk and read it back; EG_FILENAME is
    // a constant defined elsewhere in this test class.
    EncogDirectoryPersistence.saveObject(EG_FILENAME, cont);
    TrainingContinuation cont2 = (TrainingContinuation)EncogDirectoryPersistence.loadObject(EG_FILENAME);

    // Resume training of net2 from the reloaded state.
    ResilientPropagation rprop3 = new ResilientPropagation(net2,trainingSet);
    rprop3.resume(cont2);

    // One more iteration each: rprop1 uninterrupted, rprop3 resumed.
    rprop1.iteration();
    rprop3.iteration();


    // net2 was trained by rprop3, so the weights must match net1's if the
    // persisted continuation restored training state exactly.
    for(int i=0;i<net1.getFlat().getWeights().length;i++) {
      Assert.assertEquals(net1.getFlat().getWeights()[i], net2.getFlat().getWeights()[i],0.0001);
    }
View Full Code Here

    // train the neural network

    // Best (lowest) error seen across all random-restart attempts.
    double error = Double.POSITIVE_INFINITY;
    // Retry with freshly randomized weights to escape bad initializations;
    // weightTries is a field configured elsewhere in this class.
    for (int z = 0; z < this.weightTries; z++) {
      network.reset();
      final Propagation train = new ResilientPropagation(network,
          useTraining);
      // NOTE(review): assumed to stop once improvement stays below 0.001
      // for 5 cycles -- confirm against StopTrainingStrategy's contract.
      final StopTrainingStrategy strat = new StopTrainingStrategy(0.001,
          5);

      train.addStrategy(strat);
      train.setNumThreads(1); // force single thread mode

      // Stop on any of: iteration budget spent, external stop request,
      // or the strategy deciding training has stalled.
      for (int i = 0; (i < this.iterations) && !getShouldStop()
          && !strat.shouldStop(); i++) {
        train.iteration();
      }

      error = Math.min(error, train.getError());
    }

    // buffer is a field owned by the enclosing class; close it if open.
    if (buffer != null) {
      buffer.close();
    }
View Full Code Here

      // create training data
      MLDataSet trainingSet = new BasicMLDataSet(XOR_INPUT, XOR_IDEAL);

      // train the neural network
      final ResilientPropagation train = new ResilientPropagation(network, trainingSet);
      // Select the iRPROP+ variant of the algorithm.
      train.setRPROPType(RPROPType.iRPROPp);

      int epoch = 1;

      // Train until the error reaches 1% or the 1000-epoch budget is spent.
      do {
        train.iteration();
        epoch++;
      } while (train.getError() > 0.01 && epoch<1000 );

      // Runs that needed (nearly) the full budget count as failures;
      // failureCount is accumulated by the enclosing loop.
      if( epoch>900 ) {
        failureCount++;
      }
    }
View Full Code Here

    return network;
  }
 
  public void train(BasicNetwork network,MLDataSet training)
  {
    final MLTrain train = new ResilientPropagation(network, training);

    int epoch = 1;

    do {
      train.iteration();
      System.out
          .println("Epoch #" + epoch + " Error:" + train.getError());
      epoch++;
    } while(train.getError() > MAX_ERROR);
  }
View Full Code Here

   * @param trainingSet
   *            The training set to use.
   */
  public static void trainDialog(final BasicNetwork network,
      final MLDataSet trainingSet) {
    final Propagation train = new ResilientPropagation(network, trainingSet);
    train.setNumThreads(0);
    TrainingDialog.trainDialog(train, network, trainingSet);
  }
View Full Code Here

   * @param minutes
   *            The number of minutes to train for.
   */
  public static void trainConsole(final BasicNetwork network,
      final MLDataSet trainingSet, final int minutes) {
    final Propagation train = new ResilientPropagation(network, trainingSet);
    train.setNumThreads(0);
    EncogUtility.trainConsole(train, network, trainingSet, minutes);
  }
View Full Code Here

    MLTrain train;

    // SVMs require their dedicated trainer; any other method is assumed
    // to expose a flat network and gets resilient propagation.
    if (method instanceof SVM) {
      train = new SVMTrain((SVM)method, dataSet);
    } else {
      // NOTE(review): unchecked cast -- a method that is neither an SVM
      // nor a ContainsFlat will throw ClassCastException here.
      train = new ResilientPropagation((ContainsFlat)method, dataSet);
    }
    EncogUtility.trainToError(train, error);
  }
View Full Code Here

    network.reset();

    MLDataSet trainingSet = new BasicMLDataSet(XOR_INPUT, XOR_IDEAL);

    // train the neural network
    final MLTrain train = new ResilientPropagation(network, trainingSet);

    // No iteration cap: loops until the error drops below 0.9%.
    do {
      train.iteration();
    } while (train.getError() > 0.009);

    double e = network.calculateError(trainingSet);
    // NOTE(review): "traiined" is a typo in the runtime message below;
    // left untouched here because it is program output, not a comment.
    System.out.println("Network traiined to error: " + e);

    System.out.println("Saving network");

TOP

Related Classes of org.encog.neural.networks.training.propagation.resilient.ResilientPropagation

Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc., now owned by Oracle, Inc. Contact coftware#gmail.com.