Package org.encog.neural.networks.training.propagation.resilient

Examples of org.encog.neural.networks.training.propagation.resilient.ResilientPropagation


    if (method instanceof SVM) {
      train = new SVMTrain((SVM)method, dataSet);
    } if(method instanceof FreeformNetwork ) {
      train = new FreeformResilientPropagation((FreeformNetwork) method, dataSet);
    } else {
      train = new ResilientPropagation((ContainsFlat)method, dataSet);
    }
    EncogUtility.trainToError(train, error);
  }
View Full Code Here


    // train the neural network

    // Random-restart search: train from this.weightTries fresh random weight
    // sets and keep the lowest error observed across all restarts.
    double error = Double.POSITIVE_INFINITY;
    for (int z = 0; z < this.weightTries; z++) {
      network.reset();
      final Propagation train = new ResilientPropagation(network,
          useTraining);
      // Early-stop strategy; NOTE(review): presumably (min improvement 0.001,
      // tolerated cycles 5) — confirm against StopTrainingStrategy javadoc.
      final StopTrainingStrategy strat = new StopTrainingStrategy(0.001,
          5);

      train.addStrategy(strat);
      train.setThreadCount(1); // force single thread mode

      // Iterate up to this.iterations, but bail out early on an external
      // stop request or when the strategy decides training has stalled.
      for (int i = 0; (i < this.iterations) && !getShouldStop()
          && !strat.shouldStop(); i++) {
        train.iteration();
      }

      // Keep the best (lowest) error seen over all restarts.
      error = Math.min(error, train.getError());
    }

    // Release the buffered data set, if one was opened earlier (not visible here).
    if (buffer != null) {
      buffer.close();
    }
View Full Code Here

   * @param trainingSet
   *            The training set to use.
   */
  public static void trainDialog(final BasicNetwork network,
      final MLDataSet trainingSet) {
    final Propagation train = new ResilientPropagation(network, trainingSet);
    train.setThreadCount(0);
    TrainingDialog.trainDialog(train, network, trainingSet);
  }
View Full Code Here

  // Verifies pause/resume: a trainer resumed from a TrainingContinuation must
  // follow the same weight trajectory as a trainer that never paused.
  public void testRPROPCont() {
    MLDataSet trainingSet = XOR.createXORDataSet();
    BasicNetwork net1 = XOR.createUnTrainedXOR();
    BasicNetwork net2 = XOR.createUnTrainedXOR();
   
    ResilientPropagation rprop1 = new ResilientPropagation(net1,trainingSet);
    ResilientPropagation rprop2 = new ResilientPropagation(net2,trainingSet);
   
    // Advance both trainers identically for two iterations.
    rprop1.iteration();
    rprop1.iteration();
   
    rprop2.iteration();
    rprop2.iteration();
   
    // Capture rprop2's internal RPROP state (update values, last gradients).
    TrainingContinuation cont = rprop2.pause();
   
    // A brand-new trainer on the same network, restored from the snapshot.
    ResilientPropagation rprop3 = new ResilientPropagation(net2,trainingSet);
    rprop3.resume(cont);
   
    // One more step each: net1 via the uninterrupted trainer, net2 via the
    // resumed one.
    rprop1.iteration();
    rprop3.iteration();
   
   
    // If resume restored state correctly, both networks end with the same weights.
    for(int i=0;i<net1.getFlat().getWeights().length;i++) {
      Assert.assertEquals(net1.getFlat().getWeights()[i], net2.getFlat().getWeights()[i],0.0001);
    }
View Full Code Here

  // Same pause/resume check as testRPROPCont, but the TrainingContinuation is
  // round-tripped through an EG file on disk before resuming.
  public void testRPROPContPersistEG() {
    MLDataSet trainingSet = XOR.createXORDataSet();
    BasicNetwork net1 = XOR.createUnTrainedXOR();
    BasicNetwork net2 = XOR.createUnTrainedXOR();
   
    ResilientPropagation rprop1 = new ResilientPropagation(net1,trainingSet);
    ResilientPropagation rprop2 = new ResilientPropagation(net2,trainingSet);
   
    // Advance both trainers identically for two iterations.
    rprop1.iteration();
    rprop1.iteration();
   
    rprop2.iteration();
    rprop2.iteration();
   
    TrainingContinuation cont = rprop2.pause();
   
    // Persist the continuation and load it back; the loaded copy must be
    // equivalent to the original snapshot.
    EncogDirectoryPersistence.saveObject(EG_FILENAME, cont);
    TrainingContinuation cont2 = (TrainingContinuation)EncogDirectoryPersistence.loadObject(EG_FILENAME);
   
    ResilientPropagation rprop3 = new ResilientPropagation(net2,trainingSet);
    rprop3.resume(cont2);
   
    rprop1.iteration();
    rprop3.iteration();
   
   
    // Resumed-from-disk training must match uninterrupted training weight-for-weight.
    for(int i=0;i<net1.getFlat().getWeights().length;i++) {
      Assert.assertEquals(net1.getFlat().getWeights()[i], net2.getFlat().getWeights()[i],0.0001);
    }
View Full Code Here

  public void testRPROP() throws Throwable
  {
    MLDataSet trainingData = new BasicMLDataSet(XOR.XOR_INPUT,XOR.XOR_IDEAL);
   
    BasicNetwork network = NetworkUtil.createXORNetworkUntrained();
    MLTrain rprop = new ResilientPropagation(network, trainingData);
    NetworkUtil.testTraining(trainingData,rprop,0.03);
  }
View Full Code Here

    // Noisy XOR training data (10 noisy copies per pattern — NOTE(review):
    // confirm the meaning of the argument against createNoisyXORDataSet).
    MLDataSet trainingData = XOR.createNoisyXORDataSet(10);
   
    BasicNetwork network = NetworkUtil.createXORNetworkUntrained();
   
    // Wrap the data so it can be partitioned, then run 4-fold
    // cross-validated RPROP training down to 20% error.
    final FoldedDataSet folded = new FoldedDataSet(trainingData);
    final MLTrain train = new ResilientPropagation(network, folded);
    final CrossValidationKFold trainFolded = new CrossValidationKFold(train,4);
   
    EncogUtility.trainToError(trainFolded, 0.2);
   
    // The cross-validated model should still solve XOR within the same tolerance.
    XOR.verifyXOR((MLRegression)trainFolded.getMethod(), 0.2);
View Full Code Here

  public void testLimited()
  {
    MLDataSet trainingData = new BasicMLDataSet(XOR.XOR_INPUT,XOR.XOR_IDEAL);
    BasicNetwork network = NetworkUtil.createXORNetworkUntrained();   
   
    ResilientPropagation rprop = new ResilientPropagation(network,trainingData);
    rprop.iteration();
    rprop.iteration();
    network.enableConnection(1, 0, 0, false);
    network.enableConnection(1, 1, 0, false);
   
    Assert.assertTrue(network.getStructure().isConnectionLimited());
   
    Assert.assertEquals(0.0, network.getStructure().getFlat().getWeights()[0], 0.01);
    Assert.assertEquals(0.0, network.getStructure().getFlat().getWeights()[1], 0.01);
    rprop.iteration();
    rprop.iteration();
    rprop.iteration();
    rprop.iteration();
    // these connections were removed, and should not have been "trained"
    Assert.assertEquals(0.0, network.getStructure().getFlat().getWeights()[0], 0.01);
    Assert.assertEquals(0.0, network.getStructure().getFlat().getWeights()[1], 0.01);   
    rprop.finishTraining();
  }
View Full Code Here

 
  public void testRPROPConsistency() {
    MLDataSet training = EncoderTrainingFactory.generateTraining(4, false);
    BasicNetwork network = EncogUtility.simpleFeedForward(4, 2, 0, 4, true);
    (new ConsistentRandomizer(-1,1,50)).randomize(network);
    ResilientPropagation rprop = new ResilientPropagation(network,training);
    for(int i=0;i<5;i++) {
      rprop.iteration();
    }
    Assert.assertArrayEquals(EXPECTED_WEIGHTS1, network.getFlat().getWeights(),0.0001);
    for(int i=0;i<5;i++) {
      rprop.iteration();
    }
    Assert.assertArrayEquals(EXPECTED_WEIGHTS2, network.getFlat().getWeights(),0.0001)
   
    double e = network.calculateError(training);
    Assert.assertEquals(0.0767386807494191, e, 0.00001);
View Full Code Here

    // Vary the bias activation on two of the networks; network1/2/3 are
    // created earlier (not visible in this fragment).
    network2.setBiasActivation(-1);
    network3.setBiasActivation(0.5);
   
    MLDataSet trainingData = new BasicMLDataSet(XOR.XOR_INPUT,XOR.XOR_IDEAL);
   
    // Train all three with RPROP; each must still reach its error target,
    // showing training works regardless of the bias activation chosen.
    MLTrain rprop1 = new ResilientPropagation(network1, trainingData);
    MLTrain rprop2 = new ResilientPropagation(network2, trainingData);
    MLTrain rprop3 = new ResilientPropagation(network3, trainingData);

    NetworkUtil.testTraining(trainingData,rprop1,0.03);
    NetworkUtil.testTraining(trainingData,rprop2,0.01);
    NetworkUtil.testTraining(trainingData,rprop3,0.01);
   
View Full Code Here

TOP

Related Classes of org.encog.neural.networks.training.propagation.resilient.ResilientPropagation

Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc. and owned by Oracle Inc. Contact coftware#gmail.com.