Package org.encog.examples.neural.predict.sunspot

Source Code of org.encog.examples.neural.predict.sunspot.PredictSunspot

/*
* Encog(tm) Examples v3.0 - Java Version
* http://www.heatonresearch.com/encog/
* http://code.google.com/p/encog-java/
* Copyright 2008-2011 Heaton Research, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
*     http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*  
* For more information on Heaton Research copyrights, licenses
* and trademarks visit:
* http://www.heatonresearch.com/copyright
*/
package org.encog.examples.neural.predict.sunspot;

import java.text.NumberFormat;

import org.encog.ml.data.MLData;
import org.encog.ml.data.MLDataSet;
import org.encog.ml.data.basic.BasicMLData;
import org.encog.ml.train.MLTrain;
import org.encog.neural.networks.BasicNetwork;
import org.encog.neural.networks.layers.BasicLayer;
import org.encog.neural.networks.training.propagation.resilient.ResilientPropagation;
import org.encog.util.EngineArray;
import org.encog.util.arrayutil.NormalizeArray;
import org.encog.util.arrayutil.TemporalWindowArray;

public class PredictSunspot {

  public final static double[] SUNSPOTS = {
            0.0262, 0.0575, 0.0837, 0.1203, 0.1883, 0.3033,
            0.1517, 0.1046, 0.0523, 0.0418, 0.0157, 0.0000,
            0.0000, 0.0105, 0.0575, 0.1412, 0.2458, 0.3295,
            0.3138, 0.2040, 0.1464, 0.1360, 0.1151, 0.0575,
            0.1098, 0.2092, 0.4079, 0.6381, 0.5387, 0.3818,
            0.2458, 0.1831, 0.0575, 0.0262, 0.0837, 0.1778,
            0.3661, 0.4236, 0.5805, 0.5282, 0.3818, 0.2092,
            0.1046, 0.0837, 0.0262, 0.0575, 0.1151, 0.2092,
            0.3138, 0.4231, 0.4362, 0.2495, 0.2500, 0.1606,
            0.0638, 0.0502, 0.0534, 0.1700, 0.2489, 0.2824,
            0.3290, 0.4493, 0.3201, 0.2359, 0.1904, 0.1093,
            0.0596, 0.1977, 0.3651, 0.5549, 0.5272, 0.4268,
            0.3478, 0.1820, 0.1600, 0.0366, 0.1036, 0.4838,
            0.8075, 0.6585, 0.4435, 0.3562, 0.2014, 0.1192,
            0.0534, 0.1260, 0.4336, 0.6904, 0.6846, 0.6177,
            0.4702, 0.3483, 0.3138, 0.2453, 0.2144, 0.1114,
            0.0837, 0.0335, 0.0214, 0.0356, 0.0758, 0.1778,
            0.2354, 0.2254, 0.2484, 0.2207, 0.1470, 0.0528,
            0.0424, 0.0131, 0.0000, 0.0073, 0.0262, 0.0638,
            0.0727, 0.1851, 0.2395, 0.2150, 0.1574, 0.1250,
            0.0816, 0.0345, 0.0209, 0.0094, 0.0445, 0.0868,
            0.1898, 0.2594, 0.3358, 0.3504, 0.3708, 0.2500,
            0.1438, 0.0445, 0.0690, 0.2976, 0.6354, 0.7233,
            0.5397, 0.4482, 0.3379, 0.1919, 0.1266, 0.0560,
            0.0785, 0.2097, 0.3216, 0.5152, 0.6522, 0.5036,
            0.3483, 0.3373, 0.2829, 0.2040, 0.1077, 0.0350,
            0.0225, 0.1187, 0.2866, 0.4906, 0.5010, 0.4038,
            0.3091, 0.2301, 0.2458, 0.1595, 0.0853, 0.0382,
            0.1966, 0.3870, 0.7270, 0.5816, 0.5314, 0.3462,
            0.2338, 0.0889, 0.0591, 0.0649, 0.0178, 0.0314,
            0.1689, 0.2840, 0.3122, 0.3332, 0.3321, 0.2730,
            0.1328, 0.0685, 0.0356, 0.0330, 0.0371, 0.1862,
            0.3818, 0.4451, 0.4079, 0.3347, 0.2186, 0.1370,
            0.1396, 0.0633, 0.0497, 0.0141, 0.0262, 0.1276,
            0.2197, 0.3321, 0.2814, 0.3243, 0.2537, 0.2296,
            0.0973, 0.0298, 0.0188, 0.0073, 0.0502, 0.2479,
            0.2986, 0.5434, 0.4215, 0.3326, 0.1966, 0.1365,
            0.0743, 0.0303, 0.0873, 0.2317, 0.3342, 0.3609,
            0.4069, 0.3394, 0.1867, 0.1109, 0.0581, 0.0298,
            0.0455, 0.1888, 0.4168, 0.5983, 0.5732, 0.4644,
            0.3546, 0.2484, 0.1600, 0.0853, 0.0502, 0.1736,
            0.4843, 0.7929, 0.7128, 0.7045, 0.4388, 0.3630,
            0.1647, 0.0727, 0.0230, 0.1987, 0.7411, 0.9947,
            0.9665, 0.8316, 0.5873, 0.2819, 0.1961, 0.1459,
            0.0534, 0.0790, 0.2458, 0.4906, 0.5539, 0.5518,
            0.5465, 0.3483, 0.3603, 0.1987, 0.1804, 0.0811,
            0.0659, 0.1428, 0.4838, 0.8127
          };
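
  // 280 sunspot values, one per year beginning in STARTING_YEAR (1700), already
  // scaled into the 0..1 range. The constants below split the series: samples up
  // to TRAIN_END are used for training, and samples from EVALUATE_START onward
  // are held back to evaluate the trained network.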
 
  public final static int STARTING_YEAR = 1700;
  public final static int WINDOW_SIZE = 30;
  public final static int TRAIN_START = WINDOW_SIZE;
  public final static int TRAIN_END = 259;
  public final static int EVALUATE_START = 260;
  public final static int EVALUATE_END = SUNSPOTS.length-1;
 
  /**
   * This error target really should be lowered; it is set here at a level
   * that lets the network train in under a minute.
   */
  public final static double MAX_ERROR = 0.01;
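
  // normalizedSunspots holds the series rescaled to the requested range;
  // closedLoopSunspots starts as a copy of it and is progressively overwritten
  // with the network's own predictions during evaluation.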

  private double[] normalizedSunspots;
  private double[] closedLoopSunspots;
 
  public void normalizeSunspots(double lo, double hi) {
        NormalizeArray norm = new NormalizeArray();
        norm.setNormalizedHigh(hi);
        norm.setNormalizedLow(lo);

        // create arrays to hold the normalized sunspots
        normalizedSunspots = norm.process(SUNSPOTS);
        closedLoopSunspots = EngineArray.arrayCopy(normalizedSunspots);

  }
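
  // Build the training set with a sliding temporal window: each group of
  // WINDOW_SIZE consecutive normalized values becomes one input vector, and
  // the single value that follows it becomes the ideal output.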
 
  public MLDataSet generateTraining() {
   
    TemporalWindowArray temp = new TemporalWindowArray(WINDOW_SIZE, 1);
    temp.analyze(this.normalizedSunspots);
    return temp.process(this.normalizedSunspots);
  }
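
  // A simple feedforward network: WINDOW_SIZE input neurons, one hidden layer
  // of 10 neurons, and a single output neuron. reset() randomizes the weights
  // before training.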
 
  public BasicNetwork createNetwork()
  {
    BasicNetwork network = new BasicNetwork();
    network.addLayer(new BasicLayer(WINDOW_SIZE));
    network.addLayer(new BasicLayer(10));
    network.addLayer(new BasicLayer(1));
    network.getStructure().finalizeStructure();
    network.reset();
    return network;
  }
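
  // Train with resilient propagation (RPROP) until the training error drops
  // below MAX_ERROR. Note there is no upper bound on epochs, so a stricter
  // MAX_ERROR can make this loop run for a long time.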
 
  public void train(BasicNetwork network,MLDataSet training)
  {
    final MLTrain train = new ResilientPropagation(network, training);

    int epoch = 1;

    do {
      train.iteration();
      System.out
          .println("Epoch #" + epoch + " Error:" + train.getError());
      epoch++;
    } while(train.getError() > MAX_ERROR);
  }
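
  // Compare two kinds of forecast for each evaluation year: a one-step
  // prediction computed from the actual history, and a "closed loop"
  // prediction where the network's own earlier outputs are fed back in as
  // inputs, so errors can compound over time.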
 
  public void predict(BasicNetwork network)
  {
    NumberFormat f = NumberFormat.getNumberInstance();
    f.setMaximumFractionDigits(4);
    f.setMinimumFractionDigits(4);
   
    System.out.println("Year\tActual\tPredict\tClosed Loop Predict");
   
    for(int year=EVALUATE_START;year<EVALUATE_END;year++)
    {
      // calculate based on actual data
      MLData input = new BasicMLData(WINDOW_SIZE);
      for(int i=0;i<input.size();i++)
      {
        input.setData(i,this.normalizedSunspots[(year-WINDOW_SIZE)+i]);
      }
      MLData output = network.compute(input);
      double prediction = output.getData(0);
      this.closedLoopSunspots[year] = prediction;
     
      // calculate "closed loop", based on predicted data
      for(int i=0;i<input.size();i++)
      {
        input.setData(i,this.closedLoopSunspots[(year-WINDOW_SIZE)+i]);
      }
      output = network.compute(input);
      double closedLoopPrediction = output.getData(0);
     
      // display
      System.out.println((STARTING_YEAR+year)
          +"\t"+f.format(this.normalizedSunspots[year])
          +"\t"+f.format(prediction)
          +"\t"+f.format(closedLoopPrediction)
      );
     
    }
  }
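
  // Normalizing into [0.1, 0.9] rather than the full [0, 1] range keeps the
  // target values away from the flat extremes of the activation function,
  // which generally helps training converge faster.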
 
  public void run()
  {
    normalizeSunspots(0.1,0.9);
    BasicNetwork network = createNetwork();
    MLDataSet training = generateTraining();
    train(network,training);
    predict(network);
   
  }
 
  public static void main(String[] args)
  {
    PredictSunspot sunspot = new PredictSunspot();
    sunspot.run();
  }

}