Package org.encog.mathutil.randomize

Examples of org.encog.mathutil.randomize.Randomizer
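
Randomizer is the common interface behind Encog's weight-initialization classes (RangeRandomizer, ConsistentRandomizer, Distort, NguyenWidrowRandomizer, FanInRandomizer). An implementation can be applied to an entire BasicNetwork or to a single double value, and the excerpts below show both styles. As a starting point, here is a minimal sketch of the network-wide usage; the import paths follow the usual Encog 3.x package layout and are assumed, not copied from the excerpts.

  import org.encog.mathutil.randomize.ConsistentRandomizer;
  import org.encog.mathutil.randomize.Randomizer;
  import org.encog.neural.networks.BasicNetwork;
  import org.encog.util.simple.EncogUtility;

  public final class RandomizerIntro {
    public static void main(final String[] args) {
      // Build a small 2-4-1 feedforward network (no second hidden layer,
      // sigmoid rather than tanh activations).
      final BasicNetwork network = EncogUtility.simpleFeedForward(2, 4, 0, 1, false);

      // ConsistentRandomizer produces the same pseudo-random weights on
      // every run for a given range, which keeps experiments repeatable.
      final Randomizer randomizer = new ConsistentRandomizer(-1, 1);
      randomizer.randomize(network);

      System.out.println(network.dumpWeights());
    }
  }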


  /**
   * Randomize the weights connected to a single neuron (both incoming and
   * outgoing), either by drawing fresh values from a range or by distorting
   * the existing values.
   *
   * @param targetLayer the layer that contains the neuron to randomize
   * @param neuron      the index of the neuron within that layer
   * @param useRange    true to assign fresh values with RangeRandomizer(low, high);
   *                    false to perturb the current weights with Distort(percent)
   * @param low         lower bound of the range (used when useRange is true)
   * @param high        upper bound of the range (used when useRange is true)
   * @param usePercent  indicates percent-based distortion; not examined here,
   *                    since distortion is chosen whenever useRange is false
   * @param percent     the distortion amount (used when useRange is false)
   */
  private void randomizeNeuron(final int targetLayer, final int neuron,
      final boolean useRange, final double low, final double high,
      final boolean usePercent, final double percent) {

    final Randomizer d;

    if (useRange) {
      d = new RangeRandomizer(low, high);
    } else {
      d = new Distort(percent);
    }

    // check for errors
    this.network.validateNeuron(targetLayer, neuron);

    // access the flat network
    final FlatNetwork flat = this.network.getStructure().getFlat();

    // allocate new weights now that we know how big the new weights will be
    final double[] newWeights = new double[flat.getWeights().length];

    // construct the new weights
    int weightsIndex = 0;

    for (int fromLayer = flat.getLayerCounts().length - 2; fromLayer >= 0; fromLayer--) {
      final int fromNeuronCount = this.network
          .getLayerTotalNeuronCount(fromLayer);
      final int toNeuronCount = this.network
          .getLayerNeuronCount(fromLayer + 1);
      final int toLayer = fromLayer + 1;

      for (int toNeuron = 0; toNeuron < toNeuronCount; toNeuron++) {
        for (int fromNeuron = 0; fromNeuron < fromNeuronCount; fromNeuron++) {
          boolean randomize = false;
          if ((toLayer == targetLayer) && (toNeuron == neuron)) {
            randomize = true;
          } else if ((fromLayer == targetLayer)
              && (fromNeuron == neuron)) {
            randomize = true;
          }

          double weight = this.network.getWeight(fromLayer,
              fromNeuron, toNeuron);

          if (randomize) {
            weight = d.randomize(weight);
          }

          newWeights[weightsIndex++] = weight;
        }
      }
View Full Code Here
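
The method above picks one of two Randomizer implementations and then applies it weight by weight through randomize(double). The same per-value calls in isolation, as a small sketch (constructor signatures taken from the excerpt, import paths assumed):

  import org.encog.mathutil.randomize.Distort;
  import org.encog.mathutil.randomize.RangeRandomizer;

  // Replace a weight with a fresh uniform value from [-1, 1].
  double weight = new RangeRandomizer(-1, 1).randomize(0.25);

  // Or keep the weight but perturb it; the excerpt forwards its
  // percent parameter straight to Distort's constructor.
  weight = new Distort(0.10).randomize(weight);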


  }

  public static FlatNetwork createNetwork() {
    BasicNetwork network = EncogUtility
        .simpleFeedForward(2, 4, 0, 1, false);
    Randomizer randomizer = new ConsistentRandomizer(-1, 1);
    randomizer.randomize(network);
    return network.getStructure().getFlat().clone();
  }
View Full Code Here
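
ConsistentRandomizer is a good fit for test fixtures like createNetwork() above: it fills the network with the same pseudo-random weights on every run, so tests that train the resulting FlatNetwork always start from a known state. The three-argument form used in the next excerpt additionally takes an explicit seed.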

  public void testCompleteTrain()
  {
    MLDataSet trainingData = new BasicMLDataSet(XOR.XOR_INPUT,XOR.XOR_IDEAL);
   
    BasicNetwork network = EncogUtility.simpleFeedForward(2, 5, 7, 1, true);
    Randomizer randomizer = new ConsistentRandomizer(-1, 1, 19);
    //randomizer.randomize(network);
    System.out.println(network.dumpWeights());
    MLTrain rprop = new ResilientPropagation(network, trainingData);
    int iteration = 0;
    do {
View Full Code Here
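
The do-loop is cut off at the excerpt boundary. A typical continuation for an RPROP loop of this shape (a sketch only, not the elided original; the stopping thresholds are illustrative):

    do {
      rprop.iteration();
      iteration++;
      System.out.println("Iteration #" + iteration + " error: " + rprop.getError());
    } while (rprop.getError() > 0.01 && iteration < 5000);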

    g.randomize((BasicNetwork) this.method);
    setDirty(true);
  }

  private void optionRandomize(RandomizeNetworkDialog dialog) {
    Randomizer r = null;

    switch (dialog.getType().getSelectedIndex()) {
    case 0: // Random
      r = new RangeRandomizer(dialog.getLow().getValue(), dialog
          .getHigh().getValue());
      break;
    case 1: // Nguyen-Widrow
      r = new NguyenWidrowRandomizer(dialog.getLow().getValue(), dialog
          .getHigh().getValue());
      break;
    case 2: // Fan in
      r = new FanInRandomizer(dialog.getLow().getValue(), dialog
          .getHigh().getValue(), false);
      break;
    }

    if (r != null) {
      r.randomize((BasicNetwork) this.method);
      setDirty(true);
    }
  }
View Full Code Here
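
The switch above maps the dialog selection onto three initialization strategies: case 0 fills every weight with a uniform value from the chosen low/high range, case 1 applies Nguyen-Widrow initialization (which spreads the hidden neurons' active regions across the input space), and case 2 scales the random range by each neuron's fan-in. If the selected index matches none of the cases, r stays null and the network is left untouched.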


    final int particles = holder.getInt(
        MLTrainFactory.PROPERTY_PARTICLES, false, 20);
   
    CalculateScore score = new TrainingSetScore(training);
    Randomizer randomizer = new NguyenWidrowRandomizer();
   
    final MLTrain train = new NeuralPSO((BasicNetwork)method,randomizer,score,particles);
   
    return train;
  }
View Full Code Here
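
In this factory method the Randomizer is not applied directly to the network; it is handed to NeuralPSO, which uses it to randomize the swarm's initial particles. The no-argument NguyenWidrowRandomizer relies on its default range, and the holder.getInt(...) call falls back to 20 particles when the training arguments do not specify PROPERTY_PARTICLES.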
