Package org.encog.neural.flat

Examples of org.encog.neural.flat.FlatNetwork

The fragments below are drawn from the Encog 3 codebase and show a FlatNetwork being persisted to and from the .EG format, selectively pruned and randomized, reindexed, flattened from a BasicNetwork, and trained with backpropagation.


  /**
   * {@inheritDoc}
   */
  @Override
  public final Object read(final InputStream is) {
    final BasicNetwork result = new BasicNetwork();
    final FlatNetwork flat = new FlatNetwork();
    final EncogReadHelper in = new EncogReadHelper(is);
    EncogFileSection section;

    while ((section = in.readNextSection()) != null) {
      if (section.getSectionName().equals("BASIC")
          && section.getSubSectionName().equals("PARAMS")) {
        final Map<String, String> params = section.parseParams();
        result.getProperties().putAll(params);
      } else if (section.getSectionName().equals("BASIC")
          && section.getSubSectionName().equals("NETWORK")) {
        final Map<String, String> params = section.parseParams();

        flat.setBeginTraining(EncogFileSection.parseInt(params,
            BasicNetwork.TAG_BEGIN_TRAINING));
        flat.setConnectionLimit(EncogFileSection.parseDouble(params,
            BasicNetwork.TAG_CONNECTION_LIMIT));
        flat.setContextTargetOffset(EncogFileSection.parseIntArray(
            params, BasicNetwork.TAG_CONTEXT_TARGET_OFFSET));
        flat.setContextTargetSize(EncogFileSection.parseIntArray(
            params, BasicNetwork.TAG_CONTEXT_TARGET_SIZE));
        flat.setEndTraining(EncogFileSection.parseInt(params,
            BasicNetwork.TAG_END_TRAINING));
        flat.setHasContext(EncogFileSection.parseBoolean(params,
            BasicNetwork.TAG_HAS_CONTEXT));
        flat.setInputCount(EncogFileSection.parseInt(params,
            PersistConst.INPUT_COUNT));
        flat.setLayerCounts(EncogFileSection.parseIntArray(params,
            BasicNetwork.TAG_LAYER_COUNTS));
        flat.setLayerFeedCounts(EncogFileSection.parseIntArray(params,
            BasicNetwork.TAG_LAYER_FEED_COUNTS));
        flat.setLayerContextCount(EncogFileSection.parseIntArray(
            params, BasicNetwork.TAG_LAYER_CONTEXT_COUNT));
        flat.setLayerIndex(EncogFileSection.parseIntArray(params,
            BasicNetwork.TAG_LAYER_INDEX));
        flat.setLayerOutput(EncogFileSection.parseDoubleArray(params,
            PersistConst.OUTPUT));
        flat.setLayerSums(new double[flat.getLayerOutput().length]);
        flat.setOutputCount(EncogFileSection.parseInt(params,
            PersistConst.OUTPUT_COUNT));
        flat.setWeightIndex(EncogFileSection.parseIntArray(params,
            BasicNetwork.TAG_WEIGHT_INDEX));
        flat.setWeights(EncogFileSection.parseDoubleArray(params,
            PersistConst.WEIGHTS));
        flat.setBiasActivation(EncogFileSection.parseDoubleArray(
            params, BasicNetwork.TAG_BIAS_ACTIVATION));
      } else if (section.getSectionName().equals("BASIC")
          && section.getSubSectionName().equals("ACTIVATION")) {
        int index = 0;

        flat.setActivationFunctions(new ActivationFunction[flat
            .getLayerCounts().length]);

        for (final String line : section.getLines()) {
          ActivationFunction af = null;
          final List<String> cols = EncogFileSection
              .splitColumns(line);
          final String name = "org.encog.engine.network.activation."
              + cols.get(0);
          try {
            final Class<?> clazz = Class.forName(name);
            af = (ActivationFunction) clazz.newInstance();
          } catch (final ClassNotFoundException e) {
            throw new PersistError(e);
          } catch (final InstantiationException e) {
            throw new PersistError(e);
          } catch (final IllegalAccessException e) {
            throw new PersistError(e);
          }

          for (int i = 0; i < af.getParamNames().length; i++) {
            af.setParam(i,
                CSVFormat.EG_FORMAT.parse(cols.get(i + 1)));
          }

          flat.getActivationFunctions()[index++] = af;
        }
      }
    }

    result.getStructure().setFlat(flat);

    return result;
  }
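
The read method above rebuilds a FlatNetwork field by field from the .EG text format and attaches it to a BasicNetwork. In everyday use you rarely drive a persistor directly; the usual entry point is EncogDirectoryPersistence. A minimal round-trip sketch (the file name and layer sizes are arbitrary choices of mine):

import java.io.File;

import org.encog.engine.network.activation.ActivationSigmoid;
import org.encog.neural.networks.BasicNetwork;
import org.encog.neural.networks.layers.BasicLayer;
import org.encog.persist.EncogDirectoryPersistence;

public class PersistRoundTrip {
  public static void main(String[] args) {
    // build a small 2-3-1 network; sizes are arbitrary for the demo
    BasicNetwork network = new BasicNetwork();
    network.addLayer(new BasicLayer(null, true, 2));
    network.addLayer(new BasicLayer(new ActivationSigmoid(), true, 3));
    network.addLayer(new BasicLayer(new ActivationSigmoid(), false, 1));
    network.getStructure().finalizeStructure();
    network.reset();

    // saveObject/loadObject route through persistors like the one above
    File file = new File("network.eg"); // hypothetical path
    EncogDirectoryPersistence.saveObject(file, network);
    BasicNetwork loaded =
        (BasicNetwork) EncogDirectoryPersistence.loadObject(file);

    // the flat network is reconstructed as part of loading
    System.out.println("Input count: "
        + loaded.getStructure().getFlat().getInputCount());
  }
}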


  /**
   * {@inheritDoc}
   */
  @Override
  public final void save(final OutputStream os, final Object obj) {
    final EncogWriteHelper out = new EncogWriteHelper(os);
    final BasicNetwork net = (BasicNetwork) obj;
    final FlatNetwork flat = net.getStructure().getFlat();
    out.addSection("BASIC");
    out.addSubSection("PARAMS");
    out.addProperties(net.getProperties());
    out.addSubSection("NETWORK");

    out.writeProperty(BasicNetwork.TAG_BEGIN_TRAINING,
        flat.getBeginTraining());
    out.writeProperty(BasicNetwork.TAG_CONNECTION_LIMIT,
        flat.getConnectionLimit());
    out.writeProperty(BasicNetwork.TAG_CONTEXT_TARGET_OFFSET,
        flat.getContextTargetOffset());
    out.writeProperty(BasicNetwork.TAG_CONTEXT_TARGET_SIZE,
        flat.getContextTargetSize());
    out.writeProperty(BasicNetwork.TAG_END_TRAINING, flat.getEndTraining());
    out.writeProperty(BasicNetwork.TAG_HAS_CONTEXT, flat.getHasContext());
    out.writeProperty(PersistConst.INPUT_COUNT, flat.getInputCount());
    out.writeProperty(BasicNetwork.TAG_LAYER_COUNTS, flat.getLayerCounts());
    out.writeProperty(BasicNetwork.TAG_LAYER_FEED_COUNTS,
        flat.getLayerFeedCounts());
    out.writeProperty(BasicNetwork.TAG_LAYER_CONTEXT_COUNT,
        flat.getLayerContextCount());
    out.writeProperty(BasicNetwork.TAG_LAYER_INDEX, flat.getLayerIndex());
    out.writeProperty(PersistConst.OUTPUT, flat.getLayerOutput());
    out.writeProperty(PersistConst.OUTPUT_COUNT, flat.getOutputCount());
    out.writeProperty(BasicNetwork.TAG_WEIGHT_INDEX, flat.getWeightIndex());
    out.writeProperty(PersistConst.WEIGHTS, flat.getWeights());
    out.writeProperty(BasicNetwork.TAG_BIAS_ACTIVATION,
        flat.getBiasActivation());
    out.addSubSection("ACTIVATION");
    for (final ActivationFunction af : flat.getActivationFunctions()) {
      out.addColumn(af.getClass().getSimpleName());
      for (int i = 0; i < af.getParams().length; i++) {
        out.addColumn(af.getParams()[i]);
      }
      out.writeLine();
    }

    out.flush();
  }
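
If these two methods belong to Encog's PersistBasicNetwork persistor, as the section names and tags suggest (the class name itself is not visible in the excerpt), the writer can also be driven directly against a stream. A hedged sketch:

import java.io.FileOutputStream;
import java.io.IOException;

import org.encog.neural.networks.BasicNetwork;
import org.encog.neural.networks.PersistBasicNetwork;

public class DirectSave {
  /** Write a finalized network straight to a path via the persistor. */
  public static void save(BasicNetwork network, String path)
      throws IOException {
    PersistBasicNetwork persistor = new PersistBasicNetwork();
    try (FileOutputStream fos = new FileOutputStream(path)) {
      persistor.save(fos, network);
    }
  }
}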

          "New neuron count is either a decrease or no change: "
              + neuronCount);
    }

    // access the flat network
    final FlatNetwork flat = this.network.getStructure().getFlat();
    final double[] oldWeights = flat.getWeights();

    // first find out how many connections there will be after this prune.
    int connections = oldWeights.length;
    int inBoundConnections = 0;
    int outBoundConnections = 0;

    // are connections added from the previous layer?
    if (targetLayer > 0) {
      inBoundConnections = this.network
          .getLayerTotalNeuronCount(targetLayer - 1);
      connections += inBoundConnections * increaseBy;
    }

    // are there connections added from the next layer?
    if (targetLayer < (this.network.getLayerCount() - 1)) {
      outBoundConnections = this.network
          .getLayerNeuronCount(targetLayer + 1);
      connections += outBoundConnections * increaseBy;
    }

    // increase layer count
    final int flatLayer = this.network.getLayerCount() - targetLayer - 1;
    flat.getLayerCounts()[flatLayer] += increaseBy;
    flat.getLayerFeedCounts()[flatLayer] += increaseBy;

    // allocate new weights now that we know how big the new weights will be
    final double[] newWeights = new double[connections];

    // construct the new weights
    int weightsIndex = 0;

    for (int fromLayer = flat.getLayerCounts().length - 2; fromLayer >= 0; fromLayer--) {
      final int fromNeuronCount = this.network
          .getLayerTotalNeuronCount(fromLayer);
      final int toNeuronCount = this.network
          .getLayerNeuronCount(fromLayer + 1);
      final int toLayer = fromLayer + 1;

      for (int toNeuron = 0; toNeuron < toNeuronCount; toNeuron++) {
        for (int fromNeuron = 0; fromNeuron < fromNeuronCount; fromNeuron++) {
          if ((toLayer == targetLayer)
              && (toNeuron >= oldNeuronCount)) {
            newWeights[weightsIndex++] = 0;
          } else if ((fromLayer == targetLayer)
              && (fromNeuron > oldNeuronCount)) {
            newWeights[weightsIndex++] = 0;
          } else {
            newWeights[weightsIndex++] = this.network.getWeight(
                fromLayer, fromNeuron, toNeuron);
          }
        }
      }
    }

    // swap in the new weights
    flat.setWeights(newWeights);

    // reindex
    reindexNetwork();
  }
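
This fragment is the neuron-increase path of Encog's selective pruning support; the new neurons' weights are zeroed so they start out inert. Assuming the enclosing class is org.encog.neural.prune.PruneSelective (an inference from the code, not stated in the excerpt), a usage sketch that grows a hidden layer:

import org.encog.neural.networks.BasicNetwork;
import org.encog.neural.prune.PruneSelective;

public class GrowExample {
  /** Grow the hidden layer (layer 1) to the requested neuron count. */
  public static void growHidden(BasicNetwork network, int neurons) {
    PruneSelective prune = new PruneSelective(network);
    // dispatches to the increase logic above when neurons > current count
    prune.changeNeuronCount(1, neurons);
  }
}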

      // excerpt begins mid-method, inside a guard that rejects removing
      // a layer's last neuron
      throw new NeuralNetworkError(
          "A layer must have at least a single neuron. If you want to remove the entire layer you must create a new network.");
    }

    // access the flat network
    final FlatNetwork flat = this.network.getStructure().getFlat();
    final double[] oldWeights = flat.getWeights();

    // first find out how many connections there will be after this prune.
    int connections = oldWeights.length;
    int inBoundConnections = 0;
    int outBoundConnections = 0;

    // are connections removed from the previous layer?
    if (targetLayer > 0) {
      inBoundConnections = this.network
          .getLayerTotalNeuronCount(targetLayer - 1);
      connections -= inBoundConnections;
    }

    // are there connections removed from the next layer?
    if (targetLayer < (this.network.getLayerCount() - 1)) {
      outBoundConnections = this.network
          .getLayerNeuronCount(targetLayer + 1);
      connections -= outBoundConnections;
    }

    // allocate new weights now that we know how big the new weights will be
    final double[] newWeights = new double[connections];

    // construct the new weights
    int weightsIndex = 0;

    for (int fromLayer = flat.getLayerCounts().length - 2; fromLayer >= 0; fromLayer--) {
      final int fromNeuronCount = this.network
          .getLayerTotalNeuronCount(fromLayer);
      final int toNeuronCount = this.network
          .getLayerNeuronCount(fromLayer + 1);
      final int toLayer = fromLayer + 1;

      for (int toNeuron = 0; toNeuron < toNeuronCount; toNeuron++) {
        for (int fromNeuron = 0; fromNeuron < fromNeuronCount; fromNeuron++) {
          boolean skip = false;
          if ((toLayer == targetLayer) && (toNeuron == neuron)) {
            skip = true;
          } else if ((fromLayer == targetLayer)
              && (fromNeuron == neuron)) {
            skip = true;
          }

          if (!skip) {
            newWeights[weightsIndex++] = this.network.getWeight(
                fromLayer, fromNeuron, toNeuron);
          }
        }
      }
    }

    // swap in the new weights
    flat.setWeights(newWeights);

    // decrease layer count
    final int flatLayer = this.network.getLayerCount() - targetLayer - 1;
    flat.getLayerCounts()[flatLayer]--;
    flat.getLayerFeedCounts()[flatLayer]--;

    // reindex
    reindexNetwork();

  }
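
The decrease path is reached the same way, by asking for a smaller count; the doomed neuron's inbound and outbound weights are simply skipped while copying. A hedged sketch that shrinks a hidden layer by one neuron, under the same PruneSelective assumption:

import org.encog.neural.networks.BasicNetwork;
import org.encog.neural.prune.PruneSelective;

public class ShrinkExample {
  /** Remove one neuron from the hidden layer (layer 1). */
  public static void shrinkHidden(BasicNetwork network) {
    int current = network.getLayerNeuronCount(1);
    PruneSelective prune = new PruneSelective(network);
    prune.changeNeuronCount(1, current - 1); // routes to the decrease logic
  }
}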

  /**
   * Randomize the weights of a single neuron, using the network's current
   * minimum and maximum weight values as the range.
   *
   * @param targetLayer
   *            The target layer.
   * @param neuron
   *            The target neuron.
   */
  public final void randomizeNeuron(final int targetLayer, final int neuron) {
    final FlatNetwork flat = this.network.getStructure().getFlat();
    final double low = EngineArray.min(flat.getWeights());
    final double high = EngineArray.max(flat.getWeights());
    randomizeNeuron(targetLayer, neuron, true, low, high, false, 0.0);
  }
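
A usage sketch for this convenience overload (again assuming PruneSelective): it re-randomizes one neuron's weights within the range the rest of the network already occupies, which is a common way to revive a saturated or dead unit.

import org.encog.neural.networks.BasicNetwork;
import org.encog.neural.prune.PruneSelective;

public class RerollExample {
  /** Give hidden neuron 0 fresh weights without touching the rest. */
  public static void reroll(BasicNetwork network) {
    PruneSelective prune = new PruneSelective(network);
    prune.randomizeNeuron(1, 0); // layer 1, neuron 0
  }
}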

    // check for errors
    this.network.validateNeuron(targetLayer, neuron);

    // access the flat network
    final FlatNetwork flat = this.network.getStructure().getFlat();

    // allocate new weights now that we know how big the new weights will be
    final double[] newWeights = new double[flat.getWeights().length];

    // construct the new weights
    int weightsIndex = 0;

    for (int fromLayer = flat.getLayerCounts().length - 2; fromLayer >= 0; fromLayer--) {
      final int fromNeuronCount = this.network
          .getLayerTotalNeuronCount(fromLayer);
      final int toNeuronCount = this.network
          .getLayerNeuronCount(fromLayer + 1);
      final int toLayer = fromLayer + 1;

      for (int toNeuron = 0; toNeuron < toNeuronCount; toNeuron++) {
        for (int fromNeuron = 0; fromNeuron < fromNeuronCount; fromNeuron++) {
          boolean randomize = false;
          if ((toLayer == targetLayer) && (toNeuron == neuron)) {
            randomize = true;
          } else if ((fromLayer == targetLayer)
              && (fromNeuron == neuron)) {
            randomize = true;
          }

          double weight = this.network.getWeight(fromLayer,
              fromNeuron, toNeuron);

          if (randomize) {
            weight = d.randomize(weight);
          }

          newWeights[weightsIndex++] = weight;
        }
      }
    }

    // swap in the new weights
    flat.setWeights(newWeights);

  }
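
The d in the fragment above is an Encog Randomizer whose declaration falls outside the excerpt. Encog's RangeRandomizer is one such implementation. A small demo of the randomize(double) contract (my example; this particular implementation ignores the old value and draws uniformly from its range):

import org.encog.mathutil.randomize.RangeRandomizer;

public class RandomizerDemo {
  public static void main(String[] args) {
    RangeRandomizer r = new RangeRandomizer(-1, 1);
    // the argument is the old weight; the return value replaces it
    System.out.println(r.randomize(0.5));
  }
}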

  /**
   * Create new index values for the network.
   */
  private void reindexNetwork() {
    final FlatNetwork flat = this.network.getStructure().getFlat();

    int neuronCount = 0;
    int weightCount = 0;
    for (int i = 0; i < flat.getLayerCounts().length; i++) {
      if (i > 0) {
        final int from = flat.getLayerFeedCounts()[i - 1];
        final int to = flat.getLayerCounts()[i];
        weightCount += from * to;
      }
      flat.getLayerIndex()[i] = neuronCount;
      flat.getWeightIndex()[i] = weightCount;
      neuronCount += flat.getLayerCounts()[i];
    }

    flat.setLayerOutput(new double[neuronCount]);
    flat.clearContext();

    // flat layers are stored output-first, so the input layer is the last
    // entry in the arrays and the output layer is entry 0
    flat.setInputCount(flat.getLayerFeedCounts()[flat.getLayerCounts().length - 1]);
    flat.setOutputCount(flat.getLayerFeedCounts()[0]);
  }
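
As a worked check of the index arithmetic (my own example, not from the source): for a 2-3-1 network with bias neurons on the input and hidden layers, the output-first flat arrays are layerCounts = {1, 4, 3} and layerFeedCounts = {1, 3, 2}. The loop then produces layerIndex = {0, 1, 5}, weightIndex = {0, 4, 13}, and a total of 8 neurons and 13 weights: 4 for hidden-to-output (3 hidden plus bias) and 9 for input-to-hidden (2 inputs plus bias, times 3 hidden neurons).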

      }

      flatLayers[i] = layer;
    }

    this.flat = new FlatNetwork(flatLayers);

    finalizeLimit();
    this.layers.clear();
    enforceLimit();
  }
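
This fragment appears to be the tail of NeuralStructure.flatten(), which converts the object-graph network into a FlatNetwork via an array of FlatLayer descriptors (an inference; the enclosing class is not shown). A direct-construction sketch under that assumption, using the three-argument FlatLayer constructor (activation, neuron count, bias activation):

import org.encog.engine.network.activation.ActivationLinear;
import org.encog.engine.network.activation.ActivationSigmoid;
import org.encog.neural.flat.FlatLayer;
import org.encog.neural.flat.FlatNetwork;

public class FlattenSketch {
  public static FlatNetwork build() {
    // layers are listed input-first here; a bias activation of 0 means
    // the layer carries no bias neuron
    FlatLayer[] layers = new FlatLayer[3];
    layers[0] = new FlatLayer(new ActivationLinear(), 2, 1.0);
    layers[1] = new FlatLayer(new ActivationSigmoid(), 3, 1.0);
    layers[2] = new FlatLayer(new ActivationSigmoid(), 1, 0.0);
    return new FlatNetwork(layers);
  }
}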

    return sw.getElapsedMilliseconds();
  }

  public static long BenchmarkEncogFlat(double[][] input, double[][] output) {
    FlatNetwork network = new FlatNetwork(input[0].length, HIDDEN_COUNT, 0,
        output[0].length, false);
    network.randomize();
    BasicMLDataSet trainingSet = new BasicMLDataSet(input, output);

    TrainFlatNetworkBackPropagation train = new TrainFlatNetworkBackPropagation(
        network, trainingSet, 0.7, 0.7);
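
The benchmark fragment stops right after the trainer is built. A hedged sketch of how it plausibly finishes, judging by the sw.getElapsedMilliseconds() return visible at the top of the excerpt (HIDDEN_COUNT and ITERATIONS are assumed values; the originals are defined elsewhere in the benchmark class):

import org.encog.ml.data.basic.BasicMLDataSet;
import org.encog.neural.flat.FlatNetwork;
import org.encog.neural.flat.train.prop.TrainFlatNetworkBackPropagation;
import org.encog.util.Stopwatch;

public class FlatBenchmarkSketch {
  public static final int HIDDEN_COUNT = 20;  // assumed value
  public static final int ITERATIONS = 1000;  // assumed value

  public static long benchmarkEncogFlat(double[][] input, double[][] output) {
    FlatNetwork network = new FlatNetwork(input[0].length, HIDDEN_COUNT, 0,
        output[0].length, false);
    network.randomize();
    BasicMLDataSet trainingSet = new BasicMLDataSet(input, output);

    TrainFlatNetworkBackPropagation train =
        new TrainFlatNetworkBackPropagation(network, trainingSet, 0.7, 0.7);

    Stopwatch sw = new Stopwatch();
    sw.start();
    for (int i = 0; i < ITERATIONS; i++) {
      train.iteration(); // one backprop pass over the whole training set
    }
    sw.stop();
    return sw.getElapsedMilliseconds();
  }
}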

  public static void main(String[] args) {
   
    MLDataSet trainingSet = new BasicMLDataSet(XOR_INPUT, XOR_IDEAL);

    FlatNetwork network = createNetwork();

    System.out.println("Starting Weights:");
    displayWeights(network);
    evaluate(network,trainingSet);
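
The createNetwork() helper is outside the excerpt. A minimal sketch of a FlatNetwork suitable for XOR (my construction; the example's own layer sizes may differ), using the same five-argument constructor as the benchmark above:

import org.encog.neural.flat.FlatNetwork;

public class XorNetworkSketch {
  /** 2 inputs, 4 hidden, no second hidden layer, 1 output, sigmoid. */
  public static FlatNetwork createNetwork() {
    FlatNetwork network = new FlatNetwork(2, 4, 0, 1, false);
    network.randomize();
    return network;
  }
}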
