Package org.encog.ml.data

Examples of org.encog.ml.data.MLData


  /**
   * {@inheritDoc}
   */
  @Override
  public final int classify(final MLData input) {
    final MLData result = compute(input);
    return EngineArray.maxIndex(result.getData());
  }
View Full Code Here


   *            The input pattern.
   * @return The winning neuron.
   */
  public final MLData compute(final MLData input) {

    final MLData result = new BasicMLData(this.outputNeuronCount);

    for (int i = 0; i < this.outputNeuronCount; i++) {
      final Matrix optr = this.weights.getCol(i);
      final Matrix inputMatrix = Matrix.createRowMatrix(input.getData());
      result.setData(i, MatrixMath.dotProduct(inputMatrix, optr));
    }

    return result;
  }
View Full Code Here

   *            The input pattern.
   * @return The winning neuron.
   */
  public final int winner(final MLData input) {

    final MLData output = compute(input);
    final int win = EngineArray.indexOfLargest(output.getData());
    return win;
  }
View Full Code Here

          && section.getSubSectionName().equals("SAMPLES")) {
        for (final String line : section.getLines()) {
          final List<String> cols = EncogFileSection
              .splitColumns(line);
          int index = 0;
          final MLData inputData = new BasicMLData(inputCount);
          for (int i = 0; i < inputCount; i++) {
            inputData.setData(i,
                CSVFormat.EG_FORMAT.parse(cols.get(index++)));
          }
          final MLData idealData = new BasicMLData(inputCount);
          for (int i = 0; i < outputCount; i++) {
            idealData.setData(i,
                CSVFormat.EG_FORMAT.parse(cols.get(index++)));
          }
          final MLDataPair pair = new BasicMLDataPair(inputData,
              idealData);
          samples.add(pair);
View Full Code Here

      final MLData leastRepresented) {

    double maxActivation = Double.MIN_VALUE;
    int maxActivationNeuron = -1;

    final MLData output = this.network.compute(leastRepresented);

    // Loop over all of the output neurons. Consider any neurons that were
    // not the BMU (winner) for any pattern. Track which of these
    // non-winning neurons had the highest activation.
    for (int outputNeuron = 0; outputNeuron < won.length; outputNeuron++) {
      // Only consider neurons that did not "win".
      if (won[outputNeuron] == 0) {
        if ((maxActivationNeuron == -1)
            || (output.getData(outputNeuron) > maxActivation)) {
          maxActivation = output.getData(outputNeuron);
          maxActivationNeuron = outputNeuron;
        }
      }
    }
View Full Code Here

    // Reset the BMU and begin this iteration.
    this.bmuUtil.reset();
    final int[] won = new int[this.outputNeuronCount];
    double leastRepresentedActivation = Double.MAX_VALUE;
    MLData leastRepresented = null;

    // Reset the correction matrix for this synapse and iteration.
    this.correctionMatrix.clear();

    // Determine the BMU for each training element.
    for (final MLDataPair pair : getTraining()) {
      final MLData input = pair.getInput();

      final int bmu = this.bmuUtil.calculateBMU(input);

      // If we are to force a winner each time, then track how many
      // times each output neuron becomes the BMU (winner).
      if (this.forceWinner) {
        won[bmu]++;

        // Get the "output" from the network for this pattern. This
        // gets the activation level of the BMU.
        final MLData output = this.network.compute(pair.getInput());

        // Track which training entry produces the least BMU. This
        // pattern is the least represented by the network.
        if (output.getData(bmu) < leastRepresentedActivation) {
          leastRepresentedActivation = output.getData(bmu);
          leastRepresented = pair.getInput();
        }
      }

      train(bmu, this.network.getWeights(), input);
View Full Code Here

   * @param pattern
   *            The pattern to train.
   */
  public final void trainPattern(final MLData pattern) {

    final MLData input = pattern;
    final int bmu = this.bmuUtil.calculateBMU(input);
    train(bmu, this.network.getWeights(), input);
    applyCorrection();

  }
View Full Code Here

    ImageNeuralData input = new ImageNeuralData(image);
    set.add(input,ideal);
    set.downsample(2,2);
    Iterator<MLDataPair> itr = set.iterator();
    MLDataPair pair = (MLDataPair)itr.next();
    MLData data = pair.getInput();
    double[] d = data.getData();
    //Assert.assertEquals(d[0],-1.0, 0.1);
    //Assert.assertEquals(d[5],1, 0.1);
   
    // just "flex" these for no exceptions
    input.toString();
View Full Code Here

   
    public static boolean verifyXOR(MLRegression network,double tolerance)
    {
      for(int trainingSet=0;trainingSet<XOR.XOR_IDEAL.length;trainingSet++)
      {
        MLData actual = network.compute(new BasicMLData(XOR.XOR_INPUT[trainingSet]));
       
        for(int i=0;i<XOR.XOR_IDEAL[0].length;i++)
        {
          double diff = Math.abs(actual.getData(i)-XOR.XOR_IDEAL[trainingSet][i]);
          if( diff>tolerance )
            return false;
        }
       
      }
View Full Code Here

    public static MLDataSet createNoisyXORDataSet(int count) {
      MLDataSet result = new BasicMLDataSet();
      for(int i=0;i<count;i++) {
        for(int j=0;j<4;j++) {
          MLData inputData = new BasicMLData(XOR_INPUT[j]);
          MLData idealData = new BasicMLData(XOR_IDEAL[j]);
          MLDataPair pair = new BasicMLDataPair(inputData,idealData);
          inputData.setData(0, inputData.getData(0)+RangeRandomizer.randomize(-0.1, 0.1));
          inputData.setData(1, inputData.getData(1)+RangeRandomizer.randomize(-0.1, 0.1));
          result.add(pair);
        }
View Full Code Here

TOP

Related Classes of org.encog.ml.data.MLData

Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc. and is owned by Oracle, Inc. Contact coftware#gmail.com.