Package: org.neuroph.core.transfer

Examples of org.neuroph.core.transfer.TransferFunction


                    inputFunction = new InputFunction(weightsFunction, summingFunction);                   
                }



    TransferFunction transferFunction = createTransferFunction(neuronProperties.getTransferFunctionProperties());

    Neuron neuron = null;
    Class neuronClass = neuronProperties.getNeuronType();

                // use two param constructor to create neuron
View Full Code Here


   * @param tfProperties
   *            transfer function properties
   * @return returns transfer function
   */
  private static TransferFunction createTransferFunction(Properties tfProperties) {
    TransferFunction transferFunction = null;

    Class  tfClass = (Class)tfProperties.getProperty("transferFunction");

                    try {
                        Class[] paramTypes = null;
View Full Code Here

        @Override
  protected void adjustOutputNeurons(double[] patternError) {
           
                // get output layer
                MatrixMlpLayer outputLayer = (MatrixMlpLayer)matrixLayers[matrixLayers.length - 1];
                TransferFunction transferFunction = outputLayer.getTransferFunction();

                // get output vector
                double[] outputs = outputLayer.getOutputs();
                double[] netInputs = outputLayer.getNetInput();
                double[] neuronErrors = outputLayer.getErrors(); // these will hold  -should be set from here!!!!

                // calculate errors(deltas) for all output neurons
                for(int i = 0; i < outputs.length; i++) {
                    neuronErrors[i] = patternError[i] * transferFunction.getDerivative(netInputs[i]); // ovde mi treba weighted sum, da ne bi morao ponovo da racunam
                }

                // update weights
                this.updateLayerWeights(outputLayer, neuronErrors);
        }
View Full Code Here

            int layersCount = matrixMlp.getLayersCount();
           
            for ( int layerIdx = layersCount -2 ; layerIdx > 0 ; layerIdx--) {
               MatrixMlpLayer currentLayer = (MatrixMlpLayer)matrixLayers[layerIdx];

               TransferFunction transferFunction = currentLayer.getTransferFunction();
               int neuronsCount = currentLayer.getNeuronsCount();

               double[] neuronErrors = currentLayer.getErrors();
               double[] netInputs = currentLayer.getNetInput();

               MatrixMlpLayer nextLayer = (MatrixMlpLayer)currentLayer.getNextLayer();
               double[] nextLayerErrors = nextLayer.getErrors();
               double[][] nextLayerWeights = nextLayer.getWeights();

               // calculate error for each neuron in current layer
               for(int neuronIdx = 0; neuronIdx < neuronsCount; neuronIdx++) {
                   // calculate weighted sum of errors of all neuron it is attached to - calculate how much this neuron is contributing to errors in next layer
                   double weightedErrorsSum = 0;

                   for(int nextLayerNeuronIdx = 0; nextLayerNeuronIdx < nextLayer.getNeuronsCount(); nextLayerNeuronIdx++) {
                     weightedErrorsSum += nextLayerErrors[nextLayerNeuronIdx] * nextLayerWeights[nextLayerNeuronIdx][neuronIdx];
                   }

                   // calculate the error for this neuron
                   neuronErrors[neuronIdx] = transferFunction.getDerivative(netInputs[neuronIdx]) * weightedErrorsSum;
               } // neuron iterator

               this.updateLayerWeights(currentLayer, neuronErrors);
              
            } // layer iterator
View Full Code Here

      double d = connection.getToNeuron().getError()
          * connection.getWeight().getValue();
      delta_sum += d; // weighted sum from the next layer
    } // for

    TransferFunction transferFunction = neuron.getTransferFunction();
    double netInput = neuron.getNetInput();
    double f1 = transferFunction.getDerivative(netInput);
    double delta = f1 * delta_sum;
    return delta;
  }
View Full Code Here

        neuron.setError(0);
                                i++;
        continue;
      }
     
      TransferFunction transferFunction = neuron.getTransferFunction();
      double neuronInput = neuron.getNetInput();
      double delta = outputError * transferFunction.getDerivative(neuronInput);
      neuron.setError(delta);
      this.updateNeuronWeights(neuron);       
      i++;
    } // for       
  }
View Full Code Here

  private static FlatLayer flattenLayer(Layer layer) {
    boolean inputLayer = false;
    Set<Class<?>> transferFunctions = new HashSet<Class<?>>();
    int neuronCount = 0;
    int biasCount = 0;
    TransferFunction transfer = null;

    for (Neuron neuron : layer.getNeurons()) {
      if (neuron.getClass() == InputNeuron.class)
        inputLayer = true;

      if (neuron.getClass() == Neuron.class
          || neuron.getClass() == InputNeuron.class) {
        neuronCount++;

        transfer = neuron.getTransferFunction();
        transferFunctions.add(transfer.getClass());
      } else if (neuron.getClass() == BiasNeuron.class)
        biasCount++;
    }

    if (transferFunctions.size() > 1)
View Full Code Here

TOP

Related Classes of org.neuroph.core.transfer.TransferFunction

Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc. and is owned by Oracle, Inc. Contact: coftware#gmail.com.