Package org.encog.engine.network.activation

Examples of org.encog.engine.network.activation.ActivationFunction


    Class<?> t = transferFunctions.iterator().next();

    double slope = 1;

    ActivationFunction activation = null;
   
    if (inputLayer)
      activation = new ActivationLinear();
    else if (t == Linear.class) {
      slope = ((Linear) transfer).getSlope();
View Full Code Here


  public final File SERIAL_FILENAME = TEMP_DIR.createFile("encogtest.ser");
   
  private NEATNetwork create()
  {
    List<NEATNeuron> neurons = new ArrayList<NEATNeuron>();
    ActivationFunction afSigmoid = new ActivationSigmoid();
    ActivationFunction afStep = new ActivationStep();
   
    // create the neurons
    NEATNeuron input1 = new NEATNeuron(
        NEATNeuronType.Input,
        1,
View Full Code Here

        flat.setActivationFunctions(new ActivationFunction[flat
            .getLayerCounts().length]);

        for (final String line : section.getLines()) {
          ActivationFunction af = null;
          final List<String> cols = EncogFileSection
              .splitColumns(line);
          final String name = "org.encog.engine.network.activation."
              + cols.get(0);
          try {
            final Class<?> clazz = Class.forName(name);
            af = (ActivationFunction) clazz.newInstance();
          } catch (final ClassNotFoundException e) {
            throw new PersistError(e);
          } catch (final InstantiationException e) {
            throw new PersistError(e);
          } catch (final IllegalAccessException e) {
            throw new PersistError(e);
          }

          for (int i = 0; i < af.getParamNames().length; i++) {
            af.setParam(i,
                CSVFormat.EG_FORMAT.parse(cols.get(i + 1)));
          }

          flat.getActivationFunctions()[index++] = af;
        }
View Full Code Here

   *            True if this is a tanh activation, false for sigmoid.
   */
  public FlatNetwork(final int input, final int hidden1, final int hidden2,
      final int output, final boolean tanh) {

    final ActivationFunction linearAct = new ActivationLinear();
    FlatLayer[] layers;
    final ActivationFunction act = tanh ? new ActivationTANH()
        : new ActivationSigmoid();

    if ((hidden1 == 0) && (hidden2 == 0)) {
      layers = new FlatLayer[2];
      layers[0] = new FlatLayer(linearAct, input,
View Full Code Here

        flat.setActivationFunctions(new ActivationFunction[flat
            .getLayerCounts().length]);

        for (final String line : section.getLines()) {
          ActivationFunction af = null;
          final List<String> cols = EncogFileSection
              .splitColumns(line);
          final String name = "org.encog.engine.network.activation."
              + cols.get(0);
          try {
            final Class<?> clazz = Class.forName(name);
            af = (ActivationFunction) clazz.newInstance();
          } catch (final ClassNotFoundException e) {
            throw new PersistError(e);
          } catch (final InstantiationException e) {
            throw new PersistError(e);
          } catch (final IllegalAccessException e) {
            throw new PersistError(e);
          }

          for (int i = 0; i < af.getParamNames().length; i++) {
            af.setParam(i,
                CSVFormat.EG_FORMAT.parse(cols.get(i + 1)));
          }

          flat.getActivationFunctions()[index++] = af;
        }
View Full Code Here

    } else {
      name = fn.toLowerCase();
      params = new double[0];
    }

    ActivationFunction af = allocateAF(name);
   
    if( af==null ) {
      return null;
    }

    if (af.getParamNames().length != params.length) {
      throw new EncogError(name + " expected "
          + af.getParamNames().length + ", but " + params.length
          + " were provided.");
    }

    for (int i = 0; i < af.getParamNames().length; i++) {
      af.setParam(i, params[i]);
    }

    return af;
  }
View Full Code Here

   */
  public FlatNetwork(final int input, final int hidden1, final int hidden2,
      final int output, final boolean tanh) {
    final double[] params = new double[1];
    FlatLayer[] layers;
    final ActivationFunction act = tanh ? new ActivationTANH()
        : new ActivationSigmoid();
    params[0] = 1; // slope

    if ((hidden1 == 0) && (hidden2 == 0)) {
      layers = new FlatLayer[2];
View Full Code Here

    final int toLayerIndex = this.layerIndex[currentLevel];
    final int fromLayerSize = this.layerCounts[currentLevel + 1];
    final int toLayerSize = this.layerFeedCounts[currentLevel];

    final int index = this.weightIndex[currentLevel];
    final ActivationFunction activation = this.network
        .getActivationFunctions()[currentLevel + 1];

    // handle weights
    int yi = fromLayerIndex;
    for (int y = 0; y < fromLayerSize; y++) {
      final double output = this.layerOutput[yi];
      double sum = 0;
      int xi = toLayerIndex;
      int wi = index + y;
      for (int x = 0; x < toLayerSize; x++) {
        this.gradients[wi] += output * this.layerDelta[xi];
        sum += this.weights[wi] * this.layerDelta[xi];
        wi += fromLayerSize;
        xi++;
      }

      this.layerDelta[yi] = sum
          * activation.derivativeFunction(this.layerOutput[yi]);
      yi++;
    }
  }
 
View Full Code Here

    final int toLayerIndex = this.layerIndex[currentLevel];
    final int fromLayerSize = this.layerCounts[currentLevel + 1];
    final int toLayerSize = this.layerFeedCounts[currentLevel];

    final int index = this.weightIndex[currentLevel];
    final ActivationFunction activation = this.network
        .getActivationFunctions()[currentLevel + 1];
    final double currentFlatSpot = this.flatSpot[currentLevel + 1];

    // handle weights
    int yi = fromLayerIndex;
    for (int y = 0; y < fromLayerSize; y++) {
      final double output = this.layerOutput[yi];
      double sum = 0;
      int xi = toLayerIndex;
      int wi = index + y;
      for (int x = 0; x < toLayerSize; x++) {
        this.gradients[wi] += output * this.layerDelta[xi];
        sum += this.weights[wi] * this.layerDelta[xi];
        wi += fromLayerSize;
        xi++;
      }

      this.layerDelta[yi] = sum
          * (activation.derivativeFunction(this.layerSums[yi],this.layerOutput[yi])+currentFlatSpot);
      yi++;
    }
  }
 
View Full Code Here

    // fix flat spot, if needed
    this.flatSpot = new double[this.network.getActivationFunctions().length];

    if (this.shouldFixFlatSpot) {
      for (int i = 0; i < this.network.getActivationFunctions().length; i++) {
        final ActivationFunction af = this.network
            .getActivationFunctions()[i];
       
        if( af instanceof ActivationSigmoid ) {
          this.flatSpot[i] = 0.1;
        } else {
View Full Code Here

TOP

Related Classes of org.encog.engine.network.activation.ActivationFunction

Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc. and is owned by Oracle Inc. Contact coftware#gmail.com.