Package com.github.neuralnetworks.architecture

Examples of com.github.neuralnetworks.architecture.Layer


    private static final long serialVersionUID = 1L;

    private Layer hiddenLayer;

    public Autoencoder(int inputUnitCount, int hiddenUnitCount, boolean addBias) {
        this(new Layer(), new Layer(), new Layer(), inputUnitCount, hiddenUnitCount, addBias);
    }
View Full Code Here
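The constructor above shows the basic pattern in this package: Layer instances are created bare, and all sizing lives on the connection objects that join them. A minimal sketch of that wiring, assuming only the constructors and accessors that appear in the snippets on this page (the class name, unit counts and import paths are illustrative):

import com.github.neuralnetworks.architecture.FullyConnected;
import com.github.neuralnetworks.architecture.Layer;

public class LayerWiringSketch {

    public static void main(String[] args) {
        // two bare layers; the no-argument constructor carries no dimensions
        Layer input = new Layer();
        Layer output = new Layer();

        // the connection records the unit counts and registers itself with both layers
        FullyConnected fc = new FullyConnected(input, output, 784, 10);

        System.out.println(input.getConnections().size());    // 1
        System.out.println(fc.getInputUnitCount());           // 784
        System.out.println(fc.getOutputUnitCount());          // 10
        System.out.println(fc.getOutputLayer() == output);    // true
    }
}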


     * @param layer
     * @param visibleUnitCount
     * @param hiddenUnitCount
     * @param addBias
     * @return this
     */
    public DBN addLevel(Layer layer, int visibleUnitCount, int hiddenUnitCount, boolean addBias) {
        Layer currentOutputLayer = getOutputLayer();
        if (currentOutputLayer != null) {
            // stack a new RBM between the current output layer and the new layer
            addNeuralNetwork(new RBM(currentOutputLayer, layer, visibleUnitCount, hiddenUnitCount, addBias, addBias));
        } else {
            // no output layer yet: the new layer becomes the first layer of the network
            addLayer(layer);
        }
View Full Code Here
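The fragment does not show addLevel being called; a hedged usage sketch, assuming a no-argument DBN constructor and the chaining implied by the @return this tag (unit counts and the import path are illustrative):

import com.github.neuralnetworks.architecture.Layer;
import com.github.neuralnetworks.architecture.types.DBN;   // package path assumed

public class DbnStackSketch {

    public static DBN build() {
        return new DBN()
                .addLevel(new Layer(), 784, 500, true)   // first level: no output layer yet, the layer is simply added
                .addLevel(new Layer(), 784, 500, true)   // RBM between the 784-unit input layer and a new 500-unit hidden layer
                .addLevel(new Layer(), 500, 10, true);   // RBM between the 500-unit layer and a new 10-unit top layer
    }
}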

public class CNNTest {

    @Test
    public void testDimensions() {
        // convolution dimensions: 4x4 input feature maps, 3 input filters, 2x2 kernel, 2 output filters, stride 1
        Conv2DConnection conv = new Conv2DConnection(new Layer(), new Layer(), 4, 4, 3, 2, 2, 2, 1);

        // (4 - 2) / 1 + 1 = 3
        assertEquals(3, conv.getOutputFeatureMapColumns(), 0);
        assertEquals(3, conv.getOutputFeatureMapRows(), 0);
        assertEquals(2, conv.getOutputFilters(), 0);

        // subsampling dimensions: 5x5 input feature maps, 2x2 regions, 3 filters
        Subsampling2DConnection sub = new Subsampling2DConnection(new Layer(), new Layer(), 5, 5, 2, 2, 3);

        // 5 / 2 = 2 (integer division)
        assertEquals(2, sub.getOutputFeatureMapColumns(), 0);
        assertEquals(2, sub.getOutputFeatureMapRows(), 0);
        assertEquals(3, sub.getFilters(), 0);
    }
View Full Code Here
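The asserted values follow a simple rule that also matches the construction tests below: a convolution produces (input - kernel) / stride + 1 rows and columns, and a subsampling connection divides each dimension by its region size with integer division. A small standalone sketch of that arithmetic (the helper names are illustrative, not part of the library):

public final class OutputSizeSketch {

    // (input - kernel) / stride + 1, e.g. (4 - 2) / 1 + 1 = 3 as asserted above
    static int convOutputSize(int inputSize, int kernelSize, int stride) {
        return (inputSize - kernelSize) / stride + 1;
    }

    // input / region with integer division, e.g. 5 / 2 = 2 as asserted above
    static int subsamplingOutputSize(int inputSize, int regionSize) {
        return inputSize / regionSize;
    }

    public static void main(String[] args) {
        System.out.println(convOutputSize(32, 5, 1));       // 28, as in testCNNConstruction
        System.out.println(subsamplingOutputSize(28, 2));   // 14
        System.out.println(convOutputSize(6, 3, 2));        // 2, the stride-2 case in testCNNConstruction3
    }
}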

    @Test
    public void testCNNConstruction() {
        // 32x32x1 input, three 5x5 convolutions (6, 16 and 120 maps), two 2x2 subsamplings, then 84 and 10 fully connected units
        NeuralNetworkImpl nn = NNFactory.convNN(new int[][] { { 32, 32, 1 }, { 5, 5, 6, 1 }, { 2, 2 }, { 5, 5, 16, 1 }, { 2, 2 }, { 5, 5, 120, 1 }, { 84 }, { 10 } }, true);
        // 1 input + 3 x (convolution + bias) + 2 subsampling + 2 x (fully connected + bias) = 13 layers
        assertEquals(13, nn.getLayers().size(), 0);

        Layer l = nn.getInputLayer().getConnections().get(0).getOutputLayer();
        Conv2DConnection cc = (Conv2DConnection) nn.getInputLayer().getConnections().get(0);
        // (32 - 5) / 1 + 1 = 28
        assertEquals(28, cc.getOutputFeatureMapRows(), 0);
        assertEquals(28, cc.getOutputFeatureMapColumns(), 0);
        assertEquals(6, cc.getOutputFilters(), 0);

        Subsampling2DConnection sc = (Subsampling2DConnection) l.getConnections().get(2);
        l = l.getConnections().get(2).getOutputLayer();
        // 28 / 2 = 14
        assertEquals(14, sc.getOutputFeatureMapRows(), 0);
        assertEquals(14, sc.getOutputFeatureMapColumns(), 0);
        assertEquals(6, sc.getFilters(), 0);

        cc = (Conv2DConnection) l.getConnections().get(1);
        l = l.getConnections().get(1).getOutputLayer();
        assertEquals(10, cc.getOutputFeatureMapRows(), 0);
        assertEquals(10, cc.getOutputFeatureMapColumns(), 0);
        assertEquals(16, cc.getOutputFilters(), 0);

        sc = (Subsampling2DConnection) l.getConnections().get(2);
        l = l.getConnections().get(2).getOutputLayer();
        assertEquals(5, sc.getOutputFeatureMapRows(), 0);
        assertEquals(5, sc.getOutputFeatureMapColumns(), 0);
        assertEquals(16, sc.getFilters(), 0);

        cc = (Conv2DConnection) l.getConnections().get(1);
        l = l.getConnections().get(1).getOutputLayer();
        assertEquals(1, cc.getOutputFeatureMapRows(), 0);
        assertEquals(1, cc.getOutputFeatureMapColumns(), 0);
        assertEquals(120, cc.getOutputFilters(), 0);

        GraphConnections cg = (GraphConnections) l.getConnections().get(2);
        assertEquals(84, cg.getConnectionGraph().getRows(), 0);

        GraphConnections cg2 = (GraphConnections) cg.getOutputLayer().getConnections().get(2);
        assertEquals(10, cg2.getConnectionGraph().getRows(), 0);
    }
View Full Code Here
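The int[][] argument to NNFactory.convNN encodes one layer per row, and the factory fragment further down this page branches on the row length: three values describe the input, four a convolution, two a subsampling, one a fully connected layer. An annotated restatement of the spec used in this test (import paths assumed):

import com.github.neuralnetworks.architecture.NeuralNetworkImpl;
import com.github.neuralnetworks.architecture.types.NNFactory;   // package path assumed

public class ConvSpecSketch {

    public static NeuralNetworkImpl build() {
        int[][] spec = new int[][] {
                { 32, 32, 1 },      // input: 32x32 feature maps, 1 filter (channel)
                { 5, 5, 6, 1 },     // four values = convolution: 5x5 kernel, 6 filters, stride 1
                { 2, 2 },           // two values = subsampling: 2x2 regions
                { 5, 5, 16, 1 },
                { 2, 2 },
                { 5, 5, 120, 1 },
                { 84 },             // one value = fully connected layer with 84 units
                { 10 }
        };
        // true adds a bias Layer for every convolutional and fully connected layer,
        // which is why the test above expects 13 layers in total
        return NNFactory.convNN(spec, true);
    }
}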

    @Test
    public void testCNNConstruction2() {
        // 28x28x1 input, two 5x5 convolutions (20 and 50 maps), two 2x2 subsamplings, then 500 and 10 fully connected units
        NeuralNetworkImpl nn = NNFactory.convNN(new int[][] { { 28, 28, 1 }, { 5, 5, 20, 1 }, { 2, 2 }, { 5, 5, 50, 1 }, { 2, 2 }, { 500 }, { 10 } }, true);
        // 1 input + 2 x (convolution + bias) + 2 subsampling + 2 x (fully connected + bias) = 11 layers
        assertEquals(11, nn.getLayers().size(), 0);

        Conv2DConnection cc = (Conv2DConnection) nn.getInputLayer().getConnections().get(0);
        Layer l = nn.getInputLayer().getConnections().get(0).getOutputLayer();
        assertEquals(24, cc.getOutputFeatureMapRows(), 0);
        assertEquals(24, cc.getOutputFeatureMapColumns(), 0);
        assertEquals(20, cc.getOutputFilters(), 0);

        Subsampling2DConnection sc = (Subsampling2DConnection) l.getConnections().get(2);
        l = l.getConnections().get(2).getOutputLayer();
        assertEquals(12, sc.getOutputFeatureMapRows(), 0);
        assertEquals(12, sc.getOutputFeatureMapColumns(), 0);
        assertEquals(20, sc.getFilters(), 0);

        cc = (Conv2DConnection) l.getConnections().get(1);
        l = l.getConnections().get(1).getOutputLayer();
        assertEquals(8, cc.getOutputFeatureMapRows(), 0);
        assertEquals(8, cc.getOutputFeatureMapColumns(), 0);
        assertEquals(50, cc.getOutputFilters(), 0);

        sc = (Subsampling2DConnection) l.getConnections().get(2);
        l = l.getConnections().get(2).getOutputLayer();
        assertEquals(4, sc.getOutputFeatureMapRows(), 0);
        assertEquals(4, sc.getOutputFeatureMapColumns(), 0);
        assertEquals(50, sc.getFilters(), 0);
        // the flattened subsampling output feeds the fully connected part: 50 maps of 4x4
        assertEquals(50 * 4 * 4, l.getConnections().get(0).getOutputUnitCount(), 0);

        Layer layer = l.getConnections().get(1).getOutputLayer();
        assertEquals(500, layer.getConnections().get(0).getOutputUnitCount(), 0);

        layer = layer.getConnections().get(2).getOutputLayer();
        assertEquals(500, layer.getConnections().get(0).getInputUnitCount(), 0);
        assertEquals(10, layer.getConnections().get(0).getOutputUnitCount(), 0);
    }
View Full Code Here

    @Test
    public void testCNNConstruction3() {
        // 6x6x1 input, one 3x3 convolution with 2 filters and stride 2, then 2x2 subsampling
        NeuralNetworkImpl nn = NNFactory.convNN(new int[][] { { 6, 6, 1 }, { 3, 3, 2, 2 }, { 2, 2 } }, true);
        // 1 input + convolution + bias + subsampling = 4 layers
        assertEquals(4, nn.getLayers().size(), 0);

        Conv2DConnection cc = (Conv2DConnection) nn.getInputLayer().getConnections().get(0);
        Layer l = nn.getInputLayer().getConnections().get(0).getOutputLayer();
        // (6 - 3) / 2 + 1 = 2
        assertEquals(2, cc.getOutputFeatureMapRows(), 0);
        assertEquals(2, cc.getOutputFeatureMapColumns(), 0);
        assertEquals(2, cc.getOutputFilters(), 0);

        Subsampling2DConnection sc = (Subsampling2DConnection) l.getConnections().get(2);
        l = l.getConnections().get(2).getOutputLayer();
        assertEquals(1, sc.getOutputFeatureMapRows(), 0);
        assertEquals(1, sc.getOutputFeatureMapColumns(), 0);
        assertEquals(2, sc.getFilters(), 0);
    }
View Full Code Here
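The connection indices used when walking these networks (get(0), get(1), get(2)) follow the order in which connections were attached to each Layer, as the tests above and the factory fragment below suggest. A hedged summary of the pattern, reusing only calls from this page (import paths assumed):

import com.github.neuralnetworks.architecture.Conv2DConnection;
import com.github.neuralnetworks.architecture.Layer;
import com.github.neuralnetworks.architecture.NeuralNetworkImpl;
import com.github.neuralnetworks.architecture.Subsampling2DConnection;

public class ConnectionIndexSketch {

    static void walk(NeuralNetworkImpl nn) {
        // input layer: its only connection is the outgoing convolution
        Conv2DConnection firstConv = (Conv2DConnection) nn.getInputLayer().getConnections().get(0);

        // a convolution's output layer: 0 = incoming convolution, 1 = incoming bias, 2 = outgoing connection
        Layer convOut = firstConv.getOutputLayer();
        Subsampling2DConnection pool = (Subsampling2DConnection) convOut.getConnections().get(2);

        // a subsampling's output layer: 0 = incoming subsampling, 1 = outgoing connection
        Layer poolOut = pool.getOutputLayer();
        Conv2DConnection nextConv = (Conv2DConnection) poolOut.getConnections().get(1);
        System.out.println(nextConv.getOutputFeatureMapRows());
    }
}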

        NNFactory.lcMaxPooling(nn);

        // feedforward connection calculators
        LayerCalculatorImpl lc = (LayerCalculatorImpl) nn.getLayerCalculator();

        Layer l = nn.getInputLayer();

        assertTrue(lc.getConnectionCalculator(l) instanceof ConnectionCalculatorConv);

        l = l.getConnections().get(0).getOutputLayer();
        assertTrue(lc.getConnectionCalculator(l) instanceof ConnectionCalculatorConv);

        l = l.getConnections().get(2).getOutputLayer();
        assertTrue(lc.getConnectionCalculator(l) instanceof AparapiMaxPooling2D);

        l = l.getConnections().get(1).getOutputLayer();
        assertTrue(lc.getConnectionCalculator(l) instanceof ConnectionCalculatorConv);

        l = l.getConnections().get(2).getOutputLayer();
        assertTrue(lc.getConnectionCalculator(l) instanceof AparapiMaxPooling2D);

        l = l.getConnections().get(1).getOutputLayer();
        assertTrue(lc.getConnectionCalculator(l) instanceof ConnectionCalculatorFullyConnected);

        l = l.getConnections().get(2).getOutputLayer();
        assertTrue(lc.getConnectionCalculator(l) instanceof ConnectionCalculatorFullyConnected);

        // backpropagation connection calculators
        BackPropagationTrainer<?> bpt = TrainerFactory.backPropagation(nn, null, null, null, null, 0.01f, 0.5f, 0f, 0f);
        BackPropagationLayerCalculatorImpl bplc = (BackPropagationLayerCalculatorImpl) bpt.getBPLayerCalculator();

        l = nn.getInputLayer();
        assertTrue(bplc.getConnectionCalculator(l) instanceof BackPropagationConv2DSigmoid);

        l = l.getConnections().get(0).getOutputLayer();
        assertTrue(bplc.getConnectionCalculator(l) instanceof BackpropagationMaxPooling2D);
        assertTrue(bplc.getConnectionCalculator(l.getConnections().get(1).getInputLayer()) instanceof BackPropagationConv2D);   // bias layer

        l = l.getConnections().get(2).getOutputLayer();
        assertTrue(bplc.getConnectionCalculator(l) instanceof BackPropagationConv2DSigmoid);

        l = l.getConnections().get(1).getOutputLayer();
        assertTrue(bplc.getConnectionCalculator(l) instanceof BackpropagationMaxPooling2D);
        assertTrue(bplc.getConnectionCalculator(l.getConnections().get(1).getInputLayer()) instanceof BackPropagationConv2D);   // bias layer

        l = l.getConnections().get(2).getOutputLayer();
        assertTrue(bplc.getConnectionCalculator(l) instanceof BackPropagationSigmoid);

        l = l.getConnections().get(1).getOutputLayer();
        assertTrue(bplc.getConnectionCalculator(l.getConnections().get(1).getInputLayer()) instanceof BackPropagationSigmoid);   // bias layer
        assertTrue(bplc.getConnectionCalculator(l) instanceof BackPropagationSigmoid);

        l = l.getConnections().get(2).getOutputLayer();
        assertTrue(bplc.getConnectionCalculator(l.getConnections().get(1).getInputLayer()) instanceof BackPropagationSigmoid);   // bias layer
        assertTrue(bplc.getConnectionCalculator(l) == null);

        // simple convolutional network
        nn = NNFactory.convNN(new int[][] { { 28, 28, 1 }, { 1, 1 }, { 10 } }, false);
        nn.setLayerCalculator(NNFactory.lcSigmoid(nn, null));
        NNFactory.lcMaxPooling(nn);

        l = nn.getInputLayer();
        assertTrue(lc.getConnectionCalculator(l) instanceof ConnectionCalculatorConv);

        l = l.getConnections().get(0).getOutputLayer();
        assertTrue(lc.getConnectionCalculator(l) instanceof AparapiSubsampling2D);

        l = l.getConnections().get(0).getOutputLayer();
        assertTrue(lc.getConnectionCalculator(l) instanceof AparapiSigmoid);

        bpt = TrainerFactory.backPropagation(nn, null, null, new MultipleNeuronsOutputError(), null, 0.02f, 0.5f, 0f, 0f);
        bplc = (BackPropagationLayerCalculatorImpl) bpt.getBPLayerCalculator();

        l = nn.getInputLayer();
        assertTrue(bplc.getConnectionCalculator(l) instanceof BackpropagationMaxPooling2D);

        l = l.getConnections().get(0).getOutputLayer();
        assertTrue(bplc.getConnectionCalculator(l) instanceof BackPropagationSigmoid);
    }
View Full Code Here
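The fragment above starts after the network and its sigmoid layer calculator have already been created. A hedged sketch of that missing setup, using only factory calls that appear elsewhere in this extract (the spec array and all import paths are assumptions, not the original code):

import com.github.neuralnetworks.architecture.NeuralNetworkImpl;
import com.github.neuralnetworks.architecture.types.NNFactory;                          // package paths assumed
import com.github.neuralnetworks.calculation.LayerCalculatorImpl;
import com.github.neuralnetworks.calculation.neuronfunctions.ConnectionCalculatorConv;

public class LayerCalculatorSketch {

    public static boolean inputLayerUsesConvCalculator() {
        // illustrative spec; lcSigmoid and lcMaxPooling are the calls the fragment above relies on
        NeuralNetworkImpl nn = NNFactory.convNN(new int[][] { { 28, 28, 1 }, { 5, 5, 20, 1 }, { 2, 2 }, { 10 } }, true);
        nn.setLayerCalculator(NNFactory.lcSigmoid(nn, null));
        NNFactory.lcMaxPooling(nn);   // swap the subsampling calculators for max pooling ones

        LayerCalculatorImpl lc = (LayerCalculatorImpl) nn.getLayerCalculator();
        return lc.getConnectionCalculator(nn.getInputLayer()) instanceof ConnectionCalculatorConv;
    }
}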

            throw new IllegalArgumentException("first layer must be convolutional");
        }

        NeuralNetworkImpl result = new NeuralNetworkImpl();

        Layer prev = null;
        int prevUnitCount = layers[0][0] * layers[0][1] * layers[0][2];
        result.addLayer(prev = new Layer());
        for (int i = 1; i < layers.length; i++) {
            int[] l = layers[i];
            Layer newLayer = null;
            Layer biasLayer = null;
            if (l.length == 1) {
                // one value: a fully connected layer with l[0] units
                new FullyConnected(prev, newLayer = new Layer(), prevUnitCount, l[0]);
                if (addBias) {
                    new FullyConnected(biasLayer = new Layer(), newLayer, 1, l[0]);
                }

                prevUnitCount = l[0];
            } else if (l.length == 4 || l.length == 2) {
                // two or four values: subsampling or convolution; first recover the geometry of the previous layer's output
                Integer inputFMRows = null;
                Integer inputFMCols = null;
                Integer filters = null;
                if (i == 1) {
                    inputFMRows = layers[0][0];
                    inputFMCols = layers[0][1];
                    filters = layers[0][2];
                } else {
                    for (Connections c : prev.getConnections()) {
                        if (c.getOutputLayer() == prev) {
                            if (c instanceof Conv2DConnection) {
                                Conv2DConnection cc = (Conv2DConnection) c;
                                inputFMRows = cc.getOutputFeatureMapRows();
                                inputFMCols = cc.getOutputFeatureMapColumns();
                                filters = cc.getOutputFilters();
                                break;
                            } else if (c instanceof Subsampling2DConnection) {
                                Subsampling2DConnection sc = (Subsampling2DConnection) c;
                                inputFMRows = sc.getOutputFeatureMapRows();
                                inputFMCols = sc.getOutputFeatureMapColumns();
                                filters = sc.getFilters();
                                break;
                            }
                        }
                    }
                }

                if (l.length == 4) {
                    // four values: convolution with an l[0] x l[1] kernel, l[2] filters and stride l[3]
                    Conv2DConnection c = new Conv2DConnection(prev, newLayer = new Layer(), inputFMRows, inputFMCols, filters, l[0], l[1], l[2], l[3]);
                    if (addBias) {
                        new Conv2DConnection(biasLayer = new Layer(), newLayer, c.getOutputFeatureMapRows(), c.getOutputFeatureMapColumns(), 1, 1, 1, l[2], l[3]);
                    }

                    prevUnitCount = c.getOutputUnitCount();
                } else if (l.length == 2) {
                    // two values: subsampling with l[0] x l[1] regions
                    Subsampling2DConnection c = new Subsampling2DConnection(prev, newLayer = new Layer(), inputFMRows, inputFMCols, l[0], l[1], filters);
                    prevUnitCount = c.getOutputUnitCount();
                }
            }

            result.addLayer(newLayer);
View Full Code Here
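One detail worth pulling out of the fragment above: bias is modeled with the same Layer class. A fully connected layer gets a one-unit bias Layer, and a convolutional layer gets a bias Layer whose Conv2DConnection has input feature maps matching the convolution's output, a single input filter and a 1x1 kernel. A condensed restatement with illustrative sizes (not library code):

import com.github.neuralnetworks.architecture.Conv2DConnection;
import com.github.neuralnetworks.architecture.FullyConnected;
import com.github.neuralnetworks.architecture.Layer;

public class BiasPatternSketch {

    static void fullyConnectedBias() {
        Layer prev = new Layer();
        Layer fcLayer = new Layer();
        new FullyConnected(prev, fcLayer, 256, 100);
        new FullyConnected(new Layer(), fcLayer, 1, 100);   // bias: a single input unit feeding all 100 units
    }

    static void convolutionalBias() {
        Layer prev = new Layer();
        Layer convLayer = new Layer();
        Conv2DConnection conv = new Conv2DConnection(prev, convLayer, 28, 28, 1, 5, 5, 20, 1);
        // bias: input feature maps sized like the convolution's output, 1 input filter, 1x1 kernel
        new Conv2DConnection(new Layer(), convLayer,
                conv.getOutputFeatureMapRows(), conv.getOutputFeatureMapColumns(), 1, 1, 1, 20, 1);
    }
}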

        if (layers.length <= 1) {
            throw new IllegalArgumentException("more than one layer is required");
        }

        NeuralNetworkImpl result = new NeuralNetworkImpl();
        addFullyConnectedLayer(result, new Layer(), layers[0], layers[0], addBias);
        for (int i = 1; i < layers.length; i++) {
            addFullyConnectedLayer(result, new Layer(), layers[i - 1], layers[i], addBias);
        }

        return result;
    }
View Full Code Here

        if (nn.addLayer(layer) && nn.getOutputLayer() != layer) {
            // when this is not the first layer, connect the current output layer to the new one
            result = new FullyConnected(nn.getOutputLayer(), layer, inputUnitCount, outputUnitCount);
        }

        if (addBias && nn.getInputLayer() != layer) {
            // bias is a dedicated single-unit Layer feeding the new layer
            Layer biasLayer = new Layer();
            nn.addLayer(biasLayer);
            return new FullyConnected(biasLayer, layer, 1, outputUnitCount);
        }

        return result;
View Full Code Here

