Package: com.github.neuralnetworks.architecture

Usage examples of com.github.neuralnetworks.architecture.Layer


  Layer hiddenLayer = getHiddenLayer();
  return hiddenLayer.getConnections().stream().map(c -> Util.getOppositeLayer(c, hiddenLayer)).filter(l -> Util.isBias(l)).findFirst().orElse(null);
    }

    public Layer getOutputBiasLayer() {
  Layer outputLayer = getOutputLayer();
  return outputLayer.getConnections().stream().map(c -> Util.getOppositeLayer(c, outputLayer)).filter(l -> Util.isBias(l)).findFirst().orElse(null);
    }
View Full Code Here


  LayerCalculatorImpl lc = (LayerCalculatorImpl) nn.getLayerCalculator();

  List<ConnectionCandidate> connections = new BreadthFirstOrderStrategy(nn, nn.getOutputLayer()).order();

  if (connections.size() > 0) {
      Layer current = null;
      List<Connections> chunk = new ArrayList<>();
      Set<Layer> convCalculatedLayers = new HashSet<>(); // tracks
                     // convolutional
                     // layers
                     // (because their
                     // calculations
                     // are
                     // interlinked)
      convCalculatedLayers.add(nn.getOutputLayer());

      for (int i = 0; i < connections.size(); i++) {
    ConnectionCandidate c = connections.get(i);
    chunk.add(c.connection);

    if (i == connections.size() - 1 || connections.get(i + 1).target != c.target) {
        current = c.target;

        ConnectionCalculator result = null;
        ConnectionCalculator ffcc = null;
        if (Util.isBias(current)) {
      ffcc = lc.getConnectionCalculator(current.getConnections().get(0).getOutputLayer());
        } else if (Util.isConvolutional(current) || Util.isSubsampling(current)) {
      if (chunk.size() != 1) {
          throw new IllegalArgumentException("Convolutional layer with more than one connection");
      }

      ffcc = lc.getConnectionCalculator(Util.getOppositeLayer(chunk.iterator().next(), current));
        } else {
      ffcc = lc.getConnectionCalculator(current);
        }

        if (ffcc instanceof AparapiSigmoid) {
      result = new BackPropagationSigmoid(p);
        } else if (ffcc instanceof AparapiTanh) {
      result = new BackPropagationTanh(p);
        } else if (ffcc instanceof AparapiSoftReLU) {
      result = new BackPropagationSoftReLU(p);
        } else if (ffcc instanceof AparapiReLU) {
      result = new BackPropagationReLU(p);
        } else if (ffcc instanceof AparapiMaxout) {
      result = new BackpropagationMaxout(p);
        } else if (ffcc instanceof AparapiMaxPooling2D || ffcc instanceof AparapiStochasticPooling2D) {
      result = new BackpropagationMaxPooling2D();
        } else if (ffcc instanceof AparapiAveragePooling2D) {
      result = new BackpropagationAveragePooling2D();
        } else if (ffcc instanceof ConnectionCalculatorConv) {
      Layer opposite = Util.getOppositeLayer(chunk.iterator().next(), current);
      if (!convCalculatedLayers.contains(opposite)) {
          convCalculatedLayers.add(opposite);

          if (ffcc instanceof AparapiConv2DSigmoid) {
        result = new BackPropagationConv2DSigmoid(p);
View Full Code Here

    @Test
    public void testTensorProvider2() {
  NeuralNetworkImpl nn = new NeuralNetworkImpl();

  Layer i = new Layer();
  Layer h = new Layer();
  Layer o = new Layer();

  nn.addLayer(i);

  Environment.getInstance().setUseWeightsSharedMemory(true);
  ConnectionFactory cf = new ConnectionFactory();
View Full Code Here

  addLayer(hiddenLayer);

  new FullyConnected(visibleLayer, hiddenLayer, visibleUnitCount, hiddenUnitCount);

  if (addVisibleBias) {
      Layer visibleBiasLayer = new Layer();
      addLayer(visibleBiasLayer);
      new FullyConnected(visibleBiasLayer, visibleLayer, 1, visibleUnitCount);
  }

  if (addHiddenBias) {
      Layer hiddenBiasLayer = new Layer();
      addLayer(hiddenBiasLayer);
      new FullyConnected(hiddenBiasLayer, hiddenLayer, 1, hiddenUnitCount);
  }
    }
View Full Code Here

  List<Connections> chunkCalc = new ArrayList<>();
  for (BackPropagationConnectionCalculator bc : calculators) {
      chunkCalc.clear();

      Layer target = targetLayer;
      for (Connections c : connections) {
    if (connectionCalculators.get(c) == bc) {
        chunkCalc.add(c);
        if (Util.isBias(c.getInputLayer()) && c.getInputLayer() != targetLayer && !(bc instanceof AparapiBackpropMaxout)) {
      target = c.getInputLayer();
View Full Code Here

    @Test
    public void testDimensions() {
  // convolution dimensions
  Environment.getInstance().setUseWeightsSharedMemory(true);
  ConnectionFactory cf = new ConnectionFactory();
  Conv2DConnection conv = cf.conv2d(new Layer(), new Layer(), 4, 4, 3, 2, 2, 2, 1);

  assertEquals(3, conv.getOutputFeatureMapColumns(), 0);
  assertEquals(3, conv.getOutputFeatureMapRows(), 0);
  assertEquals(2, conv.getOutputFilters(), 0);

  // subsampling dimensions
  Subsampling2DConnection sub = cf.subsampling2D(new Layer(), new Layer(), 5, 5, 2, 2, 3);

  assertEquals(2, sub.getOutputFeatureMapColumns(), 0);
  assertEquals(2, sub.getOutputFeatureMapRows(), 0);
  assertEquals(3, sub.getFilters(), 0);
    }
View Full Code Here

    public void testCNNConstruction() {
  Environment.getInstance().setUseWeightsSharedMemory(true);
  NeuralNetworkImpl nn = NNFactory.convNN(new int[][] { { 32, 32, 1 }, { 5, 5, 6, 1 }, { 2, 2 }, { 5, 5, 16, 1 }, { 2, 2 }, { 5, 5, 120, 1 }, {84}, {10} }, true);
  assertEquals(13, nn.getLayers().size(), 0);

  Layer l = nn.getInputLayer().getConnections().get(0).getOutputLayer();
  Conv2DConnection cc = (Conv2DConnection) nn.getInputLayer().getConnections().get(0);
  assertEquals(28, cc.getOutputFeatureMapRows(), 0);
  assertEquals(28, cc.getOutputFeatureMapColumns(), 0);
  assertEquals(6, cc.getOutputFilters(), 0);

  Subsampling2DConnection sc = (Subsampling2DConnection) l.getConnections().get(2);
  l = l.getConnections().get(2).getOutputLayer();
  assertEquals(14, sc.getOutputFeatureMapRows(), 0);
  assertEquals(14, sc.getOutputFeatureMapColumns(), 0);
  assertEquals(6, sc.getFilters(), 0);

  cc = (Conv2DConnection) l.getConnections().get(1);
  l = l.getConnections().get(1).getOutputLayer();
  assertEquals(10, cc.getOutputFeatureMapRows(), 0);
  assertEquals(10, cc.getOutputFeatureMapColumns(), 0);
  assertEquals(16, cc.getOutputFilters(), 0);

  sc = (Subsampling2DConnection) l.getConnections().get(2);
  l = l.getConnections().get(2).getOutputLayer();
  assertEquals(5, sc.getOutputFeatureMapRows(), 0);
  assertEquals(5, sc.getOutputFeatureMapColumns(), 0);
  assertEquals(16, sc.getFilters(), 0);

  cc = (Conv2DConnection) l.getConnections().get(1);
  l = l.getConnections().get(1).getOutputLayer();
  assertEquals(1, cc.getOutputFeatureMapRows(), 0);
  assertEquals(1, cc.getOutputFeatureMapColumns(), 0);
  assertEquals(120, cc.getOutputFilters(), 0);

  FullyConnected cg = (FullyConnected) l.getConnections().get(2);
  assertEquals(84, cg.getWeights().getRows(), 0);

  FullyConnected cg2 = (FullyConnected) cg.getOutputLayer().getConnections().get(2);
  assertEquals(10, cg2.getWeights().getRows(), 0);
    }
View Full Code Here

  Environment.getInstance().setUseWeightsSharedMemory(true);
  NeuralNetworkImpl nn = NNFactory.convNN(new int[][] { { 28, 28, 1 }, { 5, 5, 20, 1 }, { 2, 2 }, { 5, 5, 50, 1 }, { 2, 2 }, {500}, {10} }, true);
  assertEquals(11, nn.getLayers().size(), 0);

  Conv2DConnection cc = (Conv2DConnection) nn.getInputLayer().getConnections().get(0);
  Layer l = nn.getInputLayer().getConnections().get(0).getOutputLayer();
  assertEquals(24, cc.getOutputFeatureMapRows(), 0);
  assertEquals(24, cc.getOutputFeatureMapColumns(), 0);
  assertEquals(20, cc.getOutputFilters(), 0);

  Subsampling2DConnection sc = (Subsampling2DConnection) l.getConnections().get(2);
  l = l.getConnections().get(2).getOutputLayer();
  assertEquals(12, sc.getOutputFeatureMapRows(), 0);
  assertEquals(12, sc.getOutputFeatureMapColumns(), 0);
  assertEquals(20, sc.getFilters(), 0);

  cc = (Conv2DConnection) l.getConnections().get(1);
  l = l.getConnections().get(1).getOutputLayer();
  assertEquals(8, cc.getOutputFeatureMapRows(), 0);
  assertEquals(8, cc.getOutputFeatureMapColumns(), 0);
  assertEquals(50, cc.getOutputFilters(), 0);

  sc = (Subsampling2DConnection) l.getConnections().get(2);
  l = l.getConnections().get(2).getOutputLayer();
  assertEquals(4, sc.getOutputFeatureMapRows(), 0);
  assertEquals(4, sc.getOutputFeatureMapColumns(), 0);
  assertEquals(50, sc.getFilters(), 0);
  assertEquals(50 * 4 * 4, l.getConnections().get(0).getOutputUnitCount(), 0);

  Layer layer = l.getConnections().get(1).getOutputLayer();
  assertEquals(500, layer.getConnections().get(0).getOutputUnitCount(), 0);

  layer = layer.getConnections().get(2).getOutputLayer();
  assertEquals(500, layer.getConnections().get(0).getInputUnitCount(), 0);
  assertEquals(10, layer.getConnections().get(0).getOutputUnitCount(), 0);
    }
View Full Code Here

  Environment.getInstance().setUseWeightsSharedMemory(true);
  NeuralNetworkImpl nn = NNFactory.convNN(new int[][] { { 6, 6, 1 }, { 3, 3, 2, 2 }, { 2, 2 } }, true);
  assertEquals(4, nn.getLayers().size(), 0);

  Conv2DConnection cc = (Conv2DConnection) nn.getInputLayer().getConnections().get(0);
  Layer l = nn.getInputLayer().getConnections().get(0).getOutputLayer();
  assertEquals(2, cc.getOutputFeatureMapRows(), 0);
  assertEquals(2, cc.getOutputFeatureMapColumns(), 0);
  assertEquals(2, cc.getOutputFilters(), 0);

  Subsampling2DConnection sc = (Subsampling2DConnection) l.getConnections().get(2);
  l = l.getConnections().get(2).getOutputLayer();
  assertEquals(1, sc.getOutputFeatureMapRows(), 0);
  assertEquals(1, sc.getOutputFeatureMapColumns(), 0);
  assertEquals(2, sc.getFilters(), 0);
    }
View Full Code Here

  NNFactory.lcMaxPooling(nn);

  // feedforwad cc
  LayerCalculatorImpl lc = (LayerCalculatorImpl) nn.getLayerCalculator();

  Layer l = nn.getInputLayer();

  assertTrue(lc.getConnectionCalculator(l) instanceof ConnectionCalculatorConv);

  l = l.getConnections().get(0).getOutputLayer();
  assertTrue(lc.getConnectionCalculator(l) instanceof ConnectionCalculatorConv);

  l = l.getConnections().get(2).getOutputLayer();
  assertTrue(lc.getConnectionCalculator(l) instanceof AparapiMaxPooling2D);

  l = l.getConnections().get(1).getOutputLayer();
  assertTrue(lc.getConnectionCalculator(l) instanceof ConnectionCalculatorConv);

  l = l.getConnections().get(2).getOutputLayer();
  assertTrue(lc.getConnectionCalculator(l) instanceof AparapiMaxPooling2D);

  l = l.getConnections().get(1).getOutputLayer();
  assertTrue(lc.getConnectionCalculator(l) instanceof ConnectionCalculatorFullyConnected);

  l = l.getConnections().get(2).getOutputLayer();
  assertTrue(lc.getConnectionCalculator(l) instanceof ConnectionCalculatorFullyConnected);

  // backpropagation cc
  BackPropagationTrainer<?> bpt = TrainerFactory.backPropagation(nn, null, null, null, null, 0.01f, 0.5f, 0f, 0f, 0f, 1, 1, 1);
  BackPropagationLayerCalculatorImpl bplc = (BackPropagationLayerCalculatorImpl) bpt.getBPLayerCalculator();

  l = nn.getInputLayer();
  assertTrue(bplc.getConnectionCalculator(l) instanceof BackPropagationConv2DSigmoid);

  l = l.getConnections().get(0).getOutputLayer();
  assertTrue(bplc.getConnectionCalculator(l) instanceof BackpropagationMaxPooling2D);
  assertTrue(bplc.getConnectionCalculator(l.getConnections().get(1).getInputLayer()) instanceof BackPropagationConv2D);   // bias

  l = l.getConnections().get(2).getOutputLayer();
  assertTrue(bplc.getConnectionCalculator(l) instanceof BackPropagationConv2DSigmoid);

  l = l.getConnections().get(1).getOutputLayer();
  assertTrue(bplc.getConnectionCalculator(l) instanceof BackpropagationMaxPooling2D);
  assertTrue(bplc.getConnectionCalculator(l.getConnections().get(1).getInputLayer()) instanceof BackPropagationConv2D);

  l = l.getConnections().get(2).getOutputLayer();
  assertTrue(bplc.getConnectionCalculator(l) instanceof BackPropagationSigmoid);

  l = l.getConnections().get(1).getOutputLayer();
  assertTrue(bplc.getConnectionCalculator(l.getConnections().get(1).getInputLayer()) instanceof BackPropagationSigmoid);
  assertTrue(bplc.getConnectionCalculator(l) instanceof BackPropagationSigmoid);

  l = l.getConnections().get(2).getOutputLayer();
  assertTrue(bplc.getConnectionCalculator(l.getConnections().get(1).getInputLayer()) instanceof BackPropagationSigmoid);
  assertTrue(bplc.getConnectionCalculator(l) == null);

  // simple convolutional network
  Environment.getInstance().setUseWeightsSharedMemory(true);
  nn = NNFactory.convNN(new int[][] { { 28, 28, 1 }, { 1, 1 }, {10} }, false);
  nn.setLayerCalculator(NNFactory.lcSigmoid(nn, null));
  NNFactory.lcMaxPooling(nn);

  l = nn.getInputLayer();
  assertTrue(lc.getConnectionCalculator(l) instanceof ConnectionCalculatorConv);

  l = l.getConnections().get(0).getOutputLayer();
  assertTrue(lc.getConnectionCalculator(l) instanceof AparapiSubsampling2D);

  l = l.getConnections().get(0).getOutputLayer();
  assertTrue(lc.getConnectionCalculator(l) instanceof AparapiSigmoid);

  bpt = TrainerFactory.backPropagation(nn, null, null, new MultipleNeuronsOutputError(), null, 0.02f, 0.5f, 0f, 0f, 0f, 1, 1, 1);
  bplc = (BackPropagationLayerCalculatorImpl) bpt.getBPLayerCalculator();

  l = nn.getInputLayer();
  assertTrue(lc.getConnectionCalculator(l) instanceof BackpropagationMaxPooling2D);

  l = l.getConnections().get(0).getOutputLayer();
  assertTrue(lc.getConnectionCalculator(l) instanceof BackPropagationSigmoid);
    }
View Full Code Here

TOP

Related Classes of com.github.neuralnetworks.architecture.Layer

Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Oracle Corporation (formerly Sun Microsystems, Inc.). Contact: software#gmail.com.