Java Code Examples for org.deeplearning4j.nn.conf.layers.FeedForwardLayer#getNOut()

The following examples show how to use org.deeplearning4j.nn.conf.layers.FeedForwardLayer#getNOut(). Each example is taken from an open source project; the source file and project are noted above each snippet.
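As a quick orientation before the examples, here is a minimal sketch (not from any of the projects below; class name and layer sizes are illustrative) showing that getNOut() simply returns the output size configured on a feed-forward layer such as DenseLayer:

import org.deeplearning4j.nn.conf.layers.DenseLayer;
import org.deeplearning4j.nn.conf.layers.FeedForwardLayer;

public class GetNOutDemo {
    public static void main(String[] args) {
        // DenseLayer extends FeedForwardLayer, so getNOut() is available on it.
        FeedForwardLayer layer = new DenseLayer.Builder()
                .nIn(784)   // illustrative input size
                .nOut(128)  // illustrative output size
                .build();
        // getNOut() returns the configured number of output units as a long.
        System.out.println(layer.getNOut()); // prints 128
    }
}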
Example 1
Source File: KerasLayer.java    From deeplearning4j with Apache License 2.0
/**
 * Some DL4J layers need explicit specification of the number of inputs, which Keras infers
 * automatically. This method searches through previous layers until a FeedForwardLayer is
 * found; that layer's nOut value corresponds to the nIn value of this layer.
 *
 * @param previousLayers map of layer names to the layers that precede this one
 * @return number of inputs (nIn) for this layer
 * @throws UnsupportedKerasConfigurationException if no suitable predecessor is found
 */
protected long getNInFromConfig(Map<String, ? extends KerasLayer> previousLayers) throws UnsupportedKerasConfigurationException {
    int size = previousLayers.size();
    int count = 0;
    long nIn;
    String inboundLayerName = inboundLayerNames.get(0);
    while (count <= size) {
        if (previousLayers.containsKey(inboundLayerName)) {
            KerasLayer inbound = previousLayers.get(inboundLayerName);
            try {
                FeedForwardLayer ffLayer = (FeedForwardLayer) inbound.getLayer();
                nIn = ffLayer.getNOut();
                if (nIn > 0)
                    return nIn;
                count++;
                inboundLayerName = inbound.getInboundLayerNames().get(0);
            } catch (Exception e) {
                // Not a FeedForwardLayer; keep walking backwards through the graph.
                count++;
                inboundLayerName = inbound.getInboundLayerNames().get(0);
            }
        } else {
            // Unknown inbound layer name: stop searching instead of looping forever.
            break;
        }
    }
    throw new UnsupportedKerasConfigurationException("Could not determine number of input channels for " +
            "depthwise convolution.");
}
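The search above rests on the fact that a predecessor's nOut becomes the current layer's nIn. A minimal sketch of that relationship (hypothetical sizes; the cast is only needed on DL4J versions where the builder takes an int):

FeedForwardLayer previous = new DenseLayer.Builder().nIn(784).nOut(128).build();
// Wire the next layer's input size from the previous layer's output size.
DenseLayer next = new DenseLayer.Builder()
        .nIn((int) previous.getNOut())
        .nOut(10)
        .build();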
 
Example 2
Source File: MultiLayerNetwork.java    From deeplearning4j with Apache License 2.0
/**
 * Return the layer size (number of units) for the specified layer.<br>
 * Note that the meaning of the "layer size" can depend on the type of layer. For example:<br>
 * - DenseLayer, OutputLayer, recurrent layers: number of units (nOut configuration option)<br>
 * - ConvolutionLayer: the number of output channels (nOut configuration option)<br>
 * - Subsampling layers, global pooling layers, etc: a size of 0 is always returned<br>
 *
 * @param layer Index of the layer to get the size of. Must be in range 0 to nLayers-1 inclusive
 * @return Size of the layer
 */
public int layerSize(int layer) {
    if (layer < 0 || layer >= layers.length) {
        throw new IllegalArgumentException("Invalid layer index: " + layer + ". Layer index must be between 0 and "
                + (layers.length - 1) + " inclusive");
    }
    org.deeplearning4j.nn.conf.layers.Layer conf = layers[layer].conf().getLayer();
    if (conf == null || !(conf instanceof FeedForwardLayer)) {
        return 0;
    }
    FeedForwardLayer ffl = (FeedForwardLayer) conf;

    if (ffl.getNOut() > Integer.MAX_VALUE)
        throw new ND4JArraySizeException();
    return (int) ffl.getNOut();
}
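For context, a short usage sketch of layerSize(int) (not from the project above; class name and sizes are illustrative). For feed-forward layers it reports nOut; for anything else it returns 0:

import org.deeplearning4j.nn.conf.MultiLayerConfiguration;
import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
import org.deeplearning4j.nn.conf.layers.DenseLayer;
import org.deeplearning4j.nn.conf.layers.OutputLayer;
import org.deeplearning4j.nn.multilayer.MultiLayerNetwork;
import org.nd4j.linalg.activations.Activation;
import org.nd4j.linalg.lossfunctions.LossFunctions;

public class LayerSizeDemo {
    public static void main(String[] args) {
        MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                .list()
                .layer(new DenseLayer.Builder().nIn(10).nOut(20).build())
                .layer(new OutputLayer.Builder(LossFunctions.LossFunction.MSE)
                        .activation(Activation.IDENTITY).nIn(20).nOut(3).build())
                .build();
        MultiLayerNetwork net = new MultiLayerNetwork(conf);
        net.init();
        System.out.println(net.layerSize(0)); // 20 (nOut of the DenseLayer)
        System.out.println(net.layerSize(1)); // 3  (nOut of the OutputLayer)
    }
}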
 
Example 3
Source File: CDAEParameter.java    From jstarcraft-rns with Apache License 2.0
@Override
public long numParams(NeuralNetConfiguration conf) {
    FeedForwardLayer layerConf = (FeedForwardLayer) conf.getLayer();
    return super.numParams(conf) + numberOfUsers * layerConf.getNOut(); // add another user weight matrix
}
 
Example 4
Source File: CDAEParameter.java    From jstarcraft-rns with Apache License 2.0
@Override
public Map<String, INDArray> init(NeuralNetConfiguration conf, INDArray paramsView, boolean initializeParams) {
    Map<String, INDArray> params = super.init(conf, paramsView, initializeParams);
    FeedForwardLayer layerConf = (FeedForwardLayer) conf.getLayer();
    long nIn = layerConf.getNIn();
    long nOut = layerConf.getNOut();
    long nWeightParams = nIn * nOut;
    long nUserWeightParams = numberOfUsers * nOut;
    INDArray userWeightView = paramsView.get(new INDArrayIndex[] { NDArrayIndex.point(0), NDArrayIndex.interval(nWeightParams + nOut, nWeightParams + nOut + nUserWeightParams) });
    params.put(USER_KEY, this.createUserWeightMatrix(conf, userWeightView, initializeParams));
    conf.addVariable(USER_KEY);
    return params;
}
 
Example 5
Source File: CDAEParameter.java    From jstarcraft-rns with Apache License 2.0
@Override
public Map<String, INDArray> getGradientsFromFlattened(NeuralNetConfiguration conf, INDArray gradientView) {
    Map<String, INDArray> out = super.getGradientsFromFlattened(conf, gradientView);
    FeedForwardLayer layerConf = (FeedForwardLayer) conf.getLayer();
    long nIn = layerConf.getNIn();
    long nOut = layerConf.getNOut();
    long nWeightParams = nIn * nOut;
    long nUserWeightParams = numberOfUsers * nOut;
    INDArray userWeightGradientView = gradientView.get(NDArrayIndex.point(0), NDArrayIndex.interval(nWeightParams + nOut, nWeightParams + nOut + nUserWeightParams)).reshape('f', numberOfUsers, nOut);
    out.put(USER_KEY, userWeightGradientView);
    return out;
}
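Examples 3 through 5 all depend on the same flattened parameter layout: the standard [nIn, nOut] weight matrix first, then the nOut-sized bias, then the extra [numberOfUsers, nOut] user weight matrix. A plain-Java sketch of the offset arithmetic behind those intervals (all sizes hypothetical):

long nIn = 100, nOut = 50, numberOfUsers = 1000; // hypothetical sizes
long nWeightParams = nIn * nOut;                 // standard weight matrix W
long nUserWeightParams = numberOfUsers * nOut;   // per-user weight matrix U
// Layout of the flattened view:
//   [0, nWeightParams)                                               -> W
//   [nWeightParams, nWeightParams + nOut)                            -> bias b
//   [nWeightParams + nOut, nWeightParams + nOut + nUserWeightParams) -> U
long totalParams = nWeightParams + nOut + nUserWeightParams;
System.out.println(totalParams); // 5000 + 50 + 50000 = 55050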
 
Example 6
Source File: DeepFMParameter.java    From jstarcraft-rns with Apache License 2.0
protected INDArray createWeightMatrix(NeuralNetConfiguration configuration, INDArray view, boolean initialize) {
    FeedForwardLayer layerConfiguration = (FeedForwardLayer) configuration.getLayer();
    if (initialize) {
        Distribution distribution = Distributions.createDistribution(layerConfiguration.getDist());
        return super.createWeightMatrix(numberOfFeatures, layerConfiguration.getNOut(), layerConfiguration.getWeightInit(), distribution, view, true);
    } else {
        return super.createWeightMatrix(numberOfFeatures, layerConfiguration.getNOut(), null, null, view, false);
    }
}
 
Example 7
Source File: DeepFMParameter.java    From jstarcraft-rns with Apache License 2.0
@Override
public Map<String, INDArray> init(NeuralNetConfiguration configuration, INDArray view, boolean initialize) {
    Map<String, INDArray> parameters = Collections.synchronizedMap(new LinkedHashMap<String, INDArray>());
    FeedForwardLayer layerConfiguration = (FeedForwardLayer) configuration.getLayer();
    long numberOfOut = layerConfiguration.getNOut();
    long numberOfWeights = numberOfFeatures * numberOfOut;
    INDArray weight = view.get(new INDArrayIndex[] { NDArrayIndex.point(0), NDArrayIndex.interval(0, numberOfWeights) });
    INDArray bias = view.get(NDArrayIndex.point(0), NDArrayIndex.interval(numberOfWeights, numberOfWeights + numberOfOut));

    parameters.put(WEIGHT_KEY, this.createWeightMatrix(configuration, weight, initialize));
    parameters.put(BIAS_KEY, createBias(configuration, bias, initialize));
    configuration.addVariable(WEIGHT_KEY);
    configuration.addVariable(BIAS_KEY);
    return parameters;
}
 
Example 8
Source File: DeepFMParameter.java    From jstarcraft-rns with Apache License 2.0
@Override
public Map<String, INDArray> getGradientsFromFlattened(NeuralNetConfiguration configuration, INDArray view) {
    Map<String, INDArray> gradients = new LinkedHashMap<>();
    FeedForwardLayer layerConfiguration = (FeedForwardLayer) configuration.getLayer();
    long numberOfOut = layerConfiguration.getNOut();
    long numberOfWeights = numberOfFeatures * numberOfOut;
    INDArray weight = view.get(NDArrayIndex.point(0), NDArrayIndex.interval(0, numberOfWeights)).reshape('f', numberOfWeights, numberOfOut);
    INDArray bias = view.get(NDArrayIndex.point(0), NDArrayIndex.interval(numberOfWeights, numberOfWeights + numberOfOut));
    gradients.put(WEIGHT_KEY, weight);
    gradients.put(BIAS_KEY, bias);
    return gradients;
}
 
Example 9
Source File: DeepFMParameter.java    From jstarcraft-rns with Apache License 2.0
@Override
public long numParams(NeuralNetConfiguration configuration) {
    FeedForwardLayer layerConfiguration = (FeedForwardLayer) configuration.getLayer();
    return numberOfFeatures * layerConfiguration.getNOut() + layerConfiguration.getNOut();
}