org.deeplearning4j.nn.api.ParamInitializer Java Examples
The following examples show how to use
org.deeplearning4j.nn.api.ParamInitializer.
You can vote up the examples you like or vote down the ones you don't like, and you can go to the original project or source file by following the links above each example. You may also check out the related API usage on the sidebar.
Example #1
Source File: DropConnect.java From deeplearning4j with Apache License 2.0 | 6 votes |
/**
 * Applies DropConnect to the requested parameter: during training, entries of the
 * weight (and, if {@code applyToBiases}, bias) array are randomly zeroed with retain
 * probability {@code p}; outside training the parameter is returned unchanged.
 *
 * @param layer        layer whose parameter is requested
 * @param paramKey     name of the parameter within the layer's param table
 * @param iteration    current iteration (used by the retain-probability schedule)
 * @param epoch        current epoch (used by the retain-probability schedule)
 * @param train        whether we are in the training phase
 * @param workspaceMgr workspace manager used to allocate the dropped-out copy
 * @return the (possibly dropped-out) parameter array
 */
@Override
public INDArray getParameter(Layer layer, String paramKey, int iteration, int epoch, boolean train, LayerWorkspaceMgr workspaceMgr) {
    ParamInitializer init = layer.conf().getLayer().initializer();
    INDArray param = layer.getParam(paramKey);

    // Resolve the retain probability, preferring the schedule when one is configured.
    double p;
    if (weightRetainProbSchedule == null) {
        p = weightRetainProb;
    } else {
        p = weightRetainProbSchedule.valueAt(iteration, epoch);
    }

    // FIX: the original condition was
    //   train && isWeightParam(...) || (applyToBiases && isBiasParam(...))
    // Because && binds tighter than ||, bias dropout was applied even when
    // train == false. Dropout must only ever be applied during training.
    boolean isWeight = init.isWeightParam(layer.conf().getLayer(), paramKey);
    boolean isBias = applyToBiases && init.isBiasParam(layer.conf().getLayer(), paramKey);
    if (train && (isWeight || isBias)) {
        INDArray out = workspaceMgr.createUninitialized(ArrayType.INPUT, param.dataType(), param.shape(), param.ordering());
        Nd4j.getExecutioner().exec(new DropOut(param, out, p));
        return out;
    }
    return param;
}
Example #2
Source File: WeightNoise.java From deeplearning4j with Apache License 2.0 | 6 votes |
/**
 * Applies additive or multiplicative noise to the requested parameter during training.
 * Noise is sampled from the configured distribution; outside training the parameter
 * is returned unchanged.
 *
 * @param layer        layer whose parameter is requested
 * @param paramKey     name of the parameter within the layer's param table
 * @param iteration    current iteration (unused here, part of the interface contract)
 * @param epoch        current epoch (unused here, part of the interface contract)
 * @param train        whether we are in the training phase
 * @param workspaceMgr workspace manager used to allocate the noised copy
 * @return the (possibly noised) parameter array
 */
@Override
public INDArray getParameter(Layer layer, String paramKey, int iteration, int epoch, boolean train, LayerWorkspaceMgr workspaceMgr) {
    ParamInitializer init = layer.conf().getLayer().initializer();
    INDArray param = layer.getParam(paramKey);

    // FIX: the original condition was
    //   train && isWeightParam(...) || (applyToBias && isBiasParam(...))
    // Because && binds tighter than ||, bias noise was applied even when
    // train == false. Weight noise is a training-time regularizer only.
    boolean isWeight = init.isWeightParam(layer.conf().getLayer(), paramKey);
    boolean isBias = applyToBias && init.isBiasParam(layer.conf().getLayer(), paramKey);
    if (train && (isWeight || isBias)) {
        org.nd4j.linalg.api.rng.distribution.Distribution dist = Distributions.createDistribution(distribution);
        INDArray noise = dist.sample(param.ulike());
        INDArray out = workspaceMgr.createUninitialized(ArrayType.INPUT, param.dataType(), param.shape(), param.ordering());
        if (additive) {
            Nd4j.getExecutioner().exec(new AddOp(param, noise, out));
        } else {
            Nd4j.getExecutioner().exec(new MulOp(param, noise, out));
        }
        return out;
    }
    return param;
}
Example #3
Source File: BaseConstraint.java From deeplearning4j with Apache License 2.0 | 6 votes |
/**
 * Applies this constraint to every parameter of the layer whose name is contained
 * in the configured {@code params} set.
 *
 * @param layer     layer whose parameters should be constrained
 * @param iteration current iteration (unused here, part of the interface contract)
 * @param epoch     current epoch (unused here, part of the interface contract)
 */
@Override
public void applyConstraint(Layer layer, int iteration, int epoch) {
    Map<String, INDArray> paramTable = layer.paramTable();
    if (paramTable == null || paramTable.isEmpty()) {
        return;
    }
    // FIX: the original first tested params.contains(...) WITHOUT a null guard
    // (NPE when params == null) and then repeated the same check with a guard,
    // applying the constraint TWICE to each matching parameter. Apply it exactly
    // once, and treat a null params set as "no parameters constrained".
    // Also removed an unused local: ParamInitializer i = layer.conf().getLayer().initializer();
    if (params == null) {
        return;
    }
    for (Map.Entry<String, INDArray> e : paramTable.entrySet()) {
        if (params.contains(e.getKey())) {
            apply(e.getValue());
        }
    }
}
Example #4
Source File: CDAEConfiguration.java From jstarcraft-rns with Apache License 2.0 | 5 votes |
// Returns the parameter initializer for the CDAE layer.
// NOTE(review): the original comments here were copied verbatim from the
// CustomLayer example and claimed DefaultParamInitializer is returned; this
// layer actually returns its own CDAE-specific initializer instance
// (cdaeParameter) — presumably a field of this configuration class; confirm
// where it is assigned.
@Override
public ParamInitializer initializer() {
    return cdaeParameter;
}
Example #5
Source File: Bidirectional.java From deeplearning4j with Apache License 2.0 | 5 votes |
/**
 * Returns the parameter initializer for this bidirectional wrapper, creating it
 * lazily on first access and caching it in the {@code initializer} field.
 */
@Override
public ParamInitializer initializer() {
    if (initializer != null) {
        return initializer;
    }
    initializer = new BidirectionalParamInitializer(this);
    return initializer;
}
Example #6
Source File: FrozenLayerParamInitializer.java From deeplearning4j with Apache License 2.0 | 5 votes |
/**
 * Delegates gradient-view mapping to the wrapped (frozen) layer's own initializer.
 * The inner layer is temporarily swapped into the configuration so the delegate
 * operates on it, then the frozen wrapper is restored.
 *
 * @param conf         network configuration currently holding the FrozenLayer wrapper
 * @param gradientView flattened gradient view to be split per parameter
 * @return map from parameter name to its gradient sub-view
 */
@Override
public Map<String, INDArray> getGradientsFromFlattened(NeuralNetConfiguration conf, INDArray gradientView) {
    FrozenLayer fl = (FrozenLayer) conf.getLayer();
    Layer innerLayer = fl.getLayer();
    ParamInitializer initializer = innerLayer.initializer();
    conf.setLayer(innerLayer);
    try {
        return initializer.getGradientsFromFlattened(conf, gradientView);
    } finally {
        // FIX: restore the frozen wrapper even if the delegate throws, so the
        // configuration is never left pointing at the inner layer.
        conf.setLayer(fl);
    }
}
Example #7
Source File: FrozenLayerParamInitializer.java From deeplearning4j with Apache License 2.0 | 5 votes |
/**
 * Delegates parameter initialization to the wrapped (frozen) layer's own initializer.
 * The inner layer is temporarily swapped into the configuration so the delegate
 * operates on it, then the frozen wrapper is restored.
 *
 * @param conf             network configuration currently holding the FrozenLayer wrapper
 * @param paramsView       flattened parameter view to be split per parameter
 * @param initializeParams whether parameter values should be (re)initialized
 * @return map from parameter name to its parameter sub-view
 */
@Override
public Map<String, INDArray> init(NeuralNetConfiguration conf, INDArray paramsView, boolean initializeParams) {
    FrozenLayer fl = (FrozenLayer) conf.getLayer();
    Layer innerLayer = fl.getLayer();
    ParamInitializer initializer = innerLayer.initializer();
    conf.setLayer(innerLayer);
    try {
        return initializer.init(conf, paramsView, initializeParams);
    } finally {
        // FIX: restore the frozen wrapper even if the delegate throws, so the
        // configuration is never left pointing at the inner layer.
        conf.setLayer(fl);
    }
}
Example #8
Source File: CustomLayer.java From deeplearning4j with Apache License 2.0 | 5 votes |
@Override public ParamInitializer initializer() { //This method returns the parameter initializer for this type of layer //In this case, we can use the DefaultParamInitializer, which is the same one used for DenseLayer //For more complex layers, you may need to implement a custom parameter initializer //See the various parameter initializers here: //https://github.com/deeplearning4j/deeplearning4j/tree/master/deeplearning4j-core/src/main/java/org/deeplearning4j/nn/params return DefaultParamInitializer.getInstance(); }
Example #9
Source File: FrozenLayerWithBackpropParamInitializer.java From deeplearning4j with Apache License 2.0 | 5 votes |
/**
 * Delegates gradient-view mapping to the underlying layer's own initializer.
 * The underlying layer is temporarily swapped into the configuration so the
 * delegate operates on it, then the frozen wrapper is restored.
 *
 * @param conf         network configuration currently holding the FrozenLayerWithBackprop wrapper
 * @param gradientView flattened gradient view to be split per parameter
 * @return map from parameter name to its gradient sub-view
 */
@Override
public Map<String, INDArray> getGradientsFromFlattened(NeuralNetConfiguration conf, INDArray gradientView) {
    FrozenLayerWithBackprop fl = (FrozenLayerWithBackprop) conf.getLayer();
    Layer innerLayer = fl.getUnderlying();
    ParamInitializer initializer = innerLayer.initializer();
    conf.setLayer(innerLayer);
    try {
        return initializer.getGradientsFromFlattened(conf, gradientView);
    } finally {
        // FIX: restore the frozen wrapper even if the delegate throws, so the
        // configuration is never left pointing at the underlying layer.
        conf.setLayer(fl);
    }
}
Example #10
Source File: FrozenLayerWithBackpropParamInitializer.java From deeplearning4j with Apache License 2.0 | 5 votes |
/**
 * Delegates parameter initialization to the underlying layer's own initializer.
 * The underlying layer is temporarily swapped into the configuration so the
 * delegate operates on it, then the frozen wrapper is restored.
 *
 * @param conf             network configuration currently holding the FrozenLayerWithBackprop wrapper
 * @param paramsView       flattened parameter view to be split per parameter
 * @param initializeParams whether parameter values should be (re)initialized
 * @return map from parameter name to its parameter sub-view
 */
@Override
public Map<String, INDArray> init(NeuralNetConfiguration conf, INDArray paramsView, boolean initializeParams) {
    FrozenLayerWithBackprop fl = (FrozenLayerWithBackprop) conf.getLayer();
    Layer innerLayer = fl.getUnderlying();
    ParamInitializer initializer = innerLayer.initializer();
    conf.setLayer(innerLayer);
    try {
        return initializer.init(conf, paramsView, initializeParams);
    } finally {
        // FIX: restore the frozen wrapper even if the delegate throws, so the
        // configuration is never left pointing at the underlying layer.
        conf.setLayer(fl);
    }
}
Example #11
Source File: NoParamLayer.java From deeplearning4j with Apache License 2.0 | 4 votes |
/** This layer has no trainable parameters, so the shared empty initializer is returned. */
@Override
public ParamInitializer initializer() {
    ParamInitializer noParams = EmptyParamInitializer.getInstance();
    return noParams;
}
Example #12
Source File: CustomOutputLayer.java From deeplearning4j with Apache License 2.0 | 4 votes |
/** Uses the standard weight+bias parameter layout, shared with DenseLayer. */
@Override
public ParamInitializer initializer() {
    ParamInitializer standardInit = DefaultParamInitializer.getInstance();
    return standardInit;
}
Example #13
Source File: LocalResponseNormalization.java From deeplearning4j with Apache License 2.0 | 4 votes |
/** Local response normalization has no trainable parameters. */
@Override
public ParamInitializer initializer() {
    ParamInitializer noParams = EmptyParamInitializer.getInstance();
    return noParams;
}
Example #14
Source File: SeparableConvolution2D.java From deeplearning4j with Apache License 2.0 | 4 votes |
/** Returns the initializer handling depthwise + pointwise separable-convolution parameters. */
@Override
public ParamInitializer initializer() {
    ParamInitializer sepConvInit = SeparableConvolutionParamInitializer.getInstance();
    return sepConvInit;
}
Example #15
Source File: DenseLayer.java From deeplearning4j with Apache License 2.0 | 4 votes |
/** Dense layers use the standard weight+bias parameter initializer. */
@Override
public ParamInitializer initializer() {
    ParamInitializer standardInit = DefaultParamInitializer.getInstance();
    return standardInit;
}
Example #16
Source File: GlobalPoolingLayer.java From deeplearning4j with Apache License 2.0 | 4 votes |
/** Global pooling has no trainable parameters. */
@Override
public ParamInitializer initializer() {
    ParamInitializer noParams = EmptyParamInitializer.getInstance();
    return noParams;
}
Example #17
Source File: CustomLayer.java From deeplearning4j with Apache License 2.0 | 4 votes |
/** This custom layer shares DenseLayer's weight+bias parameter layout. */
@Override
public ParamInitializer initializer() {
    ParamInitializer standardInit = DefaultParamInitializer.getInstance();
    return standardInit;
}
Example #18
Source File: DeepFMOutputConfiguration.java From jstarcraft-rns with Apache License 2.0 | 4 votes |
/** The DeepFM output layer uses the standard weight+bias parameter initializer. */
@Override
public ParamInitializer initializer() {
    ParamInitializer standardInit = DefaultParamInitializer.getInstance();
    return standardInit;
}
Example #19
Source File: GravesBidirectionalLSTM.java From deeplearning4j with Apache License 2.0 | 4 votes |
/** Returns the initializer for the bidirectional LSTM's forward and backward parameter sets. */
@Override
public ParamInitializer initializer() {
    ParamInitializer biLstmInit = GravesBidirectionalLSTMParamInitializer.getInstance();
    return biLstmInit;
}
Example #20
Source File: MaskLayer.java From deeplearning4j with Apache License 2.0 | 4 votes |
/** Mask layers have no trainable parameters. */
@Override
public ParamInitializer initializer() {
    ParamInitializer noParams = EmptyParamInitializer.getInstance();
    return noParams;
}
Example #21
Source File: EmbeddingLayer.java From deeplearning4j with Apache License 2.0 | 4 votes |
/** Embedding layers use their dedicated embedding-table parameter initializer. */
@Override
public ParamInitializer initializer() {
    ParamInitializer embeddingInit = EmbeddingLayerParamInitializer.getInstance();
    return embeddingInit;
}
Example #22
Source File: Convolution3D.java From deeplearning4j with Apache License 2.0 | 4 votes |
/** Returns the initializer for 3D convolution kernel and bias parameters. */
@Override
public ParamInitializer initializer() {
    ParamInitializer conv3dInit = Convolution3DParamInitializer.getInstance();
    return conv3dInit;
}
Example #23
Source File: VariationalAutoencoder.java From deeplearning4j with Apache License 2.0 | 4 votes |
/** Returns the initializer for the VAE's encoder/decoder parameter sets. */
@Override
public ParamInitializer initializer() {
    ParamInitializer vaeInit = VariationalAutoencoderParamInitializer.getInstance();
    return vaeInit;
}
Example #24
Source File: DropoutLayer.java From deeplearning4j with Apache License 2.0 | 4 votes |
/** Dropout layers have no trainable parameters. */
@Override
public ParamInitializer initializer() {
    ParamInitializer noParams = EmptyParamInitializer.getInstance();
    return noParams;
}
Example #25
Source File: Cnn3DLossLayer.java From deeplearning4j with Apache License 2.0 | 4 votes |
/** Loss layers carry no trainable parameters of their own. */
@Override
public ParamInitializer initializer() {
    ParamInitializer noParams = EmptyParamInitializer.getInstance();
    return noParams;
}
Example #26
Source File: OutputLayer.java From deeplearning4j with Apache License 2.0 | 4 votes |
/** Output layers use the standard weight+bias parameter initializer. */
@Override
public ParamInitializer initializer() {
    ParamInitializer standardInit = DefaultParamInitializer.getInstance();
    return standardInit;
}
Example #27
Source File: LossLayer.java From deeplearning4j with Apache License 2.0 | 4 votes |
/** Loss layers carry no trainable parameters of their own. */
@Override
public ParamInitializer initializer() {
    ParamInitializer noParams = EmptyParamInitializer.getInstance();
    return noParams;
}
Example #28
Source File: BaseWrapperLayer.java From deeplearning4j with Apache License 2.0 | 4 votes |
/** Wrapper layers delegate parameter handling via the shared wrapper initializer. */
@Override
public ParamInitializer initializer() {
    ParamInitializer wrapperInit = WrapperLayerParamInitializer.getInstance();
    return wrapperInit;
}
Example #29
Source File: Yolo2OutputLayer.java From deeplearning4j with Apache License 2.0 | 4 votes |
/** The YOLOv2 output layer has no trainable parameters of its own. */
@Override
public ParamInitializer initializer() {
    ParamInitializer noParams = EmptyParamInitializer.getInstance();
    return noParams;
}
Example #30
Source File: CustomLayer.java From deeplearning4j with Apache License 2.0 | 4 votes |
/** This custom layer shares DenseLayer's weight+bias parameter layout. */
@Override
public ParamInitializer initializer() {
    ParamInitializer standardInit = DefaultParamInitializer.getInstance();
    return standardInit;
}