org.nd4j.linalg.activations.impl.ActivationSigmoid Java Examples
The following examples show how to use org.nd4j.linalg.activations.impl.ActivationSigmoid.
Each example is taken from an open-source project; the source file, originating project, and license are noted above it.
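Before the project-sourced examples, here is a minimal, self-contained sketch (not drawn from any of the projects below) of what the class itself does. ActivationSigmoid implements ND4J's IActivation interface: getActivation applies the logistic sigmoid sigma(x) = 1 / (1 + e^(-x)) element-wise to the pre-activations, and backprop combines an incoming gradient epsilon with the sigmoid derivative sigma(x) * (1 - sigma(x)). The class name ActivationSigmoidSketch is ours, and the Pair import path varies across ND4J versions (org.nd4j.linalg.primitives.Pair in the versions these examples target).

import org.nd4j.linalg.activations.IActivation;
import org.nd4j.linalg.activations.impl.ActivationSigmoid;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;
import org.nd4j.linalg.primitives.Pair;

public class ActivationSigmoidSketch {
    public static void main(String[] args) {
        IActivation sigmoid = new ActivationSigmoid();

        // Pre-activations, e.g. the output of a dense layer before the nonlinearity.
        INDArray preOut = Nd4j.create(new double[] { -2.0D, 0.0D, 2.0D });

        // getActivation transforms in place, so pass a copy; result is roughly [0.12, 0.5, 0.88].
        INDArray activated = sigmoid.getActivation(preOut.dup(), true);
        System.out.println(activated);

        // backprop multiplies the incoming gradient by sigma'(x) = sigma(x) * (1 - sigma(x));
        // the first element of the returned pair is the gradient w.r.t. the pre-activations.
        INDArray epsilon = Nd4j.onesLike(preOut);
        Pair<INDArray, INDArray> gradient = sigmoid.backprop(preOut.dup(), epsilon);
        System.out.println(gradient.getFirst());
    }
}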
Example #1
Source File: EmbedLayerTestCase.java From jstarcraft-ai with Apache License 2.0
@Override
protected AbstractLayer<?> getOldFunction() {
    NeuralNetConfiguration neuralNetConfiguration = new NeuralNetConfiguration();
    EmbeddingLayer layerConfiguration = new EmbeddingLayer();
    layerConfiguration.setWeightInit(WeightInit.UNIFORM);
    layerConfiguration.setNIn(5);
    layerConfiguration.setNOut(2);
    layerConfiguration.setActivationFn(new ActivationSigmoid());
    layerConfiguration.setL1(0.01D);
    layerConfiguration.setL1Bias(0.01D);
    layerConfiguration.setL2(0.05D);
    layerConfiguration.setL2Bias(0.05D);
    neuralNetConfiguration.setLayer(layerConfiguration);
    AbstractLayer<?> layer = AbstractLayer.class.cast(layerConfiguration.instantiate(neuralNetConfiguration, null, 0, Nd4j.zeros(12), true));
    layer.setBackpropGradientsViewArray(Nd4j.zeros(12));
    return layer;
}
Example #2
Source File: CudnnLSTMHelper.java From deeplearning4j with Apache License 2.0
@Override
public boolean checkSupported(IActivation gateActivationFn, IActivation activationFn,
                boolean hasPeepholeConnections) {
    boolean supported = checkSupported();
    if (!(gateActivationFn instanceof ActivationSigmoid)) {
        supported = false;
        log.warn("Not supported: Gate activation functions != ActivationSigmoid");
    }
    if (!(activationFn instanceof ActivationTanH)) {
        supported = false;
        log.warn("Not supported: Layer activation functions != ActivationTanH");
    }
    if (hasPeepholeConnections) {
        supported = false;
        log.warn("Not supported: LSTM layers with peephole connections");
    }
    return supported;
}
Example #3
Source File: WeightLayerTestCase.java From jstarcraft-ai with Apache License 2.0
@Override
protected AbstractLayer<?> getOldFunction() {
    NeuralNetConfiguration neuralNetConfiguration = new NeuralNetConfiguration();
    DenseLayer layerConfiguration = new DenseLayer();
    layerConfiguration.setWeightInit(WeightInit.UNIFORM);
    layerConfiguration.setNIn(2);
    layerConfiguration.setNOut(1);
    layerConfiguration.setActivationFn(new ActivationSigmoid());
    layerConfiguration.setL1(0.01D);
    layerConfiguration.setL1Bias(0.01D);
    layerConfiguration.setL2(0.05D);
    layerConfiguration.setL2Bias(0.05D);
    neuralNetConfiguration.setLayer(layerConfiguration);
    AbstractLayer<?> layer = AbstractLayer.class.cast(layerConfiguration.instantiate(neuralNetConfiguration, null, 0, Nd4j.zeros(3), true));
    layer.setBackpropGradientsViewArray(Nd4j.zeros(3));
    return layer;
}
Example #4
Source File: MixtureDensityLossFunctionTestCase.java From jstarcraft-ai with Apache License 2.0
@Test
@Override
public void testGradient() throws Exception {
    EnvironmentContext context = EnvironmentFactory.getContext();
    Future<?> task = context.doTask(() -> {
        LinkedList<KeyValue<IActivation, ActivationFunction>> activationList = new LinkedList<>();
        activationList.add(new KeyValue<>(new ActivationSigmoid(), new SigmoidActivationFunction()));
        activationList.add(new KeyValue<>(new ActivationSoftmax(), new SoftMaxActivationFunction()));
        for (KeyValue<IActivation, ActivationFunction> keyValue : activationList) {
            INDArray array = Nd4j.linspace(-2.5D, 2.0D, 20).reshape(5, 4);
            INDArray marks = Nd4j.create(new double[] { 0D, 1D, 0D, 1D, 0D, 1D, 0D, 1D, 0D, 1D }).reshape(5, 2);
            ILossFunction oldFunction = getOldFunction();
            INDArray value = oldFunction.computeGradient(marks, array.dup(), keyValue.getKey(), null);

            MathMatrix input = getMatrix(array.rows(), array.columns()).copyMatrix(getMatrix(array), false);
            MathMatrix output = getMatrix(input.getRowSize(), input.getColumnSize());
            ActivationFunction function = keyValue.getValue();
            function.forward(input, output);

            MathMatrix gradient = getMatrix(input.getRowSize(), input.getColumnSize());
            LossFunction newFunction = getNewFunction(function);
            newFunction.doCache(getMatrix(marks.rows(), marks.columns()).copyMatrix(getMatrix(marks), false), output);
            newFunction.computeGradient(getMatrix(marks.rows(), marks.columns()).copyMatrix(getMatrix(marks), false), output, null, gradient);
            function.backward(input, gradient, output);

            System.out.println(value);
            System.out.println(output);
            Assert.assertTrue(equalMatrix(output, value));
        }
    });
    task.get();
}
Example #5
Source File: LossFunctionTestCase.java From jstarcraft-ai with Apache License 2.0
@Test
public void testScore() throws Exception {
    EnvironmentContext context = EnvironmentFactory.getContext();
    Future<?> task = context.doTask(() -> {
        LinkedList<KeyValue<IActivation, ActivationFunction>> activationList = new LinkedList<>();
        activationList.add(new KeyValue<>(new ActivationSigmoid(), new SigmoidActivationFunction()));
        activationList.add(new KeyValue<>(new ActivationSoftmax(), new SoftMaxActivationFunction()));
        for (KeyValue<IActivation, ActivationFunction> keyValue : activationList) {
            INDArray array = Nd4j.linspace(-2.5D, 2.0D, 10).reshape(5, 2);
            INDArray marks = Nd4j.create(new double[] { 0D, 1D, 0D, 1D, 0D, 1D, 0D, 1D, 0D, 1D }).reshape(5, 2);
            ILossFunction oldFunction = getOldFunction();
            double value = oldFunction.computeScore(marks, array.dup(), keyValue.getKey(), null, false);

            DenseMatrix input = getMatrix(array);
            DenseMatrix output = DenseMatrix.valueOf(input.getRowSize(), input.getColumnSize());
            ActivationFunction function = keyValue.getValue();
            function.forward(input, output);

            LossFunction newFunction = getNewFunction(function);
            newFunction.doCache(getMatrix(marks), output);
            double score = newFunction.computeScore(getMatrix(marks), output, null);

            System.out.println(value);
            System.out.println(score);
            if (Math.abs(value - score) > MathUtility.EPSILON) {
                Assert.fail();
            }
        }
    });
    task.get();
}
Example #6
Source File: LossFunctionTestCase.java From jstarcraft-ai with Apache License 2.0
@Test
public void testGradient() throws Exception {
    EnvironmentContext context = EnvironmentFactory.getContext();
    Future<?> task = context.doTask(() -> {
        LinkedList<KeyValue<IActivation, ActivationFunction>> activationList = new LinkedList<>();
        activationList.add(new KeyValue<>(new ActivationSigmoid(), new SigmoidActivationFunction()));
        activationList.add(new KeyValue<>(new ActivationSoftmax(), new SoftMaxActivationFunction()));
        for (KeyValue<IActivation, ActivationFunction> keyValue : activationList) {
            INDArray array = Nd4j.linspace(-2.5D, 2.0D, 10).reshape(5, 2);
            INDArray marks = Nd4j.create(new double[] { 0D, 1D, 0D, 1D, 0D, 1D, 0D, 1D, 0D, 1D }).reshape(5, 2);
            ILossFunction oldFunction = getOldFunction();
            INDArray value = oldFunction.computeGradient(marks, array.dup(), keyValue.getKey(), null);

            DenseMatrix input = getMatrix(array);
            DenseMatrix output = DenseMatrix.valueOf(input.getRowSize(), input.getColumnSize());
            ActivationFunction function = keyValue.getValue();
            function.forward(input, output);

            DenseMatrix gradient = DenseMatrix.valueOf(input.getRowSize(), input.getColumnSize());
            LossFunction newFunction = getNewFunction(function);
            newFunction.doCache(getMatrix(marks), output);
            newFunction.computeGradient(getMatrix(marks), output, null, gradient);
            function.backward(input, gradient, output);

            System.out.println(value);
            System.out.println(output);
            Assert.assertTrue(equalMatrix(output, value));
        }
    });
    task.get();
}
Example #7
Source File: MixtureDensityLossFunctionTestCase.java From jstarcraft-ai with Apache License 2.0
@Test
@Override
public void testScore() throws Exception {
    EnvironmentContext context = EnvironmentFactory.getContext();
    Future<?> task = context.doTask(() -> {
        LinkedList<KeyValue<IActivation, ActivationFunction>> activationList = new LinkedList<>();
        activationList.add(new KeyValue<>(new ActivationSigmoid(), new SigmoidActivationFunction()));
        activationList.add(new KeyValue<>(new ActivationSoftmax(), new SoftMaxActivationFunction()));
        for (KeyValue<IActivation, ActivationFunction> keyValue : activationList) {
            INDArray array = Nd4j.linspace(-2.5D, 2.0D, 20).reshape(5, 4);
            INDArray marks = Nd4j.create(new double[] { 0D, 1D, 0D, 1D, 0D, 1D, 0D, 1D, 0D, 1D }).reshape(5, 2);
            ILossFunction oldFunction = getOldFunction();
            float value = (float) oldFunction.computeScore(marks, array.dup(), keyValue.getKey(), null, false);

            MathMatrix input = getMatrix(array.rows(), array.columns()).copyMatrix(getMatrix(array), false);
            MathMatrix output = getMatrix(input.getRowSize(), input.getColumnSize());
            ActivationFunction function = keyValue.getValue();
            function.forward(input, output);

            LossFunction newFunction = getNewFunction(function);
            newFunction.doCache(getMatrix(marks.rows(), marks.columns()).copyMatrix(getMatrix(marks), false), output);
            float score = newFunction.computeScore(getMatrix(marks.rows(), marks.columns()).copyMatrix(getMatrix(marks), false), output, null);

            System.out.println(value);
            System.out.println(score);
            if (!MathUtility.equal(value, score)) {
                Assert.fail();
            }
        }
    });
    task.get();
}
Example #8
Source File: LossFunctionTestCase.java From jstarcraft-rns with Apache License 2.0
@Test
public void testScore() throws Exception {
    EnvironmentContext context = EnvironmentFactory.getContext();
    Future<?> task = context.doTask(() -> {
        LinkedList<KeyValue<IActivation, ActivationFunction>> activationList = new LinkedList<>();
        activationList.add(new KeyValue<>(new ActivationSigmoid(), new SigmoidActivationFunction()));
        // activationList.add(new KeyValue<>(new ActivationSoftmax(), new SoftMaxActivationFunction()));
        for (KeyValue<IActivation, ActivationFunction> keyValue : activationList) {
            INDArray array = Nd4j.linspace(-2.5D, 2.0D, 10).reshape(5, 2);
            INDArray marks = Nd4j.create(new double[] { 0D, 1D, 0D, 1D, 0D, 1D, 0D, 1D, 0D, 1D }).reshape(5, 2);
            ILossFunction oldFunction = getOldFunction(marks);
            double value = oldFunction.computeScore(marks, array.dup(), keyValue.getKey(), null, false);

            Nd4jMatrix input = getMatrix(array.dup());
            Nd4jMatrix output = new Nd4jMatrix(Nd4j.zeros(input.getRowSize(), input.getColumnSize()));
            ActivationFunction function = keyValue.getValue();
            function.forward(input, output);

            LossFunction newFunction = getNewFunction(marks, function);
            newFunction.doCache(getMatrix(marks), output);
            double score = newFunction.computeScore(getMatrix(marks), output, null);

            System.out.println(value);
            System.out.println(score);
            if (Math.abs(value - score) > MathUtility.EPSILON) {
                Assert.fail();
            }
        }
    });
    task.get();
}
Example #9
Source File: LossFunctionTest.java From nd4j with Apache License 2.0
@Test
public void testClippingXENT() throws Exception {
    ILossFunction l1 = new LossBinaryXENT(0);
    ILossFunction l2 = new LossBinaryXENT();

    INDArray labels = Nd4j.getExecutioner().exec(new BernoulliDistribution(Nd4j.create(3, 5), 0.5));
    INDArray preOut = Nd4j.valueArrayOf(3, 5, -1000.0);

    IActivation a = new ActivationSigmoid();

    double score1 = l1.computeScore(labels, preOut.dup(), a, null, false);
    assertTrue(Double.isNaN(score1));

    double score2 = l2.computeScore(labels, preOut.dup(), a, null, false);
    assertFalse(Double.isNaN(score2));

    INDArray grad1 = l1.computeGradient(labels, preOut.dup(), a, null);
    INDArray grad2 = l2.computeGradient(labels, preOut.dup(), a, null);

    MatchCondition c1 = new MatchCondition(grad1, Conditions.isNan());
    MatchCondition c2 = new MatchCondition(grad2, Conditions.isNan());
    int match1 = Nd4j.getExecutioner().exec(c1, Integer.MAX_VALUE).getInt(0);
    int match2 = Nd4j.getExecutioner().exec(c2, Integer.MAX_VALUE).getInt(0);

    assertTrue(match1 > 0);
    assertEquals(0, match2);
}
Example #10
Source File: LossFunctionTest.java From deeplearning4j with Apache License 2.0
@Test
public void testClippingXENT() {
    ILossFunction l1 = new LossBinaryXENT(0);
    ILossFunction l2 = new LossBinaryXENT();

    INDArray labels = Nd4j.getExecutioner().exec(new BernoulliDistribution(Nd4j.create(3, 5), 0.5));
    INDArray preOut = Nd4j.valueArrayOf(3, 5, -1000.0);

    IActivation a = new ActivationSigmoid();

    double score1 = l1.computeScore(labels, preOut.dup(), a, null, false);
    assertTrue(Double.isNaN(score1));

    double score2 = l2.computeScore(labels, preOut.dup(), a, null, false);
    assertFalse(Double.isNaN(score2));

    INDArray grad1 = l1.computeGradient(labels, preOut.dup(), a, null);
    INDArray grad2 = l2.computeGradient(labels, preOut.dup(), a, null);

    MatchCondition c1 = new MatchCondition(grad1, Conditions.isNan());
    MatchCondition c2 = new MatchCondition(grad2, Conditions.isNan());
    int match1 = Nd4j.getExecutioner().exec(c1).getInt(0);
    int match2 = Nd4j.getExecutioner().exec(c2).getInt(0);

    assertTrue(match1 > 0);
    assertEquals(0, match2);
}
Example #11
Source File: BernoulliReconstructionDistribution.java From deeplearning4j with Apache License 2.0
/**
 * @param activationFn Activation function. Sigmoid generally; must be bounded in range 0 to 1
 */
public BernoulliReconstructionDistribution(IActivation activationFn) {
    this.activationFn = activationFn;
    if (!(activationFn instanceof ActivationSigmoid) && !(activationFn instanceof ActivationHardSigmoid)) {
        log.warn("Using BernoulliReconstructionDistribution with activation function \"" + activationFn + "\"."
                        + " Using sigmoid/hard sigmoid is recommended to bound probabilities in range 0 to 1");
    }
}
Example #12
Source File: OCNNOutputLayerTest.java From deeplearning4j with Apache License 2.0
private MultiLayerNetwork getSingleLayer() {
    int numHidden = 2;

    MultiLayerConfiguration configuration = new NeuralNetConfiguration.Builder()
            .seed(12345)
            .weightInit(WeightInit.XAVIER)
            .miniBatch(true)
            .updater(new Adam(0.1))
//            .updater(Nesterovs.builder()
//                    .momentum(0.1)
//                    .learningRateSchedule(new StepSchedule(
//                            ScheduleType.EPOCH,
//                            1e-2,
//                            0.1,
//                            20)).build())
            .list(new DenseLayer.Builder().activation(new ActivationReLU())
                            .nIn(4).nOut(2).build(),
                    new org.deeplearning4j.nn.conf.ocnn.OCNNOutputLayer.Builder()
                            .nIn(2).activation(new ActivationSigmoid()).initialRValue(0.1)
                            .nu(0.1)
                            .hiddenLayerSize(numHidden).build())
            .build();
    MultiLayerNetwork network = new MultiLayerNetwork(configuration);
    network.init();
    network.setListeners(new ScoreIterationListener(1));
    return network;
}
Example #13
Source File: OCNNOutputLayerTest.java From deeplearning4j with Apache License 2.0
public MultiLayerNetwork getGradientCheckNetwork(int numHidden) {
    MultiLayerConfiguration configuration = new NeuralNetConfiguration.Builder()
            .dataType(DataType.DOUBLE)
            .seed(42).updater(new NoOp()).miniBatch(false)
            .list(new DenseLayer.Builder().activation(new ActivationIdentity()).nIn(4).nOut(4).build(),
                    new org.deeplearning4j.nn.conf.ocnn.OCNNOutputLayer.Builder().nIn(4)
                            .nu(0.002).activation(new ActivationSigmoid())
                            .hiddenLayerSize(numHidden).build())
            .build();
    MultiLayerNetwork network = new MultiLayerNetwork(configuration);
    network.init();
    return network;
}
Example #14
Source File: LossFunctionTestCase.java From jstarcraft-rns with Apache License 2.0
@Test
public void testGradient() throws Exception {
    EnvironmentContext context = EnvironmentFactory.getContext();
    Future<?> task = context.doTask(() -> {
        LinkedList<KeyValue<IActivation, ActivationFunction>> activationList = new LinkedList<>();
        activationList.add(new KeyValue<>(new ActivationSigmoid(), new SigmoidActivationFunction()));
        // activationList.add(new KeyValue<>(new ActivationSoftmax(), new SoftMaxActivationFunction()));
        for (KeyValue<IActivation, ActivationFunction> keyValue : activationList) {
            INDArray array = Nd4j.linspace(-2.5D, 2.0D, 10).reshape(5, 2);
            INDArray marks = Nd4j.create(new double[] { 0D, 1D, 0D, 1D, 0D, 1D, 0D, 1D, 0D, 1D }).reshape(5, 2);
            ILossFunction oldFunction = getOldFunction(marks);
            INDArray value = oldFunction.computeGradient(marks, array.dup(), keyValue.getKey(), null);

            Nd4jMatrix input = getMatrix(array.dup());
            Nd4jMatrix output = new Nd4jMatrix(Nd4j.zeros(input.getRowSize(), input.getColumnSize()));
            ActivationFunction function = keyValue.getValue();
            function.forward(input, output);

            Nd4jMatrix gradient = new Nd4jMatrix(Nd4j.zeros(input.getRowSize(), input.getColumnSize()));
            LossFunction newFunction = getNewFunction(marks, function);
            newFunction.doCache(getMatrix(marks), output);
            newFunction.computeGradient(getMatrix(marks), output, null, gradient);
            function.backward(input, gradient, output);

            System.out.println(value);
            System.out.println(output);
            Assert.assertTrue(equalMatrix(output, value));
        }
    });
    task.get();
}
Example #15
Source File: SigmoidActivationFunctionTestCase.java From jstarcraft-ai with Apache License 2.0
@Override
protected IActivation getOldFunction() {
    return new ActivationSigmoid();
}
Example #16
Source File: RegressionTest100b4.java From deeplearning4j with Apache License 2.0
@Test
public void testCustomLayer() throws Exception {

    for (DataType dtype : new DataType[]{DataType.DOUBLE, DataType.FLOAT, DataType.HALF}) {
        String dtypeName = dtype.toString().toLowerCase();

        File f = Resources.asFile("regression_testing/100b4/CustomLayerExample_100b4_" + dtypeName + ".bin");
        MultiLayerNetwork net = MultiLayerNetwork.load(f, true);
        // net = net.clone();

        DenseLayer l0 = (DenseLayer) net.getLayer(0).conf().getLayer();
        assertEquals(new ActivationTanH(), l0.getActivationFn());
        assertEquals(new L2Regularization(0.03), TestUtils.getL2Reg(l0));
        assertEquals(new RmsProp(0.95), l0.getIUpdater());

        CustomLayer l1 = (CustomLayer) net.getLayer(1).conf().getLayer();
        assertEquals(new ActivationTanH(), l1.getActivationFn());
        assertEquals(new ActivationSigmoid(), l1.getSecondActivationFunction());
        assertEquals(new RmsProp(0.95), l1.getIUpdater());

        INDArray outExp;
        File f2 = Resources.asFile("regression_testing/100b4/CustomLayerExample_Output_100b4_" + dtypeName + ".bin");
        try (DataInputStream dis = new DataInputStream(new FileInputStream(f2))) {
            outExp = Nd4j.read(dis);
        }

        INDArray in;
        File f3 = Resources.asFile("regression_testing/100b4/CustomLayerExample_Input_100b4_" + dtypeName + ".bin");
        try (DataInputStream dis = new DataInputStream(new FileInputStream(f3))) {
            in = Nd4j.read(dis);
        }

        assertEquals(dtype, in.dataType());
        assertEquals(dtype, outExp.dataType());
        assertEquals(dtype, net.params().dataType());
        assertEquals(dtype, net.getFlattenedGradients().dataType());
        assertEquals(dtype, net.getUpdater().getStateViewArray().dataType());

        //System.out.println(Arrays.toString(net.params().data().asFloat()));

        INDArray outAct = net.output(in);
        assertEquals(dtype, outAct.dataType());

        assertEquals(dtype, net.getLayerWiseConfigurations().getDataType());
        assertEquals(dtype, net.params().dataType());
        boolean eq = outExp.equalsWithEps(outAct, 0.01);
        assertTrue("Test for dtype: " + dtypeName + "\n" + outExp + " vs " + outAct, eq);
    }
}
Example #17
Source File: GravesBidirectionalLSTMTest.java From deeplearning4j with Apache License 2.0
@Test
public void testGravesBidirectionalLSTMForwardPassHelper() throws Exception {
    //GravesBidirectionalLSTM.activateHelper() has different behaviour (due to optimizations) when forBackprop==true vs false
    //But should otherwise provide identical activations
    Nd4j.getRandom().setSeed(12345);

    final int nIn = 10;
    final int layerSize = 15;
    final int miniBatchSize = 4;
    final int timeSeriesLength = 7;

    final NeuralNetConfiguration conf = new NeuralNetConfiguration.Builder()
            .layer(new org.deeplearning4j.nn.conf.layers.GravesBidirectionalLSTM.Builder().nIn(nIn)
                    .nOut(layerSize)
                    .dist(new UniformDistribution(0, 1)).activation(Activation.TANH).build())
            .build();

    long numParams = conf.getLayer().initializer().numParams(conf);
    INDArray params = Nd4j.create(1, numParams);
    final GravesBidirectionalLSTM lstm =
            (GravesBidirectionalLSTM) conf.getLayer().instantiate(conf, null, 0, params, true, params.dataType());
    final INDArray input = Nd4j.rand(new int[] {miniBatchSize, nIn, timeSeriesLength});
    lstm.setInput(input, LayerWorkspaceMgr.noWorkspaces());

    final INDArray fwdPassFalse = LSTMHelpers.activateHelper(lstm, lstm.conf(), new ActivationSigmoid(),
            lstm.input(),
            lstm.getParam(GravesBidirectionalLSTMParamInitializer.RECURRENT_WEIGHT_KEY_FORWARDS),
            lstm.getParam(GravesBidirectionalLSTMParamInitializer.INPUT_WEIGHT_KEY_FORWARDS),
            lstm.getParam(GravesBidirectionalLSTMParamInitializer.BIAS_KEY_FORWARDS), false, null, null,
            false, true, GravesBidirectionalLSTMParamInitializer.INPUT_WEIGHT_KEY_FORWARDS, null, true,
            null, CacheMode.NONE, LayerWorkspaceMgr.noWorkspaces(), true).fwdPassOutput;

    final INDArray[] fwdPassTrue = LSTMHelpers.activateHelper(lstm, lstm.conf(), new ActivationSigmoid(),
            lstm.input(),
            lstm.getParam(GravesBidirectionalLSTMParamInitializer.RECURRENT_WEIGHT_KEY_FORWARDS),
            lstm.getParam(GravesBidirectionalLSTMParamInitializer.INPUT_WEIGHT_KEY_FORWARDS),
            lstm.getParam(GravesBidirectionalLSTMParamInitializer.BIAS_KEY_FORWARDS), false, null, null,
            true, true, GravesBidirectionalLSTMParamInitializer.INPUT_WEIGHT_KEY_FORWARDS, null, true,
            null, CacheMode.NONE, LayerWorkspaceMgr.noWorkspaces(), true).fwdPassOutputAsArrays;

    //I have no idea what the heck this does --Ben
    for (int i = 0; i < timeSeriesLength; i++) {
        final INDArray sliceFalse = fwdPassFalse.tensorAlongDimension(i, 1, 0);
        final INDArray sliceTrue = fwdPassTrue[i];
        assertTrue(sliceFalse.equals(sliceTrue));
    }
}