Java Code Examples for org.nd4j.linalg.activations.Activation#SELU
The following examples show how to use org.nd4j.linalg.activations.Activation#SELU.
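SELU, the scaled exponential linear unit, computes selu(x) = λx for x > 0 and λα(e^x − 1) for x ≤ 0, with the fixed constants λ ≈ 1.0507 and α ≈ 1.6733 used by self-normalizing neural networks (Klambauer et al., 2017). Before the examples, here is a minimal, hypothetical sketch of selecting Activation.SELU in a deeplearning4j layer configuration; the class name SeluSketch and all layer sizes are illustrative assumptions, not taken from the examples below.

import org.deeplearning4j.nn.conf.MultiLayerConfiguration;
import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
import org.deeplearning4j.nn.conf.layers.DenseLayer;
import org.deeplearning4j.nn.conf.layers.OutputLayer;
import org.deeplearning4j.nn.multilayer.MultiLayerNetwork;
import org.nd4j.linalg.activations.Activation;
import org.nd4j.linalg.lossfunctions.LossFunctions.LossFunction;

public class SeluSketch {
    public static void main(String[] args) {
        // Hypothetical layer sizes, chosen only to make the sketch self-contained
        MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                .seed(12345L)
                .list()
                .layer(new DenseLayer.Builder().nIn(10).nOut(20)
                        .activation(Activation.SELU)        // the activation this page documents
                        .build())
                .layer(new OutputLayer.Builder(LossFunction.MSE)
                        .nIn(20).nOut(1)
                        .activation(Activation.IDENTITY)    // linear output for regression
                        .build())
                .build();

        MultiLayerNetwork net = new MultiLayerNetwork(conf);
        net.init();                                         // network is now ready to fit(...)
    }
}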
Example 1
Source File: KerasActivationUtils.java From deeplearning4j with Apache License 2.0
/**
 * Map Keras to DL4J activation functions.
 *
 * @param kerasActivation String containing Keras activation function name
 * @param conf            Keras layer configuration
 * @return Activation enum value containing DL4J activation function name
 */
public static Activation mapToActivation(String kerasActivation, KerasLayerConfiguration conf)
        throws UnsupportedKerasConfigurationException {
    Activation dl4jActivation;
    if (kerasActivation.equals(conf.getKERAS_ACTIVATION_SOFTMAX())) {
        dl4jActivation = Activation.SOFTMAX;
    } else if (kerasActivation.equals(conf.getKERAS_ACTIVATION_SOFTPLUS())) {
        dl4jActivation = Activation.SOFTPLUS;
    } else if (kerasActivation.equals(conf.getKERAS_ACTIVATION_SOFTSIGN())) {
        dl4jActivation = Activation.SOFTSIGN;
    } else if (kerasActivation.equals(conf.getKERAS_ACTIVATION_RELU())) {
        dl4jActivation = Activation.RELU;
    } else if (kerasActivation.equals(conf.getKERAS_ACTIVATION_RELU6())) {
        dl4jActivation = Activation.RELU6;
    } else if (kerasActivation.equals(conf.getKERAS_ACTIVATION_ELU())) {
        dl4jActivation = Activation.ELU;
    } else if (kerasActivation.equals(conf.getKERAS_ACTIVATION_SELU())) {
        dl4jActivation = Activation.SELU;
    } else if (kerasActivation.equals(conf.getKERAS_ACTIVATION_TANH())) {
        dl4jActivation = Activation.TANH;
    } else if (kerasActivation.equals(conf.getKERAS_ACTIVATION_SIGMOID())) {
        dl4jActivation = Activation.SIGMOID;
    } else if (kerasActivation.equals(conf.getKERAS_ACTIVATION_HARD_SIGMOID())) {
        dl4jActivation = Activation.HARDSIGMOID;
    } else if (kerasActivation.equals(conf.getKERAS_ACTIVATION_LINEAR())) {
        dl4jActivation = Activation.IDENTITY;
    } else if (kerasActivation.equals(conf.getKERAS_ACTIVATION_SWISH())) {
        dl4jActivation = Activation.SWISH;
    } else {
        throw new UnsupportedKerasConfigurationException(
                "Unknown Keras activation function " + kerasActivation);
    }
    return dl4jActivation;
}
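A hedged usage sketch for the mapper above: it assumes deeplearning4j's Keras model-import module on the classpath, where Keras2LayerConfiguration is the Keras-2 subclass of KerasLayerConfiguration and its getKERAS_ACTIVATION_SELU() returns the Keras name "selu", so the call below should yield Activation.SELU. The string literal and the surrounding try/catch are illustrative only.

// Sketch only: Keras2LayerConfiguration is assumed to be the Keras-2 variant
// of KerasLayerConfiguration shipped with the keras model-import module.
KerasLayerConfiguration conf = new Keras2LayerConfiguration();
try {
    Activation act = KerasActivationUtils.mapToActivation("selu", conf);
    // act == Activation.SELU, matching conf.getKERAS_ACTIVATION_SELU()
} catch (UnsupportedKerasConfigurationException e) {
    // Raised for any activation name the mapper does not recognize
}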
Example 2
Source File: OutputLayerTest.java From deeplearning4j with Apache License 2.0
@Test
public void testCnnLossLayer() {
    for (WorkspaceMode ws : WorkspaceMode.values()) {
        log.info("*** Testing workspace: " + ws);

        for (Activation a : new Activation[]{Activation.TANH, Activation.SELU}) {
            //Check that (A+identity) is equal to (identity+A), for activation A
            //i.e., should get same output and weight gradients for both

            MultiLayerConfiguration conf1 = new NeuralNetConfiguration.Builder()
                    .seed(12345L)
                    .updater(new NoOp())
                    .convolutionMode(ConvolutionMode.Same)
                    .inferenceWorkspaceMode(ws)
                    .trainingWorkspaceMode(ws)
                    .list()
                    .layer(new ConvolutionLayer.Builder().nIn(3).nOut(4).activation(Activation.IDENTITY)
                            .kernelSize(2, 2).stride(1, 1)
                            .dist(new NormalDistribution(0, 1.0))
                            .updater(new NoOp()).build())
                    .layer(new CnnLossLayer.Builder(LossFunction.MSE)
                            .activation(a)
                            .build())
                    .build();

            MultiLayerConfiguration conf2 = new NeuralNetConfiguration.Builder()
                    .seed(12345L)
                    .updater(new NoOp())
                    .convolutionMode(ConvolutionMode.Same)
                    .inferenceWorkspaceMode(ws)
                    .trainingWorkspaceMode(ws)
                    .list()
                    .layer(new ConvolutionLayer.Builder().nIn(3).nOut(4).activation(a)
                            .kernelSize(2, 2).stride(1, 1)
                            .dist(new NormalDistribution(0, 1.0))
                            .updater(new NoOp()).build())
                    .layer(new CnnLossLayer.Builder(LossFunction.MSE)
                            .activation(Activation.IDENTITY)
                            .build())
                    .build();

            MultiLayerNetwork mln = new MultiLayerNetwork(conf1);
            mln.init();

            MultiLayerNetwork mln2 = new MultiLayerNetwork(conf2);
            mln2.init();

            mln2.setParams(mln.params());

            INDArray in = Nd4j.rand(new int[]{3, 3, 5, 5});

            INDArray out1 = mln.output(in);
            INDArray out2 = mln2.output(in);

            assertEquals(out1, out2);

            INDArray labels = Nd4j.rand(out1.shape());

            mln.setInput(in);
            mln.setLabels(labels);

            mln2.setInput(in);
            mln2.setLabels(labels);

            mln.computeGradientAndScore();
            mln2.computeGradientAndScore();

            assertEquals(mln.score(), mln2.score(), 1e-6);
            assertEquals(mln.gradient().gradient(), mln2.gradient().gradient());

            //Also check computeScoreForExamples
            INDArray in2a = Nd4j.rand(new int[]{1, 3, 5, 5});
            INDArray labels2a = Nd4j.rand(new int[]{1, 4, 5, 5});
            INDArray in2 = Nd4j.concat(0, in2a, in2a);
            INDArray labels2 = Nd4j.concat(0, labels2a, labels2a);

            INDArray s = mln.scoreExamples(new DataSet(in2, labels2), false);
            assertArrayEquals(new long[]{2, 1}, s.shape());
            assertEquals(s.getDouble(0), s.getDouble(1), 1e-6);

            TestUtils.testModelSerialization(mln);
        }
    }
}
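A note on the design of this test: applying an activation function is a pointwise, parameter-free operation, so moving it from the convolution layer into the CnnLossLayer (or the reverse) should leave the forward output, the score, and the weight gradients numerically unchanged once both networks share the same parameters. The test exercises that equivalence for both TANH and SELU across every workspace mode. Example 3 below repeats the same check through the ComputationGraph API rather than MultiLayerNetwork.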
Example 3
Source File: OutputLayerTest.java From deeplearning4j with Apache License 2.0
@Test
public void testCnnLossLayerCompGraph() {
    for (WorkspaceMode ws : WorkspaceMode.values()) {
        log.info("*** Testing workspace: " + ws);

        for (Activation a : new Activation[]{Activation.TANH, Activation.SELU}) {
            //Check that (A+identity) is equal to (identity+A), for activation A
            //i.e., should get same output and weight gradients for both

            ComputationGraphConfiguration conf1 = new NeuralNetConfiguration.Builder()
                    .seed(12345L)
                    .updater(new NoOp())
                    .convolutionMode(ConvolutionMode.Same)
                    .inferenceWorkspaceMode(ws)
                    .trainingWorkspaceMode(ws)
                    .graphBuilder()
                    .addInputs("in")
                    .addLayer("0", new ConvolutionLayer.Builder().nIn(3).nOut(4).activation(Activation.IDENTITY)
                            .kernelSize(2, 2).stride(1, 1)
                            .dist(new NormalDistribution(0, 1.0))
                            .updater(new NoOp()).build(), "in")
                    .addLayer("1", new CnnLossLayer.Builder(LossFunction.MSE)
                            .activation(a)
                            .build(), "0")
                    .setOutputs("1")
                    .build();

            ComputationGraphConfiguration conf2 = new NeuralNetConfiguration.Builder()
                    .seed(12345L)
                    .updater(new NoOp())
                    .convolutionMode(ConvolutionMode.Same)
                    .inferenceWorkspaceMode(ws)
                    .trainingWorkspaceMode(ws)
                    .graphBuilder()
                    .addInputs("in")
                    .addLayer("0", new ConvolutionLayer.Builder().nIn(3).nOut(4).activation(a)
                            .kernelSize(2, 2).stride(1, 1)
                            .dist(new NormalDistribution(0, 1.0))
                            .updater(new NoOp()).build(), "in")
                    .addLayer("1", new CnnLossLayer.Builder(LossFunction.MSE)
                            .activation(Activation.IDENTITY)
                            .build(), "0")
                    .setOutputs("1")
                    .build();

            ComputationGraph graph = new ComputationGraph(conf1);
            graph.init();

            ComputationGraph graph2 = new ComputationGraph(conf2);
            graph2.init();

            graph2.setParams(graph.params());

            INDArray in = Nd4j.rand(new int[]{3, 3, 5, 5});

            INDArray out1 = graph.outputSingle(in);
            INDArray out2 = graph2.outputSingle(in);

            assertEquals(out1, out2);

            INDArray labels = Nd4j.rand(out1.shape());

            graph.setInput(0, in);
            graph.setLabels(labels);

            graph2.setInput(0, in);
            graph2.setLabels(labels);

            graph.computeGradientAndScore();
            graph2.computeGradientAndScore();

            assertEquals(graph.score(), graph2.score(), 1e-6);
            assertEquals(graph.gradient().gradient(), graph2.gradient().gradient());

            //Also check computeScoreForExamples
            INDArray in2a = Nd4j.rand(new int[]{1, 3, 5, 5});
            INDArray labels2a = Nd4j.rand(new int[]{1, 4, 5, 5});
            INDArray in2 = Nd4j.concat(0, in2a, in2a);
            INDArray labels2 = Nd4j.concat(0, labels2a, labels2a);

            INDArray s = graph.scoreExamples(new DataSet(in2, labels2), false);
            assertArrayEquals(new long[]{2, 1}, s.shape());
            assertEquals(s.getDouble(0), s.getDouble(1), 1e-6);

            TestUtils.testModelSerialization(graph);
        }
    }
}