org.nd4j.linalg.lossfunctions.impl.LossMCXENT Java Examples
The following examples show how to use
org.nd4j.linalg.lossfunctions.impl.LossMCXENT.
You can go to the original project or source file by following the link above each example.
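Before diving into the examples, here is a minimal, self-contained sketch of the two common ways a LossMCXENT instance is constructed (unweighted, and with per-class weights) and attached to a softmax output layer. The class count and layer sizes are illustrative assumptions, not values taken from any of the projects below.

import org.deeplearning4j.nn.conf.MultiLayerConfiguration;
import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
import org.deeplearning4j.nn.conf.layers.OutputLayer;
import org.nd4j.linalg.activations.Activation;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;
import org.nd4j.linalg.lossfunctions.impl.LossMCXENT;

public class LossMCXENTSketch {
    public static void main(String[] args) {
        //Unweighted multi-class cross entropy: the default constructor
        LossMCXENT unweighted = new LossMCXENT();

        //Weighted variant: one weight per output class, typically used to counter
        //class imbalance. The three weight values here are illustrative only.
        INDArray classWeights = Nd4j.create(new double[]{1.0, 2.0, 0.5});
        LossMCXENT weighted = new LossMCXENT(classWeights);

        //Typical placement: as the loss function of a softmax output layer
        //(nIn/nOut are assumed sizes; nOut must match the weight array length)
        MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                .list()
                .layer(new OutputLayer.Builder(weighted)
                        .nIn(10).nOut(3)
                        .activation(Activation.SOFTMAX)
                        .build())
                .build();
        System.out.println(conf.toJson());
    }
}

The weighted constructor is the one used by the customer-retention example below (Example #13) to compensate for an imbalanced label distribution.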
Example #1
Source File: TestMultiLayerSpace.java From deeplearning4j with Apache License 2.0
@Test
public void testILossFunctionGetsSet() {
    ILossFunction lossFunction = new LossMCXENT(Nd4j.create(new float[] {1f, 2f}, new long[] {1, 2}));

    MultiLayerConfiguration expected =
            new NeuralNetConfiguration.Builder().updater(new Sgd(0.005)).seed(12345).list()
                    .layer(0, new DenseLayer.Builder().nIn(10).nOut(10).build())
                    .layer(1, new DenseLayer.Builder().nIn(10).nOut(10).build())
                    .layer(2, new OutputLayer.Builder().lossFunction(lossFunction)
                            .activation(Activation.SOFTMAX).nIn(10).nOut(5).build())
                    .build();

    MultiLayerSpace mls = new MultiLayerSpace.Builder().updater(new Sgd(0.005)).seed(12345)
            .addLayer(new DenseLayerSpace.Builder().nIn(10).nOut(10).build(), new FixedValue<>(2)) //2 identical layers
            .addLayer(new OutputLayerSpace.Builder().iLossFunction(lossFunction)
                    .activation(Activation.SOFTMAX).nIn(10).nOut(5).build())
            .build();

    int nParams = mls.numParameters();
    assertEquals(0, nParams);

    MultiLayerConfiguration conf = mls.getValue(new double[0]).getMultiLayerConfiguration();
    assertEquals(expected, conf);
}
Example #2
Source File: RegressionTest080.java From deeplearning4j with Apache License 2.0
@Test
public void regressionTestLSTM1() throws Exception {
    File f = Resources.asFile("regression_testing/080/080_ModelSerializer_Regression_LSTM_1.zip");

    MultiLayerNetwork net = ModelSerializer.restoreMultiLayerNetwork(f, true);

    MultiLayerConfiguration conf = net.getLayerWiseConfigurations();
    assertEquals(3, conf.getConfs().size());

    GravesLSTM l0 = (GravesLSTM) conf.getConf(0).getLayer();
    assertTrue(l0.getActivationFn() instanceof ActivationTanH);
    assertEquals(3, l0.getNIn());
    assertEquals(4, l0.getNOut());
    assertEquals(GradientNormalization.ClipElementWiseAbsoluteValue, l0.getGradientNormalization());
    assertEquals(1.5, l0.getGradientNormalizationThreshold(), 1e-5);

    GravesBidirectionalLSTM l1 = (GravesBidirectionalLSTM) conf.getConf(1).getLayer();
    assertTrue(l1.getActivationFn() instanceof ActivationSoftSign);
    assertEquals(4, l1.getNIn());
    assertEquals(4, l1.getNOut());
    assertEquals(GradientNormalization.ClipElementWiseAbsoluteValue, l1.getGradientNormalization());
    assertEquals(1.5, l1.getGradientNormalizationThreshold(), 1e-5);

    RnnOutputLayer l2 = (RnnOutputLayer) conf.getConf(2).getLayer();
    assertEquals(4, l2.getNIn());
    assertEquals(5, l2.getNOut());
    assertTrue(l2.getActivationFn() instanceof ActivationSoftmax);
    assertTrue(l2.getLossFn() instanceof LossMCXENT);
}
Example #3
Source File: RegressionTest060.java From deeplearning4j with Apache License 2.0
@Test
public void regressionTestCGLSTM1() throws Exception {
    File f = Resources.asFile("regression_testing/060/060_ModelSerializer_Regression_CG_LSTM_1.zip");

    ComputationGraph net = ModelSerializer.restoreComputationGraph(f, true);

    ComputationGraphConfiguration conf = net.getConfiguration();
    assertEquals(3, conf.getVertices().size());

    GravesLSTM l0 = (GravesLSTM) ((LayerVertex) conf.getVertices().get("0")).getLayerConf().getLayer();
    assertEquals("tanh", l0.getActivationFn().toString());
    assertEquals(3, l0.getNIn());
    assertEquals(4, l0.getNOut());
    assertEquals(GradientNormalization.ClipElementWiseAbsoluteValue, l0.getGradientNormalization());
    assertEquals(1.5, l0.getGradientNormalizationThreshold(), 1e-5);

    GravesBidirectionalLSTM l1 = (GravesBidirectionalLSTM) ((LayerVertex) conf.getVertices().get("1")).getLayerConf().getLayer();
    assertEquals("softsign", l1.getActivationFn().toString());
    assertEquals(4, l1.getNIn());
    assertEquals(4, l1.getNOut());
    assertEquals(GradientNormalization.ClipElementWiseAbsoluteValue, l1.getGradientNormalization());
    assertEquals(1.5, l1.getGradientNormalizationThreshold(), 1e-5);

    RnnOutputLayer l2 = (RnnOutputLayer) ((LayerVertex) conf.getVertices().get("2")).getLayerConf().getLayer();
    assertEquals(4, l2.getNIn());
    assertEquals(5, l2.getNOut());
    assertEquals("softmax", l2.getActivationFn().toString());
    assertTrue(l2.getLossFn() instanceof LossMCXENT);
}
Example #4
Source File: RegressionTest060.java From deeplearning4j with Apache License 2.0
@Test
public void regressionTestLSTM1() throws Exception {
    File f = Resources.asFile("regression_testing/060/060_ModelSerializer_Regression_LSTM_1.zip");

    MultiLayerNetwork net = ModelSerializer.restoreMultiLayerNetwork(f, true);

    MultiLayerConfiguration conf = net.getLayerWiseConfigurations();
    assertEquals(3, conf.getConfs().size());

    GravesLSTM l0 = (GravesLSTM) conf.getConf(0).getLayer();
    assertEquals("tanh", l0.getActivationFn().toString());
    assertEquals(3, l0.getNIn());
    assertEquals(4, l0.getNOut());
    assertEquals(GradientNormalization.ClipElementWiseAbsoluteValue, l0.getGradientNormalization());
    assertEquals(1.5, l0.getGradientNormalizationThreshold(), 1e-5);

    GravesBidirectionalLSTM l1 = (GravesBidirectionalLSTM) conf.getConf(1).getLayer();
    assertEquals("softsign", l1.getActivationFn().toString());
    assertEquals(4, l1.getNIn());
    assertEquals(4, l1.getNOut());
    assertEquals(GradientNormalization.ClipElementWiseAbsoluteValue, l1.getGradientNormalization());
    assertEquals(1.5, l1.getGradientNormalizationThreshold(), 1e-5);

    RnnOutputLayer l2 = (RnnOutputLayer) conf.getConf(2).getLayer();
    assertEquals(4, l2.getNIn());
    assertEquals(5, l2.getNOut());
    assertEquals("softmax", l2.getActivationFn().toString());
    assertTrue(l2.getLossFn() instanceof LossMCXENT);
}
Example #5
Source File: RegressionTest060.java From deeplearning4j with Apache License 2.0
@Test
public void regressionTestMLP1() throws Exception {
    File f = Resources.asFile("regression_testing/060/060_ModelSerializer_Regression_MLP_1.zip");

    MultiLayerNetwork net = ModelSerializer.restoreMultiLayerNetwork(f, true);

    MultiLayerConfiguration conf = net.getLayerWiseConfigurations();
    assertEquals(2, conf.getConfs().size());

    DenseLayer l0 = (DenseLayer) conf.getConf(0).getLayer();
    assertEquals("relu", l0.getActivationFn().toString());
    assertEquals(3, l0.getNIn());
    assertEquals(4, l0.getNOut());
    assertEquals(new WeightInitXavier(), l0.getWeightInitFn());
    assertEquals(new Nesterovs(0.15, 0.9), l0.getIUpdater());
    assertEquals(0.15, ((Nesterovs) l0.getIUpdater()).getLearningRate(), 1e-6);

    OutputLayer l1 = (OutputLayer) conf.getConf(1).getLayer();
    assertEquals("softmax", l1.getActivationFn().toString());
    assertTrue(l1.getLossFn() instanceof LossMCXENT);
    assertEquals(4, l1.getNIn());
    assertEquals(5, l1.getNOut());
    assertEquals(new WeightInitXavier(), l1.getWeightInitFn());
    assertEquals(new Nesterovs(0.15, 0.9), l1.getIUpdater());
    assertEquals(0.9, ((Nesterovs) l1.getIUpdater()).getMomentum(), 1e-6);
    assertEquals(0.15, ((Nesterovs) l1.getIUpdater()).getLearningRate(), 1e-6);

    int numParams = (int) net.numParams();
    assertEquals(Nd4j.linspace(1, numParams, numParams, Nd4j.dataType()).reshape(1, numParams), net.params());
    int updaterSize = (int) new Nesterovs().stateSize(numParams);
    assertEquals(Nd4j.linspace(1, updaterSize, updaterSize, Nd4j.dataType()).reshape(1, numParams), net.getUpdater().getStateViewArray());
}
Example #6
Source File: RegressionTest071.java From deeplearning4j with Apache License 2.0
@Test
public void regressionTestCGLSTM1() throws Exception {
    File f = Resources.asFile("regression_testing/071/071_ModelSerializer_Regression_CG_LSTM_1.zip");

    ComputationGraph net = ModelSerializer.restoreComputationGraph(f, true);

    ComputationGraphConfiguration conf = net.getConfiguration();
    assertEquals(3, conf.getVertices().size());

    GravesLSTM l0 = (GravesLSTM) ((LayerVertex) conf.getVertices().get("0")).getLayerConf().getLayer();
    assertEquals("tanh", l0.getActivationFn().toString());
    assertEquals(3, l0.getNIn());
    assertEquals(4, l0.getNOut());
    assertEquals(GradientNormalization.ClipElementWiseAbsoluteValue, l0.getGradientNormalization());
    assertEquals(1.5, l0.getGradientNormalizationThreshold(), 1e-5);

    GravesBidirectionalLSTM l1 = (GravesBidirectionalLSTM) ((LayerVertex) conf.getVertices().get("1")).getLayerConf().getLayer();
    assertEquals("softsign", l1.getActivationFn().toString());
    assertEquals(4, l1.getNIn());
    assertEquals(4, l1.getNOut());
    assertEquals(GradientNormalization.ClipElementWiseAbsoluteValue, l1.getGradientNormalization());
    assertEquals(1.5, l1.getGradientNormalizationThreshold(), 1e-5);

    RnnOutputLayer l2 = (RnnOutputLayer) ((LayerVertex) conf.getVertices().get("2")).getLayerConf().getLayer();
    assertEquals(4, l2.getNIn());
    assertEquals(5, l2.getNOut());
    assertEquals("softmax", l2.getActivationFn().toString());
    assertTrue(l2.getLossFn() instanceof LossMCXENT);
}
Example #7
Source File: RegressionTest071.java From deeplearning4j with Apache License 2.0
@Test
public void regressionTestLSTM1() throws Exception {
    File f = Resources.asFile("regression_testing/071/071_ModelSerializer_Regression_LSTM_1.zip");

    MultiLayerNetwork net = ModelSerializer.restoreMultiLayerNetwork(f, true);

    MultiLayerConfiguration conf = net.getLayerWiseConfigurations();
    assertEquals(3, conf.getConfs().size());

    GravesLSTM l0 = (GravesLSTM) conf.getConf(0).getLayer();
    assertEquals("tanh", l0.getActivationFn().toString());
    assertEquals(3, l0.getNIn());
    assertEquals(4, l0.getNOut());
    assertEquals(GradientNormalization.ClipElementWiseAbsoluteValue, l0.getGradientNormalization());
    assertEquals(1.5, l0.getGradientNormalizationThreshold(), 1e-5);

    GravesBidirectionalLSTM l1 = (GravesBidirectionalLSTM) conf.getConf(1).getLayer();
    assertEquals("softsign", l1.getActivationFn().toString());
    assertEquals(4, l1.getNIn());
    assertEquals(4, l1.getNOut());
    assertEquals(GradientNormalization.ClipElementWiseAbsoluteValue, l1.getGradientNormalization());
    assertEquals(1.5, l1.getGradientNormalizationThreshold(), 1e-5);

    RnnOutputLayer l2 = (RnnOutputLayer) conf.getConf(2).getLayer();
    assertEquals(4, l2.getNIn());
    assertEquals(5, l2.getNOut());
    assertEquals("softmax", l2.getActivationFn().toString());
    assertTrue(l2.getLossFn() instanceof LossMCXENT);
}
Example #8
Source File: RegressionTest071.java From deeplearning4j with Apache License 2.0
@Test
public void regressionTestMLP1() throws Exception {
    File f = Resources.asFile("regression_testing/071/071_ModelSerializer_Regression_MLP_1.zip");

    MultiLayerNetwork net = ModelSerializer.restoreMultiLayerNetwork(f, true);

    MultiLayerConfiguration conf = net.getLayerWiseConfigurations();
    assertEquals(2, conf.getConfs().size());

    DenseLayer l0 = (DenseLayer) conf.getConf(0).getLayer();
    assertEquals("relu", l0.getActivationFn().toString());
    assertEquals(3, l0.getNIn());
    assertEquals(4, l0.getNOut());
    assertEquals(new WeightInitXavier(), l0.getWeightInitFn());
    assertEquals(new Nesterovs(0.15, 0.9), l0.getIUpdater());
    assertEquals(0.15, ((Nesterovs) l0.getIUpdater()).getLearningRate(), 1e-6);

    OutputLayer l1 = (OutputLayer) conf.getConf(1).getLayer();
    assertEquals("softmax", l1.getActivationFn().toString());
    assertTrue(l1.getLossFn() instanceof LossMCXENT);
    assertEquals(4, l1.getNIn());
    assertEquals(5, l1.getNOut());
    assertEquals(new WeightInitXavier(), l1.getWeightInitFn());
    assertEquals(0.9, ((Nesterovs) l1.getIUpdater()).getMomentum(), 1e-6);
    assertEquals(0.15, ((Nesterovs) l1.getIUpdater()).getLearningRate(), 1e-6);

    int numParams = (int) net.numParams();
    assertEquals(Nd4j.linspace(1, numParams, numParams).reshape(1, numParams), net.params());
    int updaterSize = (int) new Nesterovs().stateSize(numParams);
    assertEquals(Nd4j.linspace(1, updaterSize, updaterSize).reshape(1, numParams), net.getUpdater().getStateViewArray());
}
Example #9
Source File: RegressionTest080.java From deeplearning4j with Apache License 2.0
@Test
public void regressionTestCGLSTM1() throws Exception {
    File f = Resources.asFile("regression_testing/080/080_ModelSerializer_Regression_CG_LSTM_1.zip");

    ComputationGraph net = ModelSerializer.restoreComputationGraph(f, true);

    ComputationGraphConfiguration conf = net.getConfiguration();
    assertEquals(3, conf.getVertices().size());

    GravesLSTM l0 = (GravesLSTM) ((LayerVertex) conf.getVertices().get("0")).getLayerConf().getLayer();
    assertTrue(l0.getActivationFn() instanceof ActivationTanH);
    assertEquals(3, l0.getNIn());
    assertEquals(4, l0.getNOut());
    assertEquals(GradientNormalization.ClipElementWiseAbsoluteValue, l0.getGradientNormalization());
    assertEquals(1.5, l0.getGradientNormalizationThreshold(), 1e-5);

    GravesBidirectionalLSTM l1 = (GravesBidirectionalLSTM) ((LayerVertex) conf.getVertices().get("1")).getLayerConf().getLayer();
    assertTrue(l1.getActivationFn() instanceof ActivationSoftSign);
    assertEquals(4, l1.getNIn());
    assertEquals(4, l1.getNOut());
    assertEquals(GradientNormalization.ClipElementWiseAbsoluteValue, l1.getGradientNormalization());
    assertEquals(1.5, l1.getGradientNormalizationThreshold(), 1e-5);

    RnnOutputLayer l2 = (RnnOutputLayer) ((LayerVertex) conf.getVertices().get("2")).getLayerConf().getLayer();
    assertEquals(4, l2.getNIn());
    assertEquals(5, l2.getNOut());
    assertTrue(l2.getActivationFn() instanceof ActivationSoftmax);
    assertTrue(l2.getLossFn() instanceof LossMCXENT);
}
Example #10
Source File: RegressionTest080.java From deeplearning4j with Apache License 2.0
@Test
public void regressionTestMLP1() throws Exception {
    File f = Resources.asFile("regression_testing/080/080_ModelSerializer_Regression_MLP_1.zip");

    MultiLayerNetwork net = ModelSerializer.restoreMultiLayerNetwork(f, true);

    MultiLayerConfiguration conf = net.getLayerWiseConfigurations();
    assertEquals(2, conf.getConfs().size());

    DenseLayer l0 = (DenseLayer) conf.getConf(0).getLayer();
    assertTrue(l0.getActivationFn() instanceof ActivationReLU);
    assertEquals(3, l0.getNIn());
    assertEquals(4, l0.getNOut());
    assertEquals(new WeightInitXavier(), l0.getWeightInitFn());
    assertTrue(l0.getIUpdater() instanceof Nesterovs);
    Nesterovs n = (Nesterovs) l0.getIUpdater();
    assertEquals(0.9, n.getMomentum(), 1e-6);
    assertEquals(0.15, n.getLearningRate(), 1e-6);

    OutputLayer l1 = (OutputLayer) conf.getConf(1).getLayer();
    assertTrue(l1.getActivationFn() instanceof ActivationSoftmax);
    assertTrue(l1.getLossFn() instanceof LossMCXENT);
    assertEquals(4, l1.getNIn());
    assertEquals(5, l1.getNOut());
    assertEquals(new WeightInitXavier(), l1.getWeightInitFn());
    assertTrue(l1.getIUpdater() instanceof Nesterovs);
    assertEquals(0.9, ((Nesterovs) l1.getIUpdater()).getMomentum(), 1e-6);
    assertEquals(0.15, ((Nesterovs) l1.getIUpdater()).getLearningRate(), 1e-6);

    int numParams = (int) net.numParams();
    assertEquals(Nd4j.linspace(1, numParams, numParams, Nd4j.dataType()).reshape(1, numParams), net.params());
    int updaterSize = (int) new Nesterovs().stateSize(numParams);
    assertEquals(Nd4j.linspace(1, updaterSize, updaterSize, Nd4j.dataType()).reshape(1, numParams), net.getUpdater().getStateViewArray());
}
Example #11
Source File: RegressionTest050.java From deeplearning4j with Apache License 2.0
@Test
public void regressionTestMLP1() throws Exception {
    File f = Resources.asFile("regression_testing/050/050_ModelSerializer_Regression_MLP_1.zip");

    MultiLayerNetwork net = ModelSerializer.restoreMultiLayerNetwork(f, true);

    MultiLayerConfiguration conf = net.getLayerWiseConfigurations();
    assertEquals(2, conf.getConfs().size());

    DenseLayer l0 = (DenseLayer) conf.getConf(0).getLayer();
    assertEquals("relu", l0.getActivationFn().toString());
    assertEquals(3, l0.getNIn());
    assertEquals(4, l0.getNOut());
    assertEquals(new WeightInitXavier(), l0.getWeightInitFn());
    assertEquals(new Nesterovs(0.15, 0.9), l0.getIUpdater());
    assertEquals(0.15, ((Nesterovs) l0.getIUpdater()).getLearningRate(), 1e-6);

    OutputLayer l1 = (OutputLayer) conf.getConf(1).getLayer();
    assertEquals("softmax", l1.getActivationFn().toString());
    assertTrue(l1.getLossFn() instanceof LossMCXENT);
    assertEquals(4, l1.getNIn());
    assertEquals(5, l1.getNOut());
    assertEquals(new WeightInitXavier(), l1.getWeightInitFn());
    assertEquals(new Nesterovs(0.15, 0.9), l1.getIUpdater());
    assertEquals(0.9, ((Nesterovs) l1.getIUpdater()).getMomentum(), 1e-6);
    assertEquals(0.15, ((Nesterovs) l1.getIUpdater()).getLearningRate(), 1e-6);

    int numParams = (int) net.numParams();
    assertEquals(Nd4j.linspace(1, numParams, numParams, Nd4j.dataType()).reshape(1, numParams), net.params());
    int updaterSize = (int) new Nesterovs().stateSize(net.numParams());
    assertEquals(Nd4j.linspace(1, updaterSize, updaterSize, Nd4j.dataType()).reshape(1, numParams), net.getUpdater().getStateViewArray());
}
Example #12
Source File: OutputLayerUtil.java From deeplearning4j with Apache License 2.0
/**
 * Validate the output layer (or loss layer) configuration, to detect invalid configurations. A DL4JInvalidConfigException
 * will be thrown for invalid configurations (like softmax + nOut=1).<br>
 * <p>
 * If the specified layer is not an output layer, this is a no-op
 *
 * @param layerName    Name of the layer
 * @param nOut         Number of outputs for the layer
 * @param isLossLayer  Should be true for loss layers (no params), false for output layers
 * @param activation   Activation function
 * @param lossFunction Loss function
 */
public static void validateOutputLayerConfiguration(String layerName, long nOut, boolean isLossLayer, IActivation activation, ILossFunction lossFunction){
    //nOut = 1 + softmax
    if(!isLossLayer && nOut == 1 && activation instanceof ActivationSoftmax){ //May not have valid nOut for LossLayer
        throw new DL4JInvalidConfigException("Invalid output layer configuration for layer \"" + layerName + "\": Softmax + nOut=1 networks " +
                "are not supported. Softmax cannot be used with nOut=1 as the output will always be exactly 1.0 " +
                "regardless of the input. " + COMMON_MSG);
    }

    //Loss function requires a probability, but activation is outside the 0-1 range
    if(lossFunctionExpectsProbability(lossFunction) && activationExceedsZeroOneRange(activation, isLossLayer)){
        throw new DL4JInvalidConfigException("Invalid output layer configuration for layer \"" + layerName + "\": loss function " + lossFunction +
                " expects activations to be in the range 0 to 1 (probabilities) but activation function " + activation +
                " does not bound values to this 0 to 1 range. This indicates a likely invalid network configuration. " + COMMON_MSG);
    }

    //Common mistake: softmax + xent
    if(activation instanceof ActivationSoftmax && lossFunction instanceof LossBinaryXENT){
        throw new DL4JInvalidConfigException("Invalid output layer configuration for layer \"" + layerName + "\": softmax activation function in combination " +
                "with LossBinaryXENT (binary cross entropy loss function). For multi-class classification, use softmax + " +
                "MCXENT (multi-class cross entropy); for binary multi-label classification, use sigmoid + XENT. " + COMMON_MSG);
    }

    //Common mistake: sigmoid + mcxent
    if(activation instanceof ActivationSigmoid && lossFunction instanceof LossMCXENT){
        throw new DL4JInvalidConfigException("Invalid output layer configuration for layer \"" + layerName + "\": sigmoid activation function in combination " +
                "with LossMCXENT (multi-class cross entropy loss function). For multi-class classification, use softmax + " +
                "MCXENT (multi-class cross entropy); for binary multi-label classification, use sigmoid + XENT. " + COMMON_MSG);
    }
}
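To make the validation concrete, here is a small, hedged sketch of the sigmoid + MCXENT mistake being rejected. The layer sizes are arbitrary assumptions, and the exact point at which DL4J throws (configuration build vs. network init) can vary by version, so the whole construction sits inside the try block.

import org.deeplearning4j.exception.DL4JInvalidConfigException;
import org.deeplearning4j.nn.conf.MultiLayerConfiguration;
import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
import org.deeplearning4j.nn.conf.layers.OutputLayer;
import org.deeplearning4j.nn.multilayer.MultiLayerNetwork;
import org.nd4j.linalg.activations.Activation;
import org.nd4j.linalg.lossfunctions.impl.LossMCXENT;

public class OutputLayerValidationSketch {
    public static void main(String[] args) {
        try {
            //Common mistake from the method above: sigmoid activation + LossMCXENT
            MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                    .list()
                    .layer(new OutputLayer.Builder(new LossMCXENT())
                            .nIn(4).nOut(3)
                            .activation(Activation.SIGMOID)
                            .build())
                    .build();
            new MultiLayerNetwork(conf).init();
        } catch (DL4JInvalidConfigException e) {
            System.out.println("Rejected as expected: " + e.getMessage());
        }
    }
}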
Example #13
Source File: CustomerRetentionPredictionExample.java From Java-Deep-Learning-Cookbook with MIT License
public static void main(String[] args) throws IOException, InterruptedException {

    final int labelIndex = 11;
    final int batchSize = 8;
    final int numClasses = 2;
    final INDArray weightsArray = Nd4j.create(new double[]{0.57, 0.75});

    final RecordReader recordReader = generateReader(new ClassPathResource("Churn_Modelling.csv").getFile());
    final DataSetIterator dataSetIterator = new RecordReaderDataSetIterator.Builder(recordReader, batchSize)
            .classification(labelIndex, numClasses)
            .build();
    final DataNormalization dataNormalization = new NormalizerStandardize();
    dataNormalization.fit(dataSetIterator);
    dataSetIterator.setPreProcessor(dataNormalization);
    final DataSetIteratorSplitter dataSetIteratorSplitter = new DataSetIteratorSplitter(dataSetIterator, 1250, 0.8);

    log.info("Building Model------------------->>>>>>>>>");
    final MultiLayerConfiguration configuration = new NeuralNetConfiguration.Builder()
            .weightInit(WeightInit.RELU_UNIFORM)
            .updater(new Adam(0.015D))
            .list()
            .layer(new DenseLayer.Builder().nIn(11).nOut(6).activation(Activation.RELU).dropOut(0.9).build())
            .layer(new DenseLayer.Builder().nIn(6).nOut(6).activation(Activation.RELU).dropOut(0.9).build())
            .layer(new DenseLayer.Builder().nIn(6).nOut(4).activation(Activation.RELU).dropOut(0.9).build())
            .layer(new OutputLayer.Builder(new LossMCXENT(weightsArray)).nIn(4).nOut(2).activation(Activation.SOFTMAX).build())
            .build();

    final UIServer uiServer = UIServer.getInstance();
    final StatsStorage statsStorage = new InMemoryStatsStorage();

    final MultiLayerNetwork multiLayerNetwork = new MultiLayerNetwork(configuration);
    multiLayerNetwork.init();
    multiLayerNetwork.setListeners(new ScoreIterationListener(100), new StatsListener(statsStorage));
    uiServer.attach(statsStorage);
    multiLayerNetwork.fit(dataSetIteratorSplitter.getTrainIterator(), 100);

    final Evaluation evaluation = multiLayerNetwork.evaluate(dataSetIteratorSplitter.getTestIterator(), Arrays.asList("0", "1"));
    System.out.println(evaluation.stats());

    final File file = new File("model.zip");
    ModelSerializer.writeModel(multiLayerNetwork, file, true);
    ModelSerializer.addNormalizerToModel(file, dataNormalization);
}
Example #14
Source File: RegressionTest100b6.java From deeplearning4j with Apache License 2.0
@Test
public void testSyntheticBidirectionalRNNGraph() throws Exception {

    File f = Resources.asFile("regression_testing/100b6/SyntheticBidirectionalRNNGraph_100b6.bin");
    ComputationGraph net = ComputationGraph.load(f, true);

    Bidirectional l0 = (Bidirectional) net.getLayer("rnn1").conf().getLayer();

    LSTM l1 = (LSTM) l0.getFwd();
    assertEquals(16, l1.getNOut());
    assertEquals(new ActivationReLU(), l1.getActivationFn());
    assertEquals(new L2Regularization(0.0001), TestUtils.getL2Reg(l1));

    LSTM l2 = (LSTM) l0.getBwd();
    assertEquals(16, l2.getNOut());
    assertEquals(new ActivationReLU(), l2.getActivationFn());
    assertEquals(new L2Regularization(0.0001), TestUtils.getL2Reg(l2));

    Bidirectional l3 = (Bidirectional) net.getLayer("rnn2").conf().getLayer();

    SimpleRnn l4 = (SimpleRnn) l3.getFwd();
    assertEquals(16, l4.getNOut());
    assertEquals(new ActivationReLU(), l4.getActivationFn());
    assertEquals(new L2Regularization(0.0001), TestUtils.getL2Reg(l4));

    SimpleRnn l5 = (SimpleRnn) l3.getBwd();
    assertEquals(16, l5.getNOut());
    assertEquals(new ActivationReLU(), l5.getActivationFn());
    assertEquals(new L2Regularization(0.0001), TestUtils.getL2Reg(l5));

    MergeVertex mv = (MergeVertex) net.getVertex("concat");

    GlobalPoolingLayer gpl = (GlobalPoolingLayer) net.getLayer("pooling").conf().getLayer();
    assertEquals(PoolingType.MAX, gpl.getPoolingType());
    assertArrayEquals(new int[]{2}, gpl.getPoolingDimensions());
    assertTrue(gpl.isCollapseDimensions());

    OutputLayer outl = (OutputLayer) net.getLayer("out").conf().getLayer();
    assertEquals(3, outl.getNOut());
    assertEquals(new LossMCXENT(), outl.getLossFn());

    INDArray outExp;
    File f2 = Resources.asFile("regression_testing/100b6/SyntheticBidirectionalRNNGraph_Output_100b6.bin");
    try (DataInputStream dis = new DataInputStream(new FileInputStream(f2))) {
        outExp = Nd4j.read(dis);
    }

    INDArray in;
    File f3 = Resources.asFile("regression_testing/100b6/SyntheticBidirectionalRNNGraph_Input_100b6.bin");
    try (DataInputStream dis = new DataInputStream(new FileInputStream(f3))) {
        in = Nd4j.read(dis);
    }

    INDArray outAct = net.output(in)[0];

    assertEquals(outExp, outAct);
}
Example #15
Source File: RegressionTest100b4.java From deeplearning4j with Apache License 2.0
@Test
public void testSyntheticBidirectionalRNNGraph() throws Exception {

    File f = Resources.asFile("regression_testing/100b4/SyntheticBidirectionalRNNGraph_100b4.bin");
    ComputationGraph net = ComputationGraph.load(f, true);

    Bidirectional l0 = (Bidirectional) net.getLayer("rnn1").conf().getLayer();

    LSTM l1 = (LSTM) l0.getFwd();
    assertEquals(16, l1.getNOut());
    assertEquals(new ActivationReLU(), l1.getActivationFn());
    assertEquals(new L2Regularization(0.0001), TestUtils.getL2Reg(l1));

    LSTM l2 = (LSTM) l0.getBwd();
    assertEquals(16, l2.getNOut());
    assertEquals(new ActivationReLU(), l2.getActivationFn());
    assertEquals(new L2Regularization(0.0001), TestUtils.getL2Reg(l2));

    Bidirectional l3 = (Bidirectional) net.getLayer("rnn2").conf().getLayer();

    SimpleRnn l4 = (SimpleRnn) l3.getFwd();
    assertEquals(16, l4.getNOut());
    assertEquals(new ActivationReLU(), l4.getActivationFn());
    assertEquals(new L2Regularization(0.0001), TestUtils.getL2Reg(l4));

    SimpleRnn l5 = (SimpleRnn) l3.getBwd();
    assertEquals(16, l5.getNOut());
    assertEquals(new ActivationReLU(), l5.getActivationFn());
    assertEquals(new L2Regularization(0.0001), TestUtils.getL2Reg(l5));

    MergeVertex mv = (MergeVertex) net.getVertex("concat");

    GlobalPoolingLayer gpl = (GlobalPoolingLayer) net.getLayer("pooling").conf().getLayer();
    assertEquals(PoolingType.MAX, gpl.getPoolingType());
    assertArrayEquals(new int[]{2}, gpl.getPoolingDimensions());
    assertTrue(gpl.isCollapseDimensions());

    OutputLayer outl = (OutputLayer) net.getLayer("out").conf().getLayer();
    assertEquals(3, outl.getNOut());
    assertEquals(new LossMCXENT(), outl.getLossFn());

    INDArray outExp;
    File f2 = Resources.asFile("regression_testing/100b4/SyntheticBidirectionalRNNGraph_Output_100b4.bin");
    try (DataInputStream dis = new DataInputStream(new FileInputStream(f2))) {
        outExp = Nd4j.read(dis);
    }

    INDArray in;
    File f3 = Resources.asFile("regression_testing/100b4/SyntheticBidirectionalRNNGraph_Input_100b4.bin");
    try (DataInputStream dis = new DataInputStream(new FileInputStream(f3))) {
        in = Nd4j.read(dis);
    }

    INDArray outAct = net.output(in)[0];

    assertEquals(outExp, outAct);
}
Example #16
Source File: OutputLayerUtil.java From deeplearning4j with Apache License 2.0
public static boolean lossFunctionExpectsProbability(ILossFunction lf) {
    //Note LossNegativeLogLikelihood extends LossMCXENT
    return lf instanceof LossMCXENT || lf instanceof LossBinaryXENT;
}
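A brief illustration of the helper above: since LossNegativeLogLikelihood extends LossMCXENT (as the comment notes), a single instanceof check covers both. The import path for OutputLayerUtil is an assumption and may differ across DL4J versions.

import org.deeplearning4j.nn.conf.layers.util.OutputLayerUtil; //assumed package location
import org.nd4j.linalg.lossfunctions.impl.LossMCXENT;
import org.nd4j.linalg.lossfunctions.impl.LossMSE;
import org.nd4j.linalg.lossfunctions.impl.LossNegativeLogLikelihood;

public class ProbabilityCheckSketch {
    public static void main(String[] args) {
        System.out.println(OutputLayerUtil.lossFunctionExpectsProbability(new LossMCXENT()));                //true
        System.out.println(OutputLayerUtil.lossFunctionExpectsProbability(new LossNegativeLogLikelihood())); //true: subclass of LossMCXENT
        System.out.println(OutputLayerUtil.lossFunctionExpectsProbability(new LossMSE()));                   //false: MSE does not expect probabilities
    }
}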
Example #17
Source File: MCXENTLossFunctionTestCase.java From jstarcraft-ai with Apache License 2.0
@Override
protected ILossFunction getOldFunction() {
    return new LossMCXENT();
}