Java Code Examples for org.nd4j.linalg.api.ndarray.INDArray#equalsWithEps()
The following examples show how to use org.nd4j.linalg.api.ndarray.INDArray#equalsWithEps(). This method performs a tolerance-based comparison: it returns true only when both arrays have the same shape and every pair of corresponding elements differs by at most the given epsilon. As the examples below show, it is used throughout the deeplearning4j test suites to compare network outputs across serialization round-trips, data types, and backends.
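As a minimal, self-contained illustration of the method's behavior (the class name and values here are hypothetical, not taken from the deeplearning4j sources):

import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;

public class EqualsWithEpsDemo {
    public static void main(String[] args) {
        INDArray a = Nd4j.create(new double[]{1.0, 2.0, 3.0});
        INDArray b = a.add(1e-7);   // perturbation well below eps
        INDArray c = a.add(0.5);    // perturbation well above eps

        System.out.println(a.equalsWithEps(b, 1e-6));   // true: all differences within eps
        System.out.println(a.equalsWithEps(c, 1e-6));   // false: differences exceed eps
    }
}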
Example 1
Source File: KerasModelEndToEndTest.java From deeplearning4j with Apache License 2.0
private static void compareINDArrays(String label, INDArray expected, INDArray actual, double eps) {
    if (!expected.equalShapes(actual)) {
        throw new IllegalStateException("Shapes do not match for \"" + label + "\": got "
                + Arrays.toString(expected.shape()) + " vs " + Arrays.toString(actual.shape()));
    }
    INDArray diff = expected.sub(actual.castTo(expected.dataType()));
    double min = diff.minNumber().doubleValue();
    double max = diff.maxNumber().doubleValue();
    log.info(label + ": " + expected.equalsWithEps(actual, eps) + ", " + min + ", " + max);

    double threshold = 1e-7;
    double aAbsMax = Math.max(Math.abs(expected.minNumber().doubleValue()), Math.abs(expected.maxNumber().doubleValue()));
    double bAbsMax = Math.max(Math.abs(actual.minNumber().doubleValue()), Math.abs(actual.maxNumber().doubleValue()));

    // skip too small absolute inputs
    if (Math.abs(aAbsMax) > threshold && Math.abs(bAbsMax) > threshold) {
        boolean eq = expected.equalsWithEps(actual.castTo(expected.dataType()), eps);
        if (!eq) {
            System.out.println("Expected: " + Arrays.toString(expected.shape()) + ", actual: " + Arrays.toString(actual.shape()));
            System.out.println("Expected:\n" + expected);
            System.out.println("Actual: \n" + actual);
        }
        assertTrue("Output differs: " + label, eq);
    }
}
Example 2
Source File: RegressionTest100b4.java From deeplearning4j with Apache License 2.0
@Test
public void testYoloHouseNumber() throws Exception {
    File f = Resources.asFile("regression_testing/100b4/HouseNumberDetection_100b4.bin");
    ComputationGraph net = ComputationGraph.load(f, true);

    int nBoxes = 5;
    int nClasses = 10;

    ConvolutionLayer cl = (ConvolutionLayer) ((LayerVertex) net.getConfiguration().getVertices()
            .get("convolution2d_9")).getLayerConf().getLayer();
    assertEquals(nBoxes * (5 + nClasses), cl.getNOut());
    assertEquals(new ActivationIdentity(), cl.getActivationFn());
    assertEquals(ConvolutionMode.Same, cl.getConvolutionMode());
    assertEquals(new WeightInitXavier(), cl.getWeightInitFn());
    assertArrayEquals(new int[]{1, 1}, cl.getKernelSize());

    INDArray outExp;
    File f2 = Resources.asFile("regression_testing/100b4/HouseNumberDetection_Output_100b4.bin");
    try (DataInputStream dis = new DataInputStream(new FileInputStream(f2))) {
        outExp = Nd4j.read(dis);
    }

    INDArray in;
    File f3 = Resources.asFile("regression_testing/100b4/HouseNumberDetection_Input_100b4.bin");
    try (DataInputStream dis = new DataInputStream(new FileInputStream(f3))) {
        in = Nd4j.read(dis);
    }

    INDArray outAct = net.outputSingle(in);

    boolean eq = outExp.equalsWithEps(outAct.castTo(outExp.dataType()), 1e-3);
    assertTrue(eq);
}
Example 3
Source File: RegressionTest100b3.java From deeplearning4j with Apache License 2.0
@Test @Ignore("AB 2019/05/23 - Failing on linux-x86_64-cuda-9.2 - see issue #7657") public void testYoloHouseNumber() throws Exception { File f = Resources.asFile("regression_testing/100b3/HouseNumberDetection_100b3.bin"); ComputationGraph net = ComputationGraph.load(f, true); int nBoxes = 5; int nClasses = 10; ConvolutionLayer cl = (ConvolutionLayer)((LayerVertex)net.getConfiguration().getVertices().get("convolution2d_9")).getLayerConf().getLayer(); assertEquals(nBoxes * (5 + nClasses), cl.getNOut()); assertEquals(new ActivationIdentity(), cl.getActivationFn()); assertEquals(ConvolutionMode.Same, cl.getConvolutionMode()); assertEquals(new WeightInitXavier(), cl.getWeightInitFn()); assertArrayEquals(new int[]{1,1}, cl.getKernelSize()); assertArrayEquals(new int[]{1,1}, cl.getKernelSize()); INDArray outExp; File f2 = Resources.asFile("regression_testing/100b3/HouseNumberDetection_Output_100b3.bin"); try(DataInputStream dis = new DataInputStream(new FileInputStream(f2))){ outExp = Nd4j.read(dis); } INDArray in; File f3 = Resources.asFile("regression_testing/100b3/HouseNumberDetection_Input_100b3.bin"); try(DataInputStream dis = new DataInputStream(new FileInputStream(f3))){ in = Nd4j.read(dis); } INDArray outAct = net.outputSingle(in); boolean eq = outExp.equalsWithEps(outAct.castTo(outExp.dataType()), 1e-3); assertTrue(eq); }
Example 4
Source File: RegressionTest100b6.java From deeplearning4j with Apache License 2.0
@Test
public void testYoloHouseNumber() throws Exception {
    File f = Resources.asFile("regression_testing/100b6/HouseNumberDetection_100b6.bin");
    ComputationGraph net = ComputationGraph.load(f, true);

    int nBoxes = 5;
    int nClasses = 10;

    ConvolutionLayer cl = (ConvolutionLayer) ((LayerVertex) net.getConfiguration().getVertices()
            .get("convolution2d_9")).getLayerConf().getLayer();
    assertEquals(nBoxes * (5 + nClasses), cl.getNOut());
    assertEquals(new ActivationIdentity(), cl.getActivationFn());
    assertEquals(ConvolutionMode.Same, cl.getConvolutionMode());
    assertEquals(new WeightInitXavier(), cl.getWeightInitFn());
    assertArrayEquals(new int[]{1, 1}, cl.getKernelSize());

    INDArray outExp;
    File f2 = Resources.asFile("regression_testing/100b6/HouseNumberDetection_Output_100b6.bin");
    try (DataInputStream dis = new DataInputStream(new FileInputStream(f2))) {
        outExp = Nd4j.read(dis);
    }

    INDArray in;
    File f3 = Resources.asFile("regression_testing/100b6/HouseNumberDetection_Input_100b6.bin");
    try (DataInputStream dis = new DataInputStream(new FileInputStream(f3))) {
        in = Nd4j.read(dis);
    }

    INDArray outAct = net.outputSingle(in);

    boolean eq = outExp.equalsWithEps(outAct.castTo(outExp.dataType()), 1e-3);
    assertTrue(eq);
}
Example 5
Source File: DeconvTests.java From deeplearning4j with Apache License 2.0
@Test
public void compareKeras() throws Exception {
    File f = testDir.newFolder();
    Resources.copyDirectory("keras/deconv", f);

    File[] files = f.listFiles();

    Set<String> tests = new HashSet<>();
    for (File file : files) {
        String n = file.getName();
        if (!n.startsWith("mb"))
            continue;

        int idx = n.lastIndexOf('_');
        String name = n.substring(0, idx);
        tests.add(name);
    }

    List<String> l = new ArrayList<>(tests);
    Collections.sort(l);
    assertFalse(l.isEmpty());

    for (String s : l) {
        String s2 = s.replaceAll("[a-zA-Z]", "");
        String[] nums = s2.split("_");
        int mb = Integer.parseInt(nums[0]);
        int k = Integer.parseInt(nums[1]);
        int size = Integer.parseInt(nums[2]);
        int stride = Integer.parseInt(nums[3]);
        boolean same = s.contains("same");
        int d = Integer.parseInt(nums[5]);
        boolean nchw = s.contains("nchw");

        INDArray w = Nd4j.readNpy(new File(f, s + "_W.npy"));
        INDArray b = Nd4j.readNpy(new File(f, s + "_b.npy"));
        INDArray in = Nd4j.readNpy(new File(f, s + "_in.npy")).castTo(DataType.FLOAT);
        INDArray expOut = Nd4j.readNpy(new File(f, s + "_out.npy"));

        CustomOp op = DynamicCustomOp.builder("deconv2d")
                .addInputs(in, w, b)
                .addIntegerArguments(
                        k, k, stride, stride,
                        0, 0, //padding
                        d, d,
                        same ? 1 : 0,
                        nchw ? 0 : 1)
                .callInplace(false)
                .build();
        INDArray out = Nd4j.create(op.calculateOutputShape().get(0));
        out.assign(Double.NaN);
        op.addOutputArgument(out);
        Nd4j.exec(op);

        boolean eq = expOut.equalsWithEps(out, 1e-4);
        assertTrue(eq);
    }
}
Example 6
Source File: ArraySavingListener.java From deeplearning4j with Apache License 2.0
public static void compare(File dir1, File dir2, double eps) throws Exception {
    File[] files1 = dir1.listFiles();
    File[] files2 = dir2.listFiles();
    Preconditions.checkNotNull(files1, "No files in directory 1: %s", dir1);
    Preconditions.checkNotNull(files2, "No files in directory 2: %s", dir2);
    Preconditions.checkState(files1.length == files2.length, "Different number of files: %s vs %s", files1.length, files2.length);

    Map<String, File> m1 = toMap(files1);
    Map<String, File> m2 = toMap(files2);

    for (File f : files1) {
        String name = f.getName();
        String varName = name.substring(name.indexOf('_') + 1, name.length() - 4); //Strip "x_" and ".bin"
        File f2 = m2.get(varName);

        INDArray arr1 = Nd4j.readBinary(f);
        INDArray arr2 = Nd4j.readBinary(f2);

        //TODO String arrays won't work here!
        boolean eq = arr1.equalsWithEps(arr2, eps);
        if (eq) {
            System.out.println("Equals: " + varName.replaceAll("__", "/"));
        } else {
            if (arr1.dataType() == DataType.BOOL) {
                INDArray xor = Nd4j.exec(new Xor(arr1, arr2));
                int count = xor.castTo(DataType.INT).sumNumber().intValue();
                System.out.println("FAILS: " + varName.replaceAll("__", "/") + " - boolean, # differences = " + count);
                System.out.println("\t" + f.getAbsolutePath());
                System.out.println("\t" + f2.getAbsolutePath());
                xor.close();
            } else {
                INDArray sub = arr1.sub(arr2);
                INDArray diff = Nd4j.math.abs(sub);
                double maxDiff = diff.maxNumber().doubleValue();
                System.out.println("FAILS: " + varName.replaceAll("__", "/") + " - max difference = " + maxDiff);
                System.out.println("\t" + f.getAbsolutePath());
                System.out.println("\t" + f2.getAbsolutePath());
                sub.close();
                diff.close();
            }
        }
        arr1.close();
        arr2.close();
    }
}
Example 7
Source File: ND4JTestUtils.java From deeplearning4j with Apache License 2.0
@Override
public Boolean apply(INDArray i1, INDArray i2) {
    return i1.equalsWithEps(i2, eps);
}
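Example 7 packages the tolerance check as a function object, so a comparison utility can accept any equality criterion. A minimal sketch of the same idea using a plain java.util.function.BiFunction (the class and variable names below are illustrative assumptions, not the actual ND4JTestUtils API):

import java.util.function.BiFunction;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;

public class ComparatorSketch {
    public static void main(String[] args) {
        double eps = 1e-5;
        // Tolerance-based equality packaged as a function object, mirroring Example 7
        BiFunction<INDArray, INDArray, Boolean> equalsFn =
                (i1, i2) -> i1.equalsWithEps(i2, eps);

        INDArray x = Nd4j.rand(3, 4);
        INDArray y = x.add(1e-7);
        System.out.println(equalsFn.apply(x, y));   // true: within tolerance
    }
}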
Example 8
Source File: RegressionTest100b4.java From deeplearning4j with Apache License 2.0
@Test
public void testCustomLayer() throws Exception {
    for (DataType dtype : new DataType[]{DataType.DOUBLE, DataType.FLOAT, DataType.HALF}) {
        String dtypeName = dtype.toString().toLowerCase();

        File f = Resources.asFile("regression_testing/100b4/CustomLayerExample_100b4_" + dtypeName + ".bin");
        MultiLayerNetwork net = MultiLayerNetwork.load(f, true);

        DenseLayer l0 = (DenseLayer) net.getLayer(0).conf().getLayer();
        assertEquals(new ActivationTanH(), l0.getActivationFn());
        assertEquals(new L2Regularization(0.03), TestUtils.getL2Reg(l0));
        assertEquals(new RmsProp(0.95), l0.getIUpdater());

        CustomLayer l1 = (CustomLayer) net.getLayer(1).conf().getLayer();
        assertEquals(new ActivationTanH(), l1.getActivationFn());
        assertEquals(new ActivationSigmoid(), l1.getSecondActivationFunction());
        assertEquals(new RmsProp(0.95), l1.getIUpdater());

        INDArray outExp;
        File f2 = Resources.asFile("regression_testing/100b4/CustomLayerExample_Output_100b4_" + dtypeName + ".bin");
        try (DataInputStream dis = new DataInputStream(new FileInputStream(f2))) {
            outExp = Nd4j.read(dis);
        }

        INDArray in;
        File f3 = Resources.asFile("regression_testing/100b4/CustomLayerExample_Input_100b4_" + dtypeName + ".bin");
        try (DataInputStream dis = new DataInputStream(new FileInputStream(f3))) {
            in = Nd4j.read(dis);
        }

        assertEquals(dtype, in.dataType());
        assertEquals(dtype, outExp.dataType());
        assertEquals(dtype, net.params().dataType());
        assertEquals(dtype, net.getFlattenedGradients().dataType());
        assertEquals(dtype, net.getUpdater().getStateViewArray().dataType());

        INDArray outAct = net.output(in);
        assertEquals(dtype, outAct.dataType());

        assertEquals(dtype, net.getLayerWiseConfigurations().getDataType());
        assertEquals(dtype, net.params().dataType());
        boolean eq = outExp.equalsWithEps(outAct, 0.01);
        assertTrue("Test for dtype: " + dtypeName + "\n" + outExp + " vs " + outAct, eq);
    }
}
Example 9
Source File: RegressionTest100b6.java From deeplearning4j with Apache License 2.0
@Test
public void testCustomLayer() throws Exception {
    for (DataType dtype : new DataType[]{DataType.DOUBLE, DataType.FLOAT, DataType.HALF}) {
        String dtypeName = dtype.toString().toLowerCase();

        File f = Resources.asFile("regression_testing/100b6/CustomLayerExample_100b6_" + dtypeName + ".bin");
        MultiLayerNetwork net = MultiLayerNetwork.load(f, true);

        DenseLayer l0 = (DenseLayer) net.getLayer(0).conf().getLayer();
        assertEquals(new ActivationTanH(), l0.getActivationFn());
        assertEquals(new L2Regularization(0.03), TestUtils.getL2Reg(l0));
        assertEquals(new RmsProp(0.95), l0.getIUpdater());

        CustomLayer l1 = (CustomLayer) net.getLayer(1).conf().getLayer();
        assertEquals(new ActivationTanH(), l1.getActivationFn());
        assertEquals(new ActivationSigmoid(), l1.getSecondActivationFunction());
        assertEquals(new RmsProp(0.95), l1.getIUpdater());

        INDArray outExp;
        File f2 = Resources.asFile("regression_testing/100b6/CustomLayerExample_Output_100b6_" + dtypeName + ".bin");
        try (DataInputStream dis = new DataInputStream(new FileInputStream(f2))) {
            outExp = Nd4j.read(dis);
        }

        INDArray in;
        File f3 = Resources.asFile("regression_testing/100b6/CustomLayerExample_Input_100b6_" + dtypeName + ".bin");
        try (DataInputStream dis = new DataInputStream(new FileInputStream(f3))) {
            in = Nd4j.read(dis);
        }

        assertEquals(dtype, in.dataType());
        assertEquals(dtype, outExp.dataType());
        assertEquals(dtype, net.params().dataType());
        assertEquals(dtype, net.getFlattenedGradients().dataType());
        assertEquals(dtype, net.getUpdater().getStateViewArray().dataType());

        INDArray outAct = net.output(in);
        assertEquals(dtype, outAct.dataType());

        assertEquals(dtype, net.getLayerWiseConfigurations().getDataType());
        assertEquals(dtype, net.params().dataType());
        boolean eq = outExp.equalsWithEps(outAct, 0.01);
        assertTrue("Test for dtype: " + dtypeName + " - " + outExp + " vs " + outAct, eq);
    }
}
Example 10
Source File: RegressionTest100a.java From deeplearning4j with Apache License 2.0
@Test @Ignore("AB 2019/05/23 - Failing on linux-x86_64-cuda-9.2 - see issue #7657") public void testYoloHouseNumber() throws Exception { File f = Resources.asFile("regression_testing/100a/HouseNumberDetection_100a.bin"); ComputationGraph net = ComputationGraph.load(f, true); int nBoxes = 5; int nClasses = 10; ConvolutionLayer cl = (ConvolutionLayer)((LayerVertex)net.getConfiguration().getVertices().get("convolution2d_9")).getLayerConf().getLayer(); assertEquals(nBoxes * (5 + nClasses), cl.getNOut()); assertEquals(new ActivationIdentity(), cl.getActivationFn()); assertEquals(ConvolutionMode.Same, cl.getConvolutionMode()); assertEquals(new WeightInitXavier(), cl.getWeightInitFn()); assertArrayEquals(new int[]{1,1}, cl.getKernelSize()); assertArrayEquals(new int[]{1,1}, cl.getKernelSize()); INDArray outExp; File f2 = Resources.asFile("regression_testing/100a/HouseNumberDetection_Output_100a.bin"); try(DataInputStream dis = new DataInputStream(new FileInputStream(f2))){ outExp = Nd4j.read(dis); } INDArray in; File f3 = Resources.asFile("regression_testing/100a/HouseNumberDetection_Input_100a.bin"); try(DataInputStream dis = new DataInputStream(new FileInputStream(f3))){ in = Nd4j.read(dis); } //Minor bug in 1.0.0-beta and earlier: not adding epsilon value to forward pass for batch norm //Which means: the record output doesn't have this. To account for this, we'll manually set eps to 0.0 here //https://github.com/deeplearning4j/deeplearning4j/issues/5836#issuecomment-405526228 for(Layer l : net.getLayers()){ if(l.conf().getLayer() instanceof BatchNormalization){ BatchNormalization bn = (BatchNormalization) l.conf().getLayer(); bn.setEps(0.0); } } INDArray outAct = net.outputSingle(in).castTo(outExp.dataType()); boolean eq = outExp.equalsWithEps(outAct, 1e-4); if(!eq){ log.info("Expected: {}", outExp); log.info("Actual: {}", outAct); } assertTrue("Output not equal", eq); }
Example 11
Source File: EmbeddingLayerTest.java From deeplearning4j with Apache License 2.0
@Test
public void testEmbeddingWeightInit() {
    // https://github.com/eclipse/deeplearning4j/issues/8663
    //The embedding layer weight initialization should be independent of the vocabulary size (nIn setting)
    for (WeightInit wi : new WeightInit[]{WeightInit.XAVIER, WeightInit.RELU, WeightInit.XAVIER_UNIFORM, WeightInit.LECUN_NORMAL}) {
        for (boolean seq : new boolean[]{false, true}) {
            Nd4j.getRandom().setSeed(12345);
            MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                    .seed(12345)
                    .list()
                    .layer(seq ?
                            new EmbeddingSequenceLayer.Builder().weightInit(wi).nIn(100).nOut(100).build() :
                            new EmbeddingLayer.Builder().weightInit(wi).nIn(100).nOut(100).build())
                    .build();
            MultiLayerNetwork net = new MultiLayerNetwork(conf);
            net.init();

            Nd4j.getRandom().setSeed(12345);
            MultiLayerConfiguration conf2 = new NeuralNetConfiguration.Builder()
                    .seed(12345)
                    .list()
                    .layer(seq ?
                            new EmbeddingSequenceLayer.Builder().weightInit(wi).nIn(100).nOut(100).build() :
                            new EmbeddingLayer.Builder().weightInit(wi).nIn(100).nOut(100).build())
                    .build();
            MultiLayerNetwork net2 = new MultiLayerNetwork(conf2);
            net2.init();

            Nd4j.getRandom().setSeed(12345);
            MultiLayerConfiguration conf3 = new NeuralNetConfiguration.Builder()
                    .seed(12345)
                    .list()
                    .layer(seq ?
                            new EmbeddingSequenceLayer.Builder().weightInit(wi).nIn(100000).nOut(100).build() :
                            new EmbeddingLayer.Builder().weightInit(wi).nIn(100000).nOut(100).build())
                    .build();
            MultiLayerNetwork net3 = new MultiLayerNetwork(conf3);
            net3.init();

            INDArray p1 = net.params();
            INDArray p2 = net2.params();
            INDArray p3 = net3.params();

            boolean eq = p1.equalsWithEps(p2, 1e-4);
            String str = (seq ? "EmbeddingSequenceLayer" : "EmbeddingLayer") + " - " + wi;
            assertTrue(str + " p1/p2 params not equal", eq);

            double m1 = p1.meanNumber().doubleValue();
            double s1 = p1.stdNumber().doubleValue();
            double m3 = p3.meanNumber().doubleValue();
            double s3 = p3.stdNumber().doubleValue();

            assertEquals(str, m1, m3, 0.1);
            assertEquals(str, s1, s3, 0.1);

            double re = relErr(s1, s3);
            assertTrue(str + " - " + re, re < 0.05);
        }
    }
}
Example 12
Source File: TestDataTypes.java From deeplearning4j with Apache License 2.0
@Test
public void testDataTypesSimple() throws Exception {
    Map<DataType, INDArray> outMapTrain = new HashMap<>();
    Map<DataType, INDArray> outMapTest = new HashMap<>();
    for (DataType globalDtype : new DataType[]{DataType.DOUBLE, DataType.FLOAT, DataType.HALF}) {
        Nd4j.setDefaultDataTypes(globalDtype, globalDtype);
        for (DataType netDType : new DataType[]{DataType.DOUBLE, DataType.FLOAT, DataType.HALF}) {
            log.info("Starting test: global dtype = {}, net dtype = {}", globalDtype, netDType);
            assertEquals(globalDtype, Nd4j.dataType());
            assertEquals(globalDtype, Nd4j.defaultFloatingPointType());

            MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                    .updater(new Sgd(1e-2))
                    .dataType(netDType)
                    .convolutionMode(ConvolutionMode.Same)
                    .activation(Activation.TANH)
                    .seed(12345)
                    .weightInit(WeightInit.XAVIER)
                    .list()
                    .layer(new ConvolutionLayer.Builder().kernelSize(2, 2).stride(1, 1).padding(0, 0).nOut(3).build())
                    .layer(new SubsamplingLayer.Builder().kernelSize(2, 2).stride(1, 1).padding(0, 0).build())
                    .layer(new BatchNormalization.Builder().eps(1e-3).build())
                    .layer(new ConvolutionLayer.Builder().kernelSize(2, 2).stride(1, 1).padding(0, 0).nOut(3).build())
                    .layer(new OutputLayer.Builder().nOut(10).activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build())
                    .setInputType(InputType.convolutionalFlat(28, 28, 1))
                    .build();

            MultiLayerNetwork net = new MultiLayerNetwork(conf);
            net.init();

            Field f1 = org.deeplearning4j.nn.layers.convolution.ConvolutionLayer.class.getDeclaredField("helper");
            f1.setAccessible(true);
            Field f2 = org.deeplearning4j.nn.layers.convolution.subsampling.SubsamplingLayer.class.getDeclaredField("helper");
            f2.setAccessible(true);
            Field f3 = org.deeplearning4j.nn.layers.normalization.BatchNormalization.class.getDeclaredField("helper");
            f3.setAccessible(true);

            assertNotNull(f1.get(net.getLayer(0)));
            assertNotNull(f2.get(net.getLayer(1)));
            assertNotNull(f3.get(net.getLayer(2)));
            assertNotNull(f1.get(net.getLayer(3)));

            DataSet ds = new MnistDataSetIterator(32, true, 12345).next();

            //Simple sanity checks:
            net.fit(ds);
            net.fit(ds);

            INDArray outTrain = net.output(ds.getFeatures(), false);
            INDArray outTest = net.output(ds.getFeatures(), true);

            outMapTrain.put(netDType, outTrain.castTo(DataType.DOUBLE));
            outMapTest.put(netDType, outTest.castTo(DataType.DOUBLE));
        }
    }

    Nd4j.setDataType(DataType.DOUBLE);
    INDArray fp64Train = outMapTrain.get(DataType.DOUBLE);
    INDArray fp32Train = outMapTrain.get(DataType.FLOAT).castTo(DataType.DOUBLE);
    INDArray fp16Train = outMapTrain.get(DataType.HALF).castTo(DataType.DOUBLE);

    boolean eq64_32 = fp64Train.equalsWithEps(fp32Train, 1e-3);
    boolean eq64_16 = fp64Train.equalsWithEps(fp16Train, 1e-2);

    if (!eq64_32) {
        System.out.println("FP64/32");
        System.out.println("fp64Train:\n" + fp64Train);
        System.out.println("fp32Train:\n" + fp32Train);
    }
    if (!eq64_16) {
        System.out.println("FP64/16");
        System.out.println("fp64Train:\n" + fp64Train);
        System.out.println("fp16Train:\n" + fp16Train);
    }

    assertTrue(eq64_32);
    assertTrue(eq64_16);
}
Example 13
Source File: TestSparkMultiLayerParameterAveraging.java From deeplearning4j with Apache License 2.0
@Test @Ignore("AB 2019/05/23 - Failing on CI only - passing locally. Possible precision or threading issue") public void testSeedRepeatability() throws Exception { MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().seed(12345).updater(new RmsProp()) .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT) .weightInit(WeightInit.XAVIER).list() .layer(0, new org.deeplearning4j.nn.conf.layers.DenseLayer.Builder().nIn(4).nOut(4) .activation(Activation.TANH).build()) .layer(1, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder( LossFunctions.LossFunction.MCXENT).nIn(4).nOut(3).activation(Activation.SOFTMAX) .build()) .build(); Nd4j.getRandom().setSeed(12345); MultiLayerNetwork n1 = new MultiLayerNetwork(conf); n1.init(); Nd4j.getRandom().setSeed(12345); MultiLayerNetwork n2 = new MultiLayerNetwork(conf); n2.init(); Nd4j.getRandom().setSeed(12345); MultiLayerNetwork n3 = new MultiLayerNetwork(conf); n3.init(); SparkDl4jMultiLayer sparkNet1 = new SparkDl4jMultiLayer(sc, n1, new ParameterAveragingTrainingMaster.Builder(1).workerPrefetchNumBatches(5) .batchSizePerWorker(5).averagingFrequency(1).repartionData(Repartition.Always) .rngSeed(12345).build()); Thread.sleep(100); //Training master IDs are only unique if they are created at least 1 ms apart... SparkDl4jMultiLayer sparkNet2 = new SparkDl4jMultiLayer(sc, n2, new ParameterAveragingTrainingMaster.Builder(1).workerPrefetchNumBatches(5) .batchSizePerWorker(5).averagingFrequency(1).repartionData(Repartition.Always) .rngSeed(12345).build()); Thread.sleep(100); SparkDl4jMultiLayer sparkNet3 = new SparkDl4jMultiLayer(sc, n3, new ParameterAveragingTrainingMaster.Builder(1).workerPrefetchNumBatches(5) .batchSizePerWorker(5).averagingFrequency(1).repartionData(Repartition.Always) .rngSeed(98765).build()); List<DataSet> data = new ArrayList<>(); DataSetIterator iter = new IrisDataSetIterator(1, 150); while (iter.hasNext()) data.add(iter.next()); JavaRDD<DataSet> rdd = sc.parallelize(data); sparkNet1.fit(rdd); sparkNet2.fit(rdd); sparkNet3.fit(rdd); INDArray p1 = sparkNet1.getNetwork().params(); INDArray p2 = sparkNet2.getNetwork().params(); INDArray p3 = sparkNet3.getNetwork().params(); sparkNet1.getTrainingMaster().deleteTempFiles(sc); sparkNet2.getTrainingMaster().deleteTempFiles(sc); sparkNet3.getTrainingMaster().deleteTempFiles(sc); boolean eq1 = p1.equalsWithEps(p2, 0.01); boolean eq2 = p1.equalsWithEps(p3, 0.01); assertTrue("Model 1 and 2 params should be equal", eq1); assertFalse("Model 1 and 3 params shoud be different", eq2); }
Example 14
Source File: TestSparkComputationGraph.java From deeplearning4j with Apache License 2.0
@Ignore("AB 2019/05/23 - Failing on CI only - passing locally. Possible precision or threading issue") public void testSeedRepeatability() throws Exception { ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder().seed(12345).updater(Updater.RMSPROP) .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT) .weightInit(WeightInit.XAVIER).graphBuilder().addInputs("in") .addLayer("0", new org.deeplearning4j.nn.conf.layers.DenseLayer.Builder().nIn(4).nOut(4) .activation(Activation.TANH).build(), "in") .addLayer("1", new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder( LossFunctions.LossFunction.MCXENT).nIn(4).nOut(3).activation(Activation.SOFTMAX) .build(), "0") .setOutputs("1").build(); Nd4j.getRandom().setSeed(12345); ComputationGraph n1 = new ComputationGraph(conf.clone()); n1.init(); Nd4j.getRandom().setSeed(12345); ComputationGraph n2 = new ComputationGraph(conf.clone()); n2.init(); Nd4j.getRandom().setSeed(12345); ComputationGraph n3 = new ComputationGraph(conf.clone()); n3.init(); SparkComputationGraph sparkNet1 = new SparkComputationGraph(sc, n1, new ParameterAveragingTrainingMaster.Builder(1).workerPrefetchNumBatches(5) .batchSizePerWorker(5).averagingFrequency(1).repartionData(Repartition.Always) .rngSeed(12345).build()); Thread.sleep(100); //Training master IDs are only unique if they are created at least 1 ms apart... SparkComputationGraph sparkNet2 = new SparkComputationGraph(sc, n2, new ParameterAveragingTrainingMaster.Builder(1).workerPrefetchNumBatches(5) .batchSizePerWorker(5).averagingFrequency(1).repartionData(Repartition.Always) .rngSeed(12345).build()); Thread.sleep(100); SparkComputationGraph sparkNet3 = new SparkComputationGraph(sc, n3, new ParameterAveragingTrainingMaster.Builder(1).workerPrefetchNumBatches(5) .batchSizePerWorker(5).averagingFrequency(1).repartionData(Repartition.Always) .rngSeed(98765).build()); List<DataSet> data = new ArrayList<>(); DataSetIterator iter = new IrisDataSetIterator(1, 150); while (iter.hasNext()) data.add(iter.next()); JavaRDD<DataSet> rdd = sc.parallelize(data); sparkNet1.fit(rdd); sparkNet2.fit(rdd); sparkNet3.fit(rdd); INDArray p1 = sparkNet1.getNetwork().params(); INDArray p2 = sparkNet2.getNetwork().params(); INDArray p3 = sparkNet3.getNetwork().params(); sparkNet1.getTrainingMaster().deleteTempFiles(sc); sparkNet2.getTrainingMaster().deleteTempFiles(sc); sparkNet3.getTrainingMaster().deleteTempFiles(sc); boolean eq1 = p1.equalsWithEps(p2, 0.01); boolean eq2 = p1.equalsWithEps(p3, 0.01); assertTrue("Model 1 and 2 params should be equal", eq1); assertFalse("Model 1 and 3 params shoud be different", eq2); }