Java Code Examples for org.nd4j.autodiff.samediff.SDVariable#std()
The following examples show how to use
org.nd4j.autodiff.samediff.SDVariable#std().
You can vote up the ones you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example 1
Source File: TransformOpValidation.java From deeplearning4j with Apache License 2.0 | 6 votes |
/**
 * Validates {@code sd.replaceWhere} with a scalar replacement value against the
 * reference implementation {@code BooleanIndexing.replaceWhere}, for three conditions.
 *
 * Fix: the reference array {@code exp} was computed but never registered on the
 * TestCase, so the op output was never actually compared against it. It is now
 * passed as the expected output of the replaceWhere variable.
 */
@Test
public void testReplaceWhereScalar() {
    for (Condition c : new Condition[]{Conditions.lessThan(0.5), Conditions.greaterThan(0.5), Conditions.equals(0.5)}) {
        log.info("Testing condition: " + c.getClass().getSimpleName());
        INDArray inArr = Nd4j.rand(DataType.DOUBLE, 3, 4);
        SameDiff sd = SameDiff.create();
        SDVariable in = sd.var("in", inArr);
        SDVariable where = sd.replaceWhere(in, 10, c);

        // Reference result computed directly on a copy of the input
        INDArray exp = inArr.dup();
        BooleanIndexing.replaceWhere(exp, 10, c);

        // Scalar loss so the gradient check has a single output to differentiate
        SDVariable loss = where.std(true);

        TestCase tc = new TestCase(sd)
                .expectedOutput(where.name(), exp);   // was previously computed but unused
        String err = OpValidation.validate(tc);
        assertNull(err);
    }
}
Example 2
Source File: ShapeOpValidation.java From deeplearning4j with Apache License 2.0 | 6 votes |
/**
 * Gathers a single column (index 0 along dimension 1) from a 2x3 input and
 * gradient-checks the result with loosened epsilon/relative-error thresholds.
 */
@Test
public void testGather2() {
    SameDiff sameDiff = SameDiff.create();
    SDVariable source = sameDiff.var("in", Nd4j.arange(6).castTo(DataType.FLOAT).reshape(2, 3));
    SDVariable gatherIndices = sameDiff.constant("indices", Nd4j.createFromArray(0));
    SDVariable picked = sameDiff.gather(source, gatherIndices, 1);
    SDVariable loss = picked.std(true);

    // Force an output pass, then make the gathered variable the loss for grad checks
    sameDiff.output((Map<String, INDArray>) null, picked.name());
    sameDiff.setLossVariables(picked.name());

    TestCase testCase = new TestCase(sameDiff)
            .gradCheckEpsilon(1e-3)
            .gradCheckMaxRelativeError(1e-4);
    assertNull(OpValidation.validate(testCase));
}
Example 3
Source File: ReductionOpValidation.java From deeplearning4j with Apache License 2.0 | 6 votes |
/**
 * Checks {@code SDVariable.std(biasCorrected)} against the plain ND4J reduction
 * {@code INDArray.std(biasCorrected)} across all test matrix layouts, for both
 * bias-corrected and uncorrected variants. Collects failures and asserts none.
 */
@Test
public void testStdev() {
    List<String> failures = new ArrayList<>();
    for (Pair<INDArray, String> matrix : NDArrayCreationUtil.getAllTestMatricesWithShape(3, 4, 12345, DataType.DOUBLE)) {
        for (boolean biasCorrected : new boolean[]{false, true}) {
            SameDiff sameDiff = SameDiff.create();
            SDVariable input = sameDiff.var("in", matrix.getFirst());
            SDVariable stdev = input.std(biasCorrected);

            // Reference value from the eager ND4J API on identical data
            INDArray reference = matrix.getFirst().std(biasCorrected);

            TestCase testCase = new TestCase(sameDiff)
                    .testName(matrix.getSecond() + " - biasCorrected=" + biasCorrected)
                    .expected(stdev, reference)
                    .gradientCheck(false);

            String error = OpValidation.validate(testCase);
            if (error != null) {
                failures.add(error);
            }
        }
    }
    assertEquals(failures.toString(), 0, failures.size());
}
Example 4
Source File: TransformOpValidation.java From deeplearning4j with Apache License 2.0 | 6 votes |
/**
 * Validates {@code sd.replaceWhere} with an array of replacement values against the
 * reference implementation {@code BooleanIndexing.replaceWhere}, for three conditions.
 *
 * Fix: the reference array {@code exp} was computed but never registered on the
 * TestCase, so the op output was never actually compared against it. It is now
 * passed as the expected output of the replaceWhere variable.
 */
@Test
public void testReplaceWhereArray() {
    for (Condition c : new Condition[]{Conditions.lessThan(0.5), Conditions.greaterThan(0.5), Conditions.equals(0.5)}) {
        INDArray inArr = Nd4j.rand(3, 4);
        INDArray inArr2 = Nd4j.valueArrayOf(3, 4, 10);
        SameDiff sd = SameDiff.create();
        SDVariable in = sd.var("in", inArr);
        SDVariable in2 = sd.var("in2", inArr2);
        SDVariable where = sd.replaceWhere(in, in2, c);

        // Reference result computed directly on a copy of the input
        INDArray exp = inArr.dup();
        BooleanIndexing.replaceWhere(exp, inArr2, c);

        // Scalar loss so the gradient check has a single output to differentiate
        SDVariable loss = where.std(true);

        TestCase tc = new TestCase(sd)
                .expectedOutput(where.name(), exp);   // was previously computed but unused
        String err = OpValidation.validate(tc);
        assertNull(err);
    }
}
Example 5
Source File: ActivationGradChecks.java From deeplearning4j with Apache License 2.0 | 6 votes |
/**
 * Activation-gradient check for a single tanh activation: builds x -> tanh -> std(loss)
 * and verifies the "tanh" activation's gradients numerically.
 */
@Test
public void testActivationGradientCheck1() {
    Nd4j.getRandom().setSeed(12345);

    SameDiff sameDiff = SameDiff.create();
    SDVariable input = sameDiff.var("x", Nd4j.rand(DataType.DOUBLE, 3, 4));
    SDVariable activated = sameDiff.math().tanh("tanh", input);
    SDVariable loss = activated.std(true);

    GradCheckUtil.ActGradConfig config = GradCheckUtil.ActGradConfig.builder()
            .sd(sameDiff)
            .activationGradsToCheck(Collections.singletonList("tanh"))
            .build();

    boolean passed = GradCheckUtil.checkActivationGradients(config);
    assertTrue(passed);
}
Example 6
Source File: ActivationGradChecks.java From deeplearning4j with Apache License 2.0 | 6 votes |
/**
 * Activation-gradient check through a matmul + activation with a placeholder input,
 * using a RANDOM subset of at most 10 elements per parameter.
 */
@Test
public void testActivationGradientCheck2() {
    Nd4j.getRandom().setSeed(12345);

    SameDiff sameDiff = SameDiff.create();
    SDVariable x = sameDiff.placeHolder("x", DataType.DOUBLE, 3, 4);
    SDVariable y = sameDiff.var("y", Nd4j.rand(DataType.DOUBLE, 4, 5));
    SDVariable product = x.mmul("mmul", y);
    // NOTE(review): the variable is named "sigmoid" but the op applied is tanh —
    // the name is kept for behavior/lookup compatibility.
    SDVariable activated = sameDiff.math().tanh("sigmoid", product);
    SDVariable loss = activated.std(true);

    Map<String, INDArray> placeholderValues = new HashMap<>();
    placeholderValues.put("x", Nd4j.rand(DataType.DOUBLE, 3, 4));

    GradCheckUtil.ActGradConfig config = GradCheckUtil.ActGradConfig.builder()
            .sd(sameDiff)
            .placeholderValues(placeholderValues)
            .activationGradsToCheck(Arrays.asList("sigmoid", "mmul"))
            .subset(GradCheckUtil.Subset.RANDOM)
            .maxPerParam(10)
            .build();

    boolean passed = GradCheckUtil.checkActivationGradients(config);
    assertTrue(passed);
}
Example 7
Source File: ShapeOpValidation.java From deeplearning4j with Apache License 2.0 | 6 votes |
@Test public void testConstant(){ //OpValidationSuite.ignoreFailing(); //Case 0: no shape SameDiff sd = SameDiff.create(); INDArray ia = Nd4j.create(new double[]{1,2,3}); SDVariable in = sd.var(ia); SDVariable loss = in.std(true); assertNull(OpValidation.validate(new TestCase(sd).expected(in, ia))); //Case 1: shape is provided + scalar sd = SameDiff.create(); ia = Nd4j.scalar(3.0); in = sd.var(ia); SDVariable constant = sd.constant(Nd4j.create(DataType.FLOAT, 3,4,5)); INDArray exp = Nd4j.valueArrayOf(new long[]{3,4,5}, 3.0); loss = constant.std(true); assertNull(OpValidation.validate(new TestCase(sd) .gradientCheck(false) .expected(constant, Nd4j.create(DataType.FLOAT, 3,4,5)))); }
Example 8
Source File: MiscOpValidation.java From deeplearning4j with Apache License 2.0 | 6 votes |
@Test public void testStopGradient(){ SameDiff sd = SameDiff.create(); SDVariable w = sd.var("w", Nd4j.rand(DataType.DOUBLE, 3, 4)); SDVariable v = new StopGradient(sd, w).outputVariable(); SDVariable loss = v.std(true); Map<String,INDArray> gm = sd.calculateGradients(null, v.name(), w.name()); INDArray vArr = gm.get(v.name()); INDArray wArr = gm.get(w.name()); // System.out.println(vArr); // System.out.println(wArr); assertEquals(Nd4j.zeros(DataType.DOUBLE, 3, 4), wArr); }
Example 9
Source File: LayerOpValidation.java From deeplearning4j with Apache License 2.0 | 5 votes |
/**
 * Validates biasAdd in both NCHW and NHWC layouts against a manual broadcast-add of
 * the bias along the channel dimension, with gradient checking enabled.
 */
@Test
public void testBiasAdd_nchw_nhwc() {
    Nd4j.getRandom().setSeed(12345);

    for (boolean nchw : new boolean[]{true, false}) {
        log.info("Starting test: {}", nchw ? "nchw" : "nhwc");
        SameDiff sameDiff = SameDiff.create();

        // Channel dim is axis 1 for NCHW, axis 3 for NHWC; 4 channels either way
        long[] inputShape = nchw ? new long[]{2, 4, 3, 3} : new long[]{2, 3, 3, 4};
        SDVariable in = sameDiff.var("input", Nd4j.rand(DataType.DOUBLE, inputShape));
        SDVariable b = sameDiff.var("bias", Nd4j.rand(DataType.DOUBLE, new long[]{4}));

        SDVariable bAdd = sameDiff.nn.biasAdd(in, b, nchw);
        SDVariable loss = bAdd.std(true);

        // Expected: broadcast-add the bias along the channel dimension
        INDArray exp = in.getArr().dup();
        INDArray biasView = nchw
                ? b.getArr().reshape(1, 4, 1, 1)
                : b.getArr().reshape(1, 1, 1, 4);
        exp.addi(biasView);

        TestCase tc = new TestCase(sameDiff)
                .gradientCheck(true)
                .expectedOutput(bAdd.name(), exp);
        assertNull(OpValidation.validate(tc));
    }
}
Example 10
Source File: RandomOpValidation.java From deeplearning4j with Apache License 2.0 | 5 votes |
@Test public void testRange(){ //Technically deterministic, not random... double[][] testCases = new double[][]{ {3,18,3}, {3,1,-0.5}, {0,5,1} }; List<INDArray> exp = Arrays.asList( Nd4j.create(new double[]{3, 6, 9, 12, 15}).castTo(DataType.FLOAT), Nd4j.create(new double[]{3, 2.5, 2, 1.5}).castTo(DataType.FLOAT), Nd4j.create(new double[]{0, 1, 2, 3, 4}).castTo(DataType.FLOAT)); for(int i=0; i<testCases.length; i++ ){ double[] d = testCases[i]; INDArray e = exp.get(i); SameDiff sd = SameDiff.create(); SDVariable range = sd.range(d[0], d[1], d[2], DataType.FLOAT); SDVariable loss = range.std(true); TestCase tc = new TestCase(sd) .expected(range, e) .testName(Arrays.toString(d)) .gradientCheck(false); assertNull(OpValidation.validate(tc)); } }
Example 11
Source File: LossOpValidation.java From deeplearning4j with Apache License 2.0 | 5 votes |
@Test public void TestStdLossMixedDataType(){ // Default Data Type in this test suite is Double. // This test used to throw an Exception that we have mixed data types. SameDiff sd = SameDiff.create(); SDVariable v = sd.placeHolder("x", DataType.FLOAT, 3,4); SDVariable loss = v.std(true); }
Example 12
Source File: MiscOpValidation.java From deeplearning4j with Apache License 2.0 | 5 votes |
/**
 * Validates sd.linspace(1..10, 10 points, DOUBLE) against Nd4j.linspace.
 * Gradient check disabled (deterministic generator op).
 */
@Test
public void testLinspace() {
    SameDiff sd = SameDiff.create();
    SDVariable linspaceVar = sd.linspace("linspace", DataType.DOUBLE, 1, 10, 10);
    SDVariable loss = linspaceVar.std(true);

    TestCase tc = new TestCase(sd)
            .expected(linspaceVar, Nd4j.linspace(1, 10, 10, DataType.DOUBLE))
            .gradientCheck(false);
    assertNull(OpValidation.validate(tc));
}
Example 13
Source File: MiscOpValidation.java From deeplearning4j with Apache License 2.0 | 5 votes |
@Test public void testLinspace2(){ OpValidationSuite.ignoreFailing(); //TODO 2019/01/18 SameDiff sd = SameDiff.create(); SDVariable out = sd.linspace("linspace", sd.constant(Nd4j.scalar(1)), sd.constant(Nd4j.scalar(10)), sd.constant(Nd4j.scalar(10)), DataType.DOUBLE); SDVariable loss = out.std(true); String err = OpValidation.validate(new TestCase(sd) .expected(out, Nd4j.linspace(1,10,10, DataType.DOUBLE))); assertNull(err); }
Example 14
Source File: MiscOpValidation.java From deeplearning4j with Apache License 2.0 | 4 votes |
/**
 * Gradient checks for mmul across every combination of: a/b memory order ('c'/'f'),
 * transposeA, transposeB, and transposeResult (2*2*2*2*2 = 32 cases).
 * Expected output is computed eagerly via INDArray.mmul on (optionally transposed)
 * inputs; loss = std(mmul) gives a scalar for the gradient check. Failures are
 * collected per-case and asserted empty at the end.
 * NOTE(review): relies on a sibling helper t(transpose, shape) — presumably it swaps
 * the shape dims when transposed; defined elsewhere in this file, not visible here.
 */
@Test public void testMmulGradients(){ int[] aShape = new int[]{2,3}; int[] bShape = new int[]{3,4}; List<String> failed = new ArrayList<>(); for( char aOrder : new char[]{'c', 'f'}) { for (char bOrder : new char[]{'c', 'f'}) { for (boolean transposeA : new boolean[]{false, true}) { for (boolean transposeB : new boolean[]{false, true}) { for (boolean transposeResult : new boolean[]{false, true}) { //https://github.com/deeplearning4j/deeplearning4j/issues/5648 Nd4j.getRandom().setSeed(12345); INDArray aArr = Nd4j.rand(DataType.DOUBLE, t(transposeA, aShape)).dup(aOrder); INDArray bArr = Nd4j.rand(DataType.DOUBLE, t(transposeB, bShape)).dup(bOrder); SameDiff sd = SameDiff.create(); SDVariable a = sd.var("a", aArr); SDVariable b = sd.var("b", bArr); SDVariable mmul = sd.mmul(a, b, transposeA, transposeB, transposeResult); INDArray exp = (transposeA ? aArr.transpose() : aArr); exp = exp.mmul(transposeB ? bArr.transpose() : bArr); exp = (transposeResult ? exp.transpose() : exp); SDVariable loss = mmul.std(true); String name = aOrder + "," + bOrder + ",tA=" + transposeA + ",tB=" + transposeB + ",tRes=" + transposeResult; TestCase tc = new TestCase(sd).testName(name) .expected(mmul, exp); String err = OpValidation.validate(tc, true); if(err != null) failed.add(err); } } } } } assertEquals(failed.toString(), 0, failed.size()); }
Example 15
Source File: ShapeOpValidation.java From deeplearning4j with Apache License 2.0 | 4 votes |
/**
 * Validates sorted segment ops (max/min/mean/prod/sum) and their unsorted variants
 * (prefixed "u", plus usqrtn) against hand-computed expected arrays. Sorted cases use
 * monotonically non-decreasing segment ids {0,0,0,1,2,2,3,3}; unsorted cases swap in a
 * permuted id array {3,1,0,0,2,0,3,2} with matching data. loss = std(result) is added
 * as the loss variable for the gradient check; the integer segment-id input is skipped
 * in the gradient check via gradCheckSkipVariables. Failures are collected per-op and
 * asserted empty. Currently marked as a known failure (issue #6952).
 */
@Test public void testSegmentOps(){ OpValidationSuite.ignoreFailing(); //https://github.com/deeplearning4j/deeplearning4j/issues/6952 INDArray s = Nd4j.create(new double[]{0,0,0,1,2,2,3,3}, new long[]{8}).castTo(DataType.INT); INDArray d = Nd4j.create(new double[]{5,1,7,2,3,4,1,3}, new long[]{8}); int numSegments = 4; List<String> failed = new ArrayList<>(); for(String op : new String[]{"max", "min", "mean", "prod", "sum", "umax", "umin", "umean", "uprod", "usum", "usqrtn"}) { log.info("Starting test: {}", op); if(op.startsWith("u")){ //Unsorted segment cases s = Nd4j.create(new double[]{3,1,0,0,2,0,3,2}, new long[]{8}).castTo(DataType.INT); d = Nd4j.create(new double[]{1,2,5,7,3,1,3,4}, new long[]{8}); } SameDiff sd = SameDiff.create(); SDVariable data = sd.var("data", d); SDVariable segments = sd.constant("segments", s); SDVariable sm; INDArray exp; switch (op){ case "max": sm = sd.segmentMax(data, segments); exp = Nd4j.create(new double[]{7, 2, 4, 3}); break; case "min": sm = sd.segmentMin(data, segments); exp = Nd4j.create(new double[]{1, 2, 3, 1}); break; case "mean": sm = sd.segmentMean(data, segments); exp = Nd4j.create(new double[]{4.3333333333, 2, 3.5, 2}); break; case "prod": sm = sd.segmentProd(data, segments); exp = Nd4j.create(new double[]{35, 2, 12, 3}); break; case "sum": sm = sd.segmentSum(data, segments); exp = Nd4j.create(new double[]{13, 2, 7, 4}); break; case "umax": sm = sd.unsortedSegmentMax(data, segments, numSegments); exp = Nd4j.create(new double[]{7, 2, 4, 3}); break; case "umin": sm = sd.unsortedSegmentMin(data, segments, numSegments); exp = Nd4j.create(new double[]{1, 2, 3, 1}); break; case "umean": sm = sd.unsortedSegmentMean(data, segments, numSegments); exp = Nd4j.create(new double[]{4.3333333333, 2, 3.5, 2}); break; case "uprod": sm = sd.unsortedSegmentProd(data, segments, numSegments); exp = Nd4j.create(new double[]{35, 2, 12, 3}); break; case "usum": sm = sd.unsortedSegmentSum(data, segments, numSegments); exp = Nd4j.create(new 
double[]{13, 2, 7, 4}); break; case "usqrtn": sm = sd.unsortedSegmentSqrtN(data, segments, numSegments); exp = Nd4j.create(new double[]{(5+7+1)/Math.sqrt(3), 2, (3+4)/Math.sqrt(2), (1+3)/Math.sqrt(2)}); break; default: throw new RuntimeException(); } SDVariable loss = sm.std(true); sd.addLossVariable(loss); TestCase tc = new TestCase(sd) .testName(op) .expected(sm, exp) .gradientCheck(true) .gradCheckSkipVariables(segments.name()); String err = OpValidation.validate(tc); if(err != null) failed.add(err); } assertEquals(failed.toString(), 0, failed.size()); }
Example 16
Source File: MiscOpValidation.java From deeplearning4j with Apache License 2.0 | 4 votes |
/**
 * Validates zerosLike/onesLike across scalar, vector, matrix, and rank-3 shapes.
 * For scalars mean() is used as the loss (std needs more than one element).
 */
@Test
public void testZerosOnesLike() {
    Nd4j.getRandom().setSeed(12345);

    List<int[]> shapes = Arrays.asList(new int[0], new int[]{3}, new int[]{3, 4}, new int[]{3, 4, 5});
    List<String> failures = new ArrayList<>();

    for (boolean zeros : new boolean[]{true, false}) {
        for (int[] shape : shapes) {
            SameDiff sd = SameDiff.create();

            INDArray source;
            if (shape.length > 0) {
                source = Nd4j.rand(shape);
            } else {
                // Scalar case: pull a single random value out of a 1x1 array
                source = Nd4j.scalar(Nd4j.rand(new int[]{1, 1}).getDouble(0));
            }

            SDVariable var = sd.var("in", source);
            SDVariable xLike = zeros ? sd.zerosLike(var) : sd.onesLike(var);

            // std(true) requires >1 element; fall back to mean() for scalars
            SDVariable loss = shape.length > 0 ? xLike.std(true) : xLike.mean();

            INDArray expected = zeros ? Nd4j.zeros(shape) : Nd4j.ones(shape);
            String err = OpValidation.validate(new TestCase(sd).expected(xLike, expected), true);
            if (err != null) {
                failures.add(err);
            }
        }
    }
    assertEquals(failures.toString(), 0, failures.size());
}
Example 17
Source File: ValidationUtilTests.java From deeplearning4j with Apache License 2.0 | 4 votes |
@Test public void testValidateSameDiff() throws Exception { Nd4j.setDataType(DataType.FLOAT); File f = testDir.newFolder(); SameDiff sd = SameDiff.create(); SDVariable v = sd.placeHolder("x", DataType.FLOAT, 3,4); SDVariable loss = v.std(true); File fOrig = new File(f, "sd_fb.fb"); sd.asFlatFile(fOrig);; //Test not existent file: File fNonExistent = new File("doesntExist.fb"); ValidationResult vr0 = Nd4jValidator.validateSameDiffFlatBuffers(fNonExistent); assertFalse(vr0.isValid()); assertEquals("SameDiff FlatBuffers file", vr0.getFormatType()); assertTrue(vr0.getIssues().get(0), vr0.getIssues().get(0).contains("exist")); // System.out.println(vr0.toString()); //Test empty file: File fEmpty = new File(f, "empty.fb"); fEmpty.createNewFile(); assertTrue(fEmpty.exists()); ValidationResult vr1 = Nd4jValidator.validateSameDiffFlatBuffers(fEmpty); assertEquals("SameDiff FlatBuffers file", vr1.getFormatType()); assertFalse(vr1.isValid()); assertTrue(vr1.getIssues().get(0), vr1.getIssues().get(0).contains("empty")); // System.out.println(vr1.toString()); //Test directory (not zip file) File directory = new File(f, "dir"); boolean created = directory.mkdir(); assertTrue(created); ValidationResult vr2 = Nd4jValidator.validateSameDiffFlatBuffers(directory); assertEquals("SameDiff FlatBuffers file", vr2.getFormatType()); assertFalse(vr2.isValid()); assertTrue(vr2.getIssues().get(0), vr2.getIssues().get(0).contains("directory")); // System.out.println(vr2.toString()); //Test non-flatbuffers File fText = new File(f, "text.fb"); FileUtils.writeStringToFile(fText, "Not a flatbuffers file :)", StandardCharsets.UTF_8); ValidationResult vr3 = Nd4jValidator.validateSameDiffFlatBuffers(fText); assertEquals("SameDiff FlatBuffers file", vr3.getFormatType()); assertFalse(vr3.isValid()); String s = vr3.getIssues().get(0); assertTrue(s, s.contains("FlatBuffers") && s.contains("SameDiff") && s.contains("corrupt")); // System.out.println(vr3.toString()); //Test corrupted flatbuffers format: 
byte[] fbBytes = FileUtils.readFileToByteArray(fOrig); for( int i=0; i<30; i++ ){ fbBytes[i] = (byte)('a' + i); } File fCorrupt = new File(f, "corrupt.fb"); FileUtils.writeByteArrayToFile(fCorrupt, fbBytes); ValidationResult vr4 = Nd4jValidator.validateSameDiffFlatBuffers(fCorrupt); assertEquals("SameDiff FlatBuffers file", vr4.getFormatType()); assertFalse(vr4.isValid()); s = vr4.getIssues().get(0); assertTrue(s, s.contains("FlatBuffers") && s.contains("SameDiff") && s.contains("corrupt")); // System.out.println(vr4.toString()); //Test valid npz format: ValidationResult vr5 = Nd4jValidator.validateSameDiffFlatBuffers(fOrig); assertEquals("SameDiff FlatBuffers file", vr5.getFormatType()); assertTrue(vr5.isValid()); assertNull(vr5.getIssues()); assertNull(vr5.getException()); // System.out.println(vr4.toString()); }
Example 18
Source File: LayerOpValidation.java From deeplearning4j with Apache License 2.0 | 4 votes |
@Test public void testAvgPooling2dBasic() { Nd4j.getRandom().setSeed(12345); int nIn = 3; int kH = 2; int kW = 2; int mb = 3; int imgH = 8; int imgW = 8; SameDiff sd = SameDiff.create(); INDArray inArr = Nd4j.rand(new int[]{mb, nIn, imgH, imgW}); SDVariable in = sd.var("in", inArr); Pooling2DConfig pooling2DConfig = Pooling2DConfig.builder() .kH(kH).kW(kW) .pH(0).pW(0) .sH(1).sW(1) .dH(1).dW(1) .isSameMode(false) .build(); SDVariable outPool = sd.cnn().avgPooling2d(in, pooling2DConfig); SDVariable out = sd.nn().tanh("out", outPool); INDArray outArr = out.eval(); val outShape = outArr.shape(); // oH = (iH - (kH + (kH-1)*(dH-1)) + 2*pH)/sH + 1; assertArrayEquals(new long[]{mb, nIn, 7, 7}, outShape); SDVariable loss = out.std(true); INDArray exp = Nd4j.create(mb, nIn, 7, 7); NdIndexIterator iter = new NdIndexIterator(mb, nIn, 7, 7); while (iter.hasNext()) { long[] next = iter.next(); double avg = (inArr.getDouble(next) + inArr.getDouble(next[0], next[1], next[2] + 1, next[3]) + inArr.getDouble(next[0], next[1], next[2], next[3] + 1) + inArr.getDouble(next[0], next[1], next[2] + 1, next[3] + 1)) / 4.0; exp.putScalar(next, avg); } assertNull(OpValidation.validate(new TestCase(sd) .expected(outPool, exp).gradientCheck(true))); }
Example 19
Source File: LayerOpValidation.java From deeplearning4j with Apache License 2.0 | 4 votes |
@Test public void testMaxPooling2dBasic() { Nd4j.getRandom().setSeed(12345); int nIn = 3; int kH = 2; int kW = 2; int mb = 3; int imgH = 8; int imgW = 8; SameDiff sd = SameDiff.create(); INDArray inArr = Nd4j.rand(new int[]{mb, nIn, imgH, imgW}); SDVariable in = sd.var("in", inArr); Pooling2DConfig pooling2DConfig = Pooling2DConfig.builder() .kH(kH).kW(kW) .pH(0).pW(0) .sH(1).sW(1) .dH(1).dW(1) .isSameMode(false) .build(); SDVariable outPool = sd.cnn().maxPooling2d(in, pooling2DConfig); SDVariable out = sd.nn().tanh("out", outPool); INDArray outArr = out.eval(); val outShape = outArr.shape(); // oH = (iH - (kH + (kH-1)*(dH-1)) + 2*pH)/sH + 1; assertArrayEquals(new long[]{mb, nIn, 7, 7}, outShape); SDVariable loss = out.std(true); INDArray exp = Nd4j.create(mb, nIn, 7, 7); NdIndexIterator iter = new NdIndexIterator(mb, nIn, 7, 7); while (iter.hasNext()) { long[] next = iter.next(); double max = max(inArr.getDouble(next), inArr.getDouble(next[0], next[1], next[2] + 1, next[3]), inArr.getDouble(next[0], next[1], next[2], next[3] + 1), inArr.getDouble(next[0], next[1], next[2] + 1, next[3] + 1)); exp.putScalar(next, max); } assertNull(OpValidation.validate(new TestCase(sd).gradientCheck(true) .expected(outPool, exp))); }
Example 20
Source File: LayerOpValidation.java From deeplearning4j with Apache License 2.0 | 4 votes |
@Test public void testDeconv2dBasic() { int nIn = 2; int nOut = 3; int kH = 2; int kW = 2; int mb = 2; int imgH = 8; int imgW = 8; SameDiff sd = SameDiff.create(); INDArray wArr = Nd4j.rand(new int[]{kH, kW, nOut, nIn}); //Libnd4j expected weights format: [kH, kW, cOut, cIn] INDArray bArr = Nd4j.rand(new long[]{nOut}); INDArray inArr = Nd4j.rand(new long[]{mb, nIn, imgH, imgW}); SDVariable in = sd.var("in", inArr); SDVariable w = sd.var("W", wArr); SDVariable b = sd.var("b", bArr); DeConv2DConfig deconv = DeConv2DConfig.builder() .kH(kH).kW(kW) .pH(0).pW(0) .sH(1).sW(1) .dH(1).dW(1) .isSameMode(false) .build(); SDVariable out = sd.cnn().deconv2d(in, w, b, deconv); out = sd.nn().tanh("out", out); INDArray outArr = out.eval(); //Expected output size: out = (in + k + 2*p)/ s - 1 = (8 + 2+0)/1 - 1 = 9 val outShape = outArr.shape(); assertArrayEquals(new long[]{mb, nOut, 9, 9}, outShape); SDVariable loss = out.std(true); //Gradient check: TestCase tc = new TestCase(sd).gradientCheck(true); String err = OpValidation.validate(tc); assertNull(err); }