Java Code Examples for org.nd4j.autodiff.samediff.SDVariable#sub()
The following examples show how to use org.nd4j.autodiff.samediff.SDVariable#sub(). Each example notes the project and source file it comes from.
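Before the project examples, here is a minimal, self-contained sketch (our own, not taken from any project below) of what sub() does: it adds an element-wise subtraction op to a SameDiff graph, broadcasting over size-1 dimensions where shapes differ. The class name SubExample is illustrative.

import org.nd4j.autodiff.samediff.SDVariable;
import org.nd4j.autodiff.samediff.SameDiff;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;

public class SubExample {
    public static void main(String[] args) {
        SameDiff sd = SameDiff.create();

        // Two 2x3 input variables
        SDVariable a = sd.var("a", Nd4j.createFromArray(new float[][]{{1, 2, 3}, {4, 5, 6}}));
        SDVariable b = sd.var("b", Nd4j.createFromArray(new float[][]{{0.5f, 0.5f, 0.5f}, {1, 1, 1}}));

        // Element-wise subtraction: diff = a - b (named variant, as used in the examples below)
        SDVariable diff = a.sub("diff", b);

        INDArray out = diff.eval();   // [[0.5, 1.5, 2.5], [3.0, 4.0, 5.0]]
        System.out.println(out);
    }
}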
Example 1
Source File: LossFunctions.java, from nd4j (Apache License 2.0)
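Here sub() computes the element-wise difference between predictions and labels; the squared, weighted result is then reduced to give the MSE loss.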
/**
 * Mean squared error: L = mean( (predicted - label)^2 )
 *
 * @param outputName  Name of the output SDVariable
 * @param predictions Predictions variable
 * @param label       Label variable
 * @param weights     Weights array. May be null, or any broadcastable shape (with predictions/label arrays).
 *                    Note that this is also used for masking (weight of 0 = 'masked out')
 * @param reduction   Type of reduction to perform for the loss function
 * @param dimensions  Dimension(s) to apply the loss function on
 * @return LossInfo - bean with variables etc for the loss function
 */
public static LossInfo mse(String outputName, SDVariable predictions, SDVariable label, SDVariable weights,
                           Reduction reduction, int... dimensions) {
    LossInfo.Builder b = validate("mse", predictions, label, reduction);
    SameDiff sd = predictions.getSameDiff();

    if (weights == null) {
        weights = sd.one("mse_loss_weights", SCALAR);
    }

    SDVariable diff = predictions.sub(label);
    String name = (reduction == Reduction.NONE ? outputName : null);
    SDVariable preReduceLoss = sd.square(diff).mul(name, weights);

    return doReduce(sd, outputName, true, b, reduction, preReduceLoss, label, weights, dimensions);
}
Example 2
Source File: LossFunctions.java, from nd4j (Apache License 2.0)
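The L2 loss builder is nearly identical to mse() above; only the final reduction differs (sum of squared errors rather than mean).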
/**
 * L2 loss function: i.e., sum of squared errors, L = sum_i (actual_i - predicted)^2
 *
 * @param outputName  Name of the output SDVariable
 * @param predictions Predictions variable
 * @param label       Label variable
 * @param weights     Weights array. May be null, or any broadcastable shape (with predictions/label arrays)
 * @param reduction   Type of reduction to perform for the loss function
 * @param dimensions  Dimension(s) to apply the loss function on
 * @return LossInfo - bean with variables etc for the loss function
 */
public static LossInfo l2(String outputName, SDVariable predictions, SDVariable label, SDVariable weights,
                          Reduction reduction, int... dimensions) {
    LossInfo.Builder b = validate("l2", predictions, label, reduction);
    SameDiff sd = predictions.getSameDiff();

    if (weights == null) {
        weights = sd.one("l2_loss_weights", SCALAR);
    }

    SDVariable diff = predictions.sub(label);
    String name = (reduction == Reduction.NONE ? outputName : null);
    SDVariable preReduceLoss = sd.square(diff).mul(name, weights);

    return doReduce(sd, outputName, false, b, reduction, preReduceLoss, label, weights, dimensions);
}
Example 3
Source File: TestSessions.java, from deeplearning4j (Apache License 2.0)
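In this test, sub() creates variable "c", which is deliberately not needed for the requested output "d"; the test verifies that the inference session skips it.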
@Test
public void testInferenceSessionBasic2() {
    //So far: trivial test to check execution order
    SameDiff sd = SameDiff.create();
    SDVariable ph1 = sd.placeHolder("x", DataType.FLOAT, 3, 3);
    SDVariable ph2 = sd.placeHolder("y", DataType.FLOAT, 3, 3);

    SDVariable a = ph1.add("a", ph2);
    SDVariable b = ph1.mmul("b", ph2);
    SDVariable c = ph1.sub("c", ph2);
    SDVariable d = a.add("d", b);

    //To get array d - need to execute: a, b, d - NOT the sub op (c)

    //NOTE: normally sessions are internal and completely hidden from users
    InferenceSession is = new InferenceSession(sd);

    INDArray x = Nd4j.linspace(1, 9, 9).castTo(DataType.FLOAT).reshape(3, 3);
    INDArray y = Nd4j.linspace(0.0, 0.9, 9, DataType.DOUBLE).castTo(DataType.FLOAT).reshape(3, 3);

    INDArray aExp = x.add(y);
    INDArray bExp = x.mmul(y);
    INDArray dExp = aExp.add(bExp);

    Map<String, INDArray> m = new HashMap<>();
    m.put("x", x);
    m.put("y", y);

    Map<String, INDArray> outMap = is.output(Collections.singletonList("d"), m, null,
            Collections.<String>emptyList(), null, At.defaultAt(Operation.TRAINING));

    assertEquals(1, outMap.size());
    assertEquals(dExp, outMap.get("d"));
}
Example 4
Source File: SameDiffMSEOutputLayer.java, from deeplearning4j (Apache License 2.0)
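A custom output layer that builds MSE directly in the graph: sub() gives the prediction/label difference, which is then squared and averaged.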
@Override
public SDVariable defineLayer(SameDiff sameDiff, SDVariable layerInput, SDVariable labels,
                              Map<String, SDVariable> paramTable) {
    SDVariable z = sameDiff.mmul(layerInput, paramTable.get("W")).add(paramTable.get("b"));
    SDVariable out = activation.asSameDiff("out", sameDiff, z);
    //MSE: 1/nOut * (input-labels)^2
    SDVariable diff = out.sub(labels);
    return diff.mul(diff).mean(1).sum();
}
Example 5
Source File: GradCheckMisc.java, from nd4j (Apache License 2.0)
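A gradient check over broadcast element-wise ops; sub() is case 1 of the switch, though this snapshot of the loop only runs case 2 (mul).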
@Test
public void testGradientAutoBroadcast1() {
    Nd4j.getRandom().setSeed(12345);

    List<String> allFailed = new ArrayList<>();

    for (int dim_sz1 : new int[]{0, 1, 2}) {

        int[] in2Shape = {3, 4, 5};
        in2Shape[dim_sz1] = 1;

        for (int i = 2; i < 3; i++) {

            SameDiff sd = SameDiff.create();

            SDVariable in3 = sd.var("in3", Nd4j.rand(new int[]{3, 4, 5}));
            SDVariable in2 = sd.var("in2", in2Shape);

            SDVariable bcOp;
            String name;
            switch (i) {
                case 0: bcOp = in3.add(in2); name = "add"; break;
                case 1: bcOp = in3.sub(in2); name = "sub"; break;
                case 2: bcOp = in3.mul(in2); name = "mul"; break;
                case 3: bcOp = in3.div(in2); name = "div"; break;
                case 4: bcOp = in3.rsub(in2); name = "rsub"; break;
                case 5: bcOp = in3.rdiv(in2); name = "rdiv"; break;
                case 6: bcOp = sd.f().floorDiv(in3, in2); name = "floordiv"; break;
                case 7: bcOp = sd.f().floorMod(in3, in2); name = "floormod"; break;
                default: throw new RuntimeException();
            }

            SDVariable outVar = sd.sum(bcOp);

            String msg = "(test " + i + ": " + name + ", dimension=" + dim_sz1 + ")";
            log.info("*** Starting test: " + msg);

            INDArray in3Arr = Nd4j.randn(new int[]{3, 4, 5}).muli(100);
            INDArray in2Arr = Nd4j.randn(in2Shape).muli(100);

            sd.associateArrayWithVariable(in3Arr, in3);
            sd.associateArrayWithVariable(in2Arr, in2);

            try {
                INDArray out = sd.execAndEndResult();
                assertNotNull(out);
                assertArrayEquals(new long[]{1, 1}, out.shape());

                // System.out.println(sd.asFlatPrint());

                boolean ok = GradCheckUtil.checkGradients(sd);
                if (!ok) {
                    allFailed.add(msg);
                }
            } catch (Exception e) {
                e.printStackTrace();
                allFailed.add(msg + " - EXCEPTION");
            }
        }
    }

    assertEquals("Failed: " + allFailed, 0, allFailed.size());
}
Example 6
Source File: GradCheckMisc.java, from nd4j (Apache License 2.0)
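The same broadcast gradient check, but with two or three size-1 dimensions at once.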
@Test
public void testGradientAutoBroadcast2() {
    Nd4j.getRandom().setSeed(12345);

    List<String> allFailed = new ArrayList<>();

    for (int[] dim_sz1s : new int[][]{{0, 1}, {0, 2}, {1, 2}, {0, 1, 2}}) {

        int[] otherShape = {3, 4, 5};
        otherShape[dim_sz1s[0]] = 1;
        otherShape[dim_sz1s[1]] = 1;
        if (dim_sz1s.length == 3) {
            otherShape[dim_sz1s[2]] = 1;
        }

        for (int i = 0; i < 6; i++) {

            SameDiff sd = SameDiff.create();

            SDVariable in3 = sd.var("in3", new int[]{3, 4, 5});
            SDVariable in2 = sd.var("inToBc", otherShape);

            String name;
            SDVariable bcOp;
            switch (i) {
                case 0: bcOp = in3.add(in2); name = "add"; break;
                case 1: bcOp = in3.sub(in2); name = "sub"; break;
                case 2: bcOp = in3.mul(in2); name = "mul"; break;
                case 3: bcOp = in3.div(in2); name = "div"; break;
                case 4: bcOp = in3.rsub(in2); name = "rsub"; break;
                case 5: bcOp = in3.rdiv(in2); name = "rdiv"; break;
                case 6: bcOp = sd.f().floorDiv(in3, in2); name = "floordiv"; break;
                case 7: bcOp = sd.f().floorMod(in3, in2); name = "floormod"; break;
                default: throw new RuntimeException();
            }

            SDVariable outVar = sd.sum(bcOp);

            String msg = "(test " + i + ": " + name + ", dimensions=" + Arrays.toString(dim_sz1s) + ")";
            log.info("*** Starting test: " + msg);

            INDArray in3Arr = Nd4j.randn(new int[]{3, 4, 5}).muli(100);
            INDArray in2Arr = Nd4j.randn(otherShape).muli(100);

            sd.associateArrayWithVariable(in3Arr, in3);
            sd.associateArrayWithVariable(in2Arr, in2);

            try {
                INDArray out = sd.execAndEndResult();
                assertNotNull(out);
                assertArrayEquals(new long[]{1, 1}, out.shape());

                // System.out.println(sd.asFlatPrint());

                boolean ok = GradCheckUtil.checkGradients(sd);
                if (!ok) {
                    allFailed.add(msg);
                }
            } catch (Exception e) {
                e.printStackTrace();
                allFailed.add(msg + " - EXCEPTION");
            }
        }
    }

    assertEquals("Failed: " + allFailed, 0, allFailed.size());
}
Example 7
Source File: GradCheckReductions.java, from nd4j (Apache License 2.0)
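Gradients through reduction ops: the graph starts from a squared difference built with sub(), then applies one of ten reductions.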
@Test
public void testReductionGradients1() {
    //Test reductions: final, but *not* the only function
    Nd4j.getRandom().setSeed(12345);

    List<String> allFailed = new ArrayList<>();

    for (int dim : new int[]{0, Integer.MAX_VALUE}) {    //These two cases are equivalent here
        for (int i = 0; i < 10; i++) {

            SameDiff sd = SameDiff.create();

            int nOut = 4;
            int minibatch = 10;
            SDVariable input = sd.var("in", new int[]{-1, nOut});
            SDVariable label = sd.var("label", new int[]{-1, nOut});

            SDVariable diff = input.sub(label);
            SDVariable sqDiff = diff.mul(diff);
            SDVariable msePerEx = sd.mean("msePerEx", sqDiff, 1);

            SDVariable loss;
            String name;
            switch (i) {
                case 0: loss = sd.mean("loss", msePerEx, dim); name = "mean"; break;
                case 1: loss = sd.sum("loss", msePerEx, dim); name = "sum"; break;
                case 2: loss = sd.standardDeviation("loss", msePerEx, true, dim); name = "stdev"; break;
                case 3: loss = sd.min("loss", msePerEx, dim); name = "min"; break;
                case 4: loss = sd.max("loss", msePerEx, dim); name = "max"; break;
                case 5: loss = sd.variance("loss", msePerEx, true, dim); name = "variance"; break;
                case 6: loss = sd.prod("loss", msePerEx, dim); name = "prod"; break;
                case 7: loss = sd.norm1("loss", msePerEx, dim); name = "norm1"; break;
                case 8: loss = sd.norm2("loss", msePerEx, dim); name = "norm2"; break;
                case 9: loss = sd.normmax("loss", msePerEx, dim); name = "normmax"; break;
                default: throw new RuntimeException();
            }

            String msg = "(test " + i + " - " + name + ", dimension=" + dim + ")";
            log.info("*** Starting test: " + msg);

            INDArray inputArr = Nd4j.randn(minibatch, nOut).muli(100);
            INDArray labelArr = Nd4j.randn(minibatch, nOut).muli(100);

            sd.associateArrayWithVariable(inputArr, input);
            sd.associateArrayWithVariable(labelArr, label);

            try {
                INDArray out = sd.execAndEndResult();
                assertNotNull(out);
                assertArrayEquals(new int[]{1, 1}, out.shape());

                // System.out.println(sd.asFlatPrint());

                boolean ok = GradCheckUtil.checkGradients(sd);
                if (!ok) {
                    allFailed.add(msg);
                }
            } catch (Exception e) {
                e.printStackTrace();
                allFailed.add(msg + " - EXCEPTION");
            }
        }
    }

    assertEquals("Failed: " + allFailed, 0, allFailed.size());
}
Example 8
Source File: ReductionOpValidation.java, from deeplearning4j (Apache License 2.0)
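Backward pass through a reduction, again starting from a sub()-based squared error; only the variance case (i = 5) is active in this snapshot.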
@Test
public void testReductionsBackwards() {
    // for (int i = 0; i < 7; i++) {
    int i = 5;
    {
        SameDiff sd = SameDiff.create();

        int nOut = 4;
        int minibatch = 3;
        SDVariable input = sd.var("in", DataType.DOUBLE, new long[]{minibatch, nOut});
        SDVariable label = sd.var("label", DataType.DOUBLE, new long[]{minibatch, nOut});

        SDVariable diff = input.sub(label);
        SDVariable sqDiff = diff.mul(diff);
        SDVariable msePerEx = sd.mean("msePerEx", sqDiff, 1);

        SDVariable loss;    //Scalar value
        String name;
        switch (i) {
            case 0: loss = sd.mean("loss", msePerEx, 0); name = "mean"; break;
            case 1: loss = sd.sum("loss", msePerEx, 0); name = "sum"; break;
            case 2: loss = sd.standardDeviation("loss", msePerEx, true, 0); name = "stdev"; break;
            case 3: loss = sd.min("loss", msePerEx, 0); name = "min"; break;
            case 4: loss = sd.max("loss", msePerEx, 0); name = "max"; break;
            case 5: loss = sd.variance("loss", msePerEx, true, 0); name = "variance"; break;
            case 6: loss = sd.prod("loss", msePerEx, 0); name = "prod"; break;
            default: throw new RuntimeException();
        }

        String msg = "test: " + i + " - " + name;
        log.info("*** Starting test: " + msg);

        INDArray inputArr = Nd4j.rand(DataType.DOUBLE, minibatch, nOut);
        INDArray labelArr = Nd4j.rand(DataType.DOUBLE, minibatch, nOut);

        sd.associateArrayWithVariable(inputArr, input);
        sd.associateArrayWithVariable(labelArr, label);

        INDArray result = loss.eval();
        assertEquals(1, result.length());

        sd.calculateGradients(Collections.emptyMap(), sd.getVariables().keySet());
    }
}
Example 9
Source File: MiscOpValidation.java, from deeplearning4j (Apache License 2.0)
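The deeplearning4j version of the broadcast gradient test from Example 5, validated via OpValidation rather than GradCheckUtil.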
@Test
public void testGradientAutoBroadcast1() {
    Nd4j.getRandom().setSeed(12345);

    List<String> failed = new ArrayList<>();

    for (int dim_sz1 : new int[]{0, 1, 2}) {

        int[] in2Shape = {3, 4, 5};
        in2Shape[dim_sz1] = 1;

        for (int i = 0; i < 8; i++) {

            SameDiff sd = SameDiff.create();

            SDVariable in3 = sd.var("in3", Nd4j.rand(new int[]{3, 4, 5}));
            SDVariable in2 = sd.var("in2", in2Shape);

            SDVariable bcOp;
            String name;
            switch (i) {
                case 0: bcOp = in3.add(in2); name = "add"; break;
                case 1: bcOp = in3.sub(in2); name = "sub"; break;
                case 2: bcOp = in3.mul(in2); name = "mul"; break;
                case 3: bcOp = in3.div(in2); name = "div"; break;
                case 4: bcOp = in3.rsub(in2); name = "rsub"; break;
                case 5: bcOp = in3.rdiv(in2); name = "rdiv"; break;
                case 6:
                    //bcOp = sd.scalarFloorDiv(in3, in2);
                    bcOp = new FloorDivOp(sd, in3, in2).outputVariable();
                    name = "floordiv";
                    break;
                case 7:
                    //bcOp = sd.scalarFloorMod(in3, in2);
                    bcOp = new FloorModOp(sd, in3, in2).outputVariable();
                    name = "floormod";
                    if (OpValidationSuite.IGNORE_FAILING) {
                        //https://github.com/deeplearning4j/deeplearning4j/issues/5976
                        continue;
                    }
                    break;
                default:
                    throw new RuntimeException();
            }

            SDVariable outVar = sd.sum(bcOp);

            String msg = "(test " + i + ": " + name + ", dimension=" + dim_sz1 + ")";
            log.info("*** Starting test: " + msg);

            INDArray in3Arr = Nd4j.randn(new int[]{3, 4, 5}).muli(100);
            INDArray in2Arr = Nd4j.randn(in2Shape).muli(100);

            sd.associateArrayWithVariable(in3Arr, in3);
            sd.associateArrayWithVariable(in2Arr, in2);

            TestCase tc = new TestCase(sd);

            String error = OpValidation.validate(tc);
            if (error != null) {
                failed.add(name);
            }
        }
    }

    assertEquals("Failed: " + failed, 0, failed.size());
}
Example 10
Source File: MiscOpValidation.java, from deeplearning4j (Apache License 2.0)
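As above, but broadcasting over multiple size-1 dimensions.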
@Test
public void testGradientAutoBroadcast2() {
    Nd4j.getRandom().setSeed(12345);

    List<String> failed = new ArrayList<>();

    for (int[] dim_sz1s : new int[][]{{0, 1}, {0, 2}, {1, 2}, {0, 1, 2}}) {

        long[] otherShape = {3, 4, 5};
        otherShape[dim_sz1s[0]] = 1;
        otherShape[dim_sz1s[1]] = 1;
        if (dim_sz1s.length == 3) {
            otherShape[dim_sz1s[2]] = 1;
        }

        for (int i = 0; i < 8; i++) {

            SameDiff sd = SameDiff.create();

            SDVariable in3 = sd.var("in3", DataType.DOUBLE, 3, 4, 5);
            SDVariable in2 = sd.var("inToBc", DataType.DOUBLE, otherShape);

            String name;
            SDVariable bcOp;
            switch (i) {
                case 0: bcOp = in3.add(in2); name = "add"; break;
                case 1: bcOp = in3.sub(in2); name = "sub"; break;
                case 2: bcOp = in3.mul(in2); name = "mul"; break;
                case 3: bcOp = in3.div(in2); name = "div"; break;
                case 4: bcOp = in3.rsub(in2); name = "rsub"; break;
                case 5: bcOp = in3.rdiv(in2); name = "rdiv"; break;
                case 6:
                    //bcOp = sd.scalarFloorDiv(in3, in2);
                    bcOp = new FloorDivOp(sd, in3, in2).outputVariable();
                    name = "floordiv";
                    break;
                case 7:
                    //bcOp = sd.scalarFloorMod(in3, in2);
                    bcOp = new FloorModOp(sd, in3, in2).outputVariable();
                    name = "floormod";
                    if (OpValidationSuite.IGNORE_FAILING) {
                        //https://github.com/deeplearning4j/deeplearning4j/issues/5976
                        continue;
                    }
                    break;
                default:
                    throw new RuntimeException();
            }

            SDVariable outVar = sd.sum(bcOp);

            String msg = "(test " + i + ": " + name + ", dimensions=" + Arrays.toString(dim_sz1s) + ")";
            log.info("*** Starting test: " + msg);

            INDArray in3Arr = Nd4j.randn(DataType.DOUBLE, 3, 4, 5).muli(100);
            INDArray in2Arr = Nd4j.randn(DataType.DOUBLE, otherShape).muli(100);

            sd.associateArrayWithVariable(in3Arr, in3);
            sd.associateArrayWithVariable(in2Arr, in2);

            TestCase tc = new TestCase(sd);
            String error = OpValidation.validate(tc);
            if (error != null) {
                failed.add(name);
            }
        }
    }

    assertEquals("Failed: " + failed, 0, failed.size());
}
Example 11
Source File: MiscOpValidation.java, from deeplearning4j (Apache License 2.0)
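Broadcast cases where the output shape is larger than both input shapes; each Triple lists (in1 shape, in2 shape, result shape).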
@Test
public void testGradientAutoBroadcast3() {
    //These tests: output size > input sizes

    Nd4j.getRandom().setSeed(12345);

    List<String> failed = new ArrayList<>();

    //Test cases: in1Shape, in2Shape, shapeOf(op(in1,in2))
    List<Triple<long[], long[], long[]>> testCases = new ArrayList<>();
    testCases.add(new Triple<>(new long[]{3, 1}, new long[]{1, 4}, new long[]{3, 4}));
    testCases.add(new Triple<>(new long[]{3, 1}, new long[]{3, 4}, new long[]{3, 4}));
    testCases.add(new Triple<>(new long[]{3, 4}, new long[]{1, 4}, new long[]{3, 4}));
    testCases.add(new Triple<>(new long[]{3, 4, 1}, new long[]{1, 1, 5}, new long[]{3, 4, 5}));
    testCases.add(new Triple<>(new long[]{3, 4, 1}, new long[]{3, 1, 5}, new long[]{3, 4, 5}));
    testCases.add(new Triple<>(new long[]{3, 1, 5}, new long[]{1, 4, 1}, new long[]{3, 4, 5}));
    testCases.add(new Triple<>(new long[]{3, 1, 5}, new long[]{1, 4, 5}, new long[]{3, 4, 5}));
    testCases.add(new Triple<>(new long[]{3, 1, 5}, new long[]{3, 4, 5}, new long[]{3, 4, 5}));
    testCases.add(new Triple<>(new long[]{3, 1, 1, 1}, new long[]{1, 4, 5, 6}, new long[]{3, 4, 5, 6}));
    testCases.add(new Triple<>(new long[]{1, 1, 1, 6}, new long[]{3, 4, 5, 6}, new long[]{3, 4, 5, 6}));
    testCases.add(new Triple<>(new long[]{1, 4, 5, 1}, new long[]{3, 1, 1, 6}, new long[]{3, 4, 5, 6}));
    if (!OpValidationSuite.IGNORE_FAILING) {
        testCases.add(new Triple<>(new long[]{1, 6}, new long[]{3, 4, 5, 1}, new long[]{3, 4, 5, 6}));
    }

    for (val p : testCases) {
        for (int i = 0; i < 8; i++) {

            SameDiff sd = SameDiff.create();

            SDVariable in3 = sd.var("in1", DataType.DOUBLE, p.getFirst());
            SDVariable in2 = sd.var("in2", DataType.DOUBLE, p.getSecond());

            String name;
            SDVariable bcOp;
            switch (i) {
                case 0: bcOp = in3.add(in2); name = "add"; break;
                case 1: bcOp = in3.sub(in2); name = "sub"; break;
                case 2: bcOp = in3.mul(in2); name = "mul"; break;
                case 3: bcOp = in3.div(in2); name = "div"; break;
                case 4: bcOp = in3.rsub(in2); name = "rsub"; break;
                case 5: bcOp = in3.rdiv(in2); name = "rdiv"; break;
                case 6:
                    //bcOp = sd.scalarFloorDiv(in3, in2);
                    bcOp = new FloorDivOp(sd, in3, in2).outputVariable();
                    name = "floordiv";
                    break;
                case 7:
                    //bcOp = sd.scalarFloorMod(in3, in2);
                    bcOp = new FloorModOp(sd, in3, in2).outputVariable();
                    name = "floormod";
                    if (OpValidationSuite.IGNORE_FAILING) {
                        //https://github.com/deeplearning4j/deeplearning4j/issues/5976
                        continue;
                    }
                    break;
                default:
                    throw new RuntimeException();
            }

            SDVariable outVar = sd.sum(bcOp);

            String msg = "(test " + i + ": " + name + ", array 1 size =" + Arrays.toString(p.getFirst())
                    + ", array 2 size = " + Arrays.toString(p.getSecond()) + ")";
            log.info("*** Starting test: " + msg);

            INDArray in3Arr = Nd4j.rand(DataType.DOUBLE, p.getFirst()).muli(100);
            INDArray in2Arr = Nd4j.rand(DataType.DOUBLE, p.getSecond()).muli(100);

            sd.associateArrayWithVariable(in3Arr, in3);
            sd.associateArrayWithVariable(in2Arr, in2);

            TestCase tc = new TestCase(sd);
            String error = OpValidation.validate(tc);
            if (error != null) {
                failed.add(name + " " + i + " - " + error);
            }
        }
    }

    assertEquals("Failed: " + failed, 0, failed.size());
}
Example 12
Source File: TransformOpValidation.java, from deeplearning4j (Apache License 2.0)
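Scalar op coverage: case 3 uses in.sub(2) to subtract a scalar from every element, checked against the equivalent INDArray result.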
@Test
public void testScalarOps() {
    int d0 = 2;
    int d1 = 3;
    int d2 = 4;

    int n = d0 * d1 * d2;

    List<String> failed = new ArrayList<>();

    for (int i = 0; i < 11; i++) {
        for (char inOrder : new char[]{'c', 'f'}) {
            SameDiff sd = SameDiff.create();

            INDArray inArr = Nd4j.linspace(1, n, n, DataType.DOUBLE).reshape('c', d0, d1, d2).dup(inOrder);
            SDVariable in = sd.var("in", inArr);
            TestCase tc = new TestCase(sd).gradientCheck(true);

            SDVariable out;
            String msg;
            switch (i) {
                case 0:
                    out = in.mul(2);
                    tc.expectedOutput(out.name(), inArr.mul(2));
                    msg = "mul - " + inOrder;
                    break;
                case 1:
                    out = in.div(2);
                    tc.expectedOutput(out.name(), inArr.div(2));
                    msg = "div - " + inOrder;
                    break;
                case 2:
                    out = in.add(2);
                    tc.expectedOutput(out.name(), inArr.add(2));
                    msg = "add - " + inOrder;
                    break;
                case 3:
                    out = in.sub(2);
                    tc.expectedOutput(out.name(), inArr.sub(2));
                    msg = "sub - " + inOrder;
                    break;
                case 4:
                    out = in.rdiv(2);
                    tc.expectedOutput(out.name(), inArr.rdiv(2));
                    msg = "rdiv - " + inOrder;
                    break;
                case 5:
                    out = in.rsub(2);
                    tc.expectedOutput(out.name(), inArr.rsub(2));
                    msg = "rsub - " + inOrder;
                    break;
                case 6:
                    out = sd.math().pow(in, 2);
                    tc.expectedOutput(out.name(), Transforms.pow(inArr, 2));
                    msg = "pow - " + inOrder;
                    break;
                case 7:
                    inArr.assign(Nd4j.rand(inArr.dataType(), inArr.shape()).muli(5).subi(2.5));
                    out = sd.math().floorMod(in, 2.0);
                    tc.expected(out, Nd4j.getExecutioner().exec(new ScalarFMod(inArr.dup(), 2.0)));
                    msg = "scalarFloorMod - " + inOrder;
                    break;
                case 8:
                    inArr.assign(Nd4j.rand(inArr.shape()));
                    out = sd.scalarMax(in, 0.5);
                    tc.expected(out, Transforms.max(inArr.dup(), 0.5));
                    msg = "scalarMax - " + inOrder;
                    break;
                case 9:
                    inArr.assign(Nd4j.rand(inArr.shape()));
                    out = sd.scalarMin(in, 0.5);
                    tc.expected(out, Transforms.min(inArr.dup(), 0.5));
                    msg = "scalarMin - " + inOrder;
                    break;
                case 10:
                    out = in.assign(0.5);
                    tc.expected(out, Nd4j.valueArrayOf(inArr.shape(), 0.5));
                    msg = "scalarSet - " + inOrder;
                    break;
                default:
                    throw new RuntimeException();
            }

            tc.testName(msg);

            SDVariable loss = sd.standardDeviation(out, true);

            log.info("Starting test: " + msg);
            String err = OpValidation.validate(tc, true);
            if (err != null) {
                failed.add(err);
            }
        }
    }
    assertEquals(failed.toString(), 0, failed.size());
}
Example 13
Source File: SameDiffMSELossLayer.java, from deeplearning4j (Apache License 2.0)
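A minimal MSE loss layer: sub(), square, mean over outputs, sum over examples.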
@Override
public SDVariable defineLayer(SameDiff sameDiff, SDVariable layerInput, SDVariable labels,
                              Map<String, SDVariable> paramTable) {
    //MSE: 1/nOut * (input-labels)^2
    SDVariable diff = layerInput.sub(labels);
    return diff.mul(diff).mean(1).sum(0);
}