Java Code Examples for org.nd4j.autodiff.samediff.SDVariable#add()
The following examples show how to use org.nd4j.autodiff.samediff.SDVariable#add().
Each example lists its source file, originating project, and license.
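Before the project examples, here is a minimal, self-contained sketch of the add() overloads that appear throughout this page: add(SDVariable), the named variant add(String, SDVariable), and the scalar variant add(double). This sketch is illustrative only (it is not taken from any of the projects below) and assumes a recent SameDiff release in which SDVariable#eval() is available.

import org.nd4j.autodiff.samediff.SDVariable;
import org.nd4j.autodiff.samediff.SameDiff;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;

public class SDVariableAddSketch {
    public static void main(String[] args) {
        SameDiff sd = SameDiff.create();

        //Two variables holding concrete arrays
        SDVariable a = sd.var("a", Nd4j.ones(4));
        SDVariable b = sd.var("b", Nd4j.create(new float[]{1, 2, 3, 4}));

        //add(SDVariable): element-wise sum with an auto-generated output name
        SDVariable sum = a.add(b);

        //add(String, SDVariable): same op, but the result variable is named "namedSum"
        SDVariable named = a.add("namedSum", b);

        //add(double): adds a scalar to every element
        SDVariable plusOne = a.add(1.0);

        //Evaluate one of the outputs
        INDArray result = named.eval();
        System.out.println(result);     //[2.0, 3.0, 4.0, 5.0]
    }
}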
Example 1
Source File: GraphExecutionerTest.java From deeplearning4j with Apache License 2.0
/**
 * VarSpace should dump everything. 4 variables in our case
 * @throws Exception
 */
@Test
public void testEquality1() {
    OpValidationSuite.ignoreFailing();  //Failing 2019/01/24
    GraphExecutioner executionerA = new BasicGraphExecutioner();
    GraphExecutioner executionerB = new NativeGraphExecutioner();

    SameDiff sameDiff = SameDiff.create();
    INDArray ones = Nd4j.ones(4);
    SDVariable sdVariable = sameDiff.var("ones", ones);
    SDVariable scalarOne = sameDiff.var("scalar", Nd4j.scalar(1.0));
    SDVariable result = sdVariable.add(scalarOne);
    SDVariable total = sameDiff.sum(result, Integer.MAX_VALUE);

    log.info("TOTAL: {}; Id: {}", total.name(), total);

    INDArray[] resB = executionerB.executeGraph(sameDiff, configVarSpace);

    //Variables: ones, scalar, result, total
    assertEquals(sameDiff.variables().size(), resB.length);
    assertEquals(Nd4j.ones(4), resB[0]);
    assertEquals(Nd4j.scalar(1), resB[1]);
    assertEquals(Nd4j.create(new float[]{2f, 2f, 2f, 2f}), resB[2]);
    assertEquals(Nd4j.scalar(8.0), resB[3]);
}
Example 2
Source File: SameDiffDense.java From deeplearning4j with Apache License 2.0
@Override
public SDVariable defineLayer(SameDiff sd, SDVariable layerInput, Map<String, SDVariable> paramTable, SDVariable mask) {
    SDVariable weights = paramTable.get(DefaultParamInitializer.WEIGHT_KEY);
    SDVariable bias = paramTable.get(DefaultParamInitializer.BIAS_KEY);

    SDVariable mmul = sd.mmul("mmul", layerInput, weights);
    SDVariable z = mmul.add("z", bias);
    return activation.asSameDiff("out", sd, z);
}
Example 3
Source File: SameDiffDenseVertex.java From deeplearning4j with Apache License 2.0
@Override
public SDVariable defineVertex(SameDiff sameDiff, Map<String, SDVariable> layerInput, Map<String, SDVariable> paramTable, Map<String, SDVariable> maskVars) {
    SDVariable weights = paramTable.get(DefaultParamInitializer.WEIGHT_KEY);
    SDVariable bias = paramTable.get(DefaultParamInitializer.BIAS_KEY);

    SDVariable mmul = sameDiff.mmul("mmul", layerInput.get("in"), weights);
    SDVariable z = mmul.add("z", bias);
    return activation.asSameDiff("out", sameDiff, z);
}
Example 4
Source File: Gather.java From deeplearning4j with Apache License 2.0
@Override
public List<SDVariable> doDiff(List<SDVariable> i_v) {
    //2 args: input and indices. Plus integer dimension arg
    //Gather backprop is just scatter add

    SDVariable indicesGrad = sameDiff.zerosLike(arg(1));
    SDVariable inputGrad = sameDiff.zerosLike(arg(0));

    SDVariable[] inputs = args();
    SDVariable axis;
    SDVariable rank = inputs[0].rank();
    if (inputs.length == 2) {
        axis = sameDiff.constant(jaxis);
        if (jaxis < 0)
            axis = axis.add(rank);
    } else {
        axis = inputs[2];
    }

    //Use scatter add plus permute
    SDVariable dimsExAxis = sameDiff.range(null, sameDiff.constant(0), rank, sameDiff.constant(1), DataType.INT);
    SDVariable axisRank1 = axis.reshape(1);
    dimsExAxis = sameDiff.math().listDiff(dimsExAxis, axisRank1)[0];    //Don't need indices
    SDVariable permuteDims = sameDiff.concat(0, axisRank1, dimsExAxis);
    SDVariable invertDims = sameDiff.invertPermutation(permuteDims);

    //Permute gradients so original axis is at position 0... then scatter add, and reverse
    SDVariable gradAtOut = i_v.get(0);
    SDVariable permuteGrad = gradAtOut.permute(permuteDims);
    SDVariable inputGradPermute = inputGrad.permute(permuteDims);
    inputGrad = sameDiff.scatterAdd(inputGradPermute, arg(1), permuteGrad);

    //Now, invert the permutation so axis is back where it was
    inputGrad = inputGrad.permute(invertDims);

    return Arrays.asList(inputGrad, indicesGrad);
}
Example 5
Source File: MinimalSameDiffDense.java From deeplearning4j with Apache License 2.0
@Override
public SDVariable defineLayer(SameDiff sd, SDVariable layerInput, Map<String, SDVariable> paramTable, SDVariable mask) {
    SDVariable weights = paramTable.get(DefaultParamInitializer.WEIGHT_KEY);
    SDVariable bias = paramTable.get(DefaultParamInitializer.BIAS_KEY);

    SDVariable mmul = sd.mmul("mmul", layerInput, weights);
    SDVariable z = mmul.add("z", bias);
    return activation.asSameDiff("out", sd, z);
}
Example 6
Source File: GraphExecutionerTest.java From deeplearning4j with Apache License 2.0
@Test
@Ignore
public void testSums1() {
    SameDiff sameDiff = SameDiff.create();
    INDArray ones = Nd4j.ones(4);
    SDVariable sdVariable = sameDiff.var("ones", ones);
    SDVariable result = sdVariable.add(1.0);
    SDVariable total = sameDiff.sum(result, Integer.MAX_VALUE);

    val executioner = new NativeGraphExecutioner();

    INDArray[] res = executioner.executeGraph(sameDiff);

    assertEquals(8.0, res[0].getDouble(0), 1e-5);
}
Example 7
Source File: SameDiffInferenceExecutionerTests.java From konduit-serving with Apache License 2.0
@Test(timeout = 60000)
public void testSameDiff() throws Exception {
    SameDiffInferenceExecutioner sameDiffInferenceExecutioner = new SameDiffInferenceExecutioner();
    SameDiff sameDiff = SameDiff.create();
    SDVariable input1 = sameDiff.placeHolder("input1", DataType.FLOAT, 2, 2);
    SDVariable input2 = sameDiff.placeHolder("input2", DataType.FLOAT, 2, 2);
    SDVariable result = input1.add("output", input2);

    INDArray input1Arr = Nd4j.linspace(1, 4, 4).reshape(2, 2);
    INDArray input2Arr = Nd4j.linspace(1, 4, 4).reshape(2, 2);
    sameDiff.associateArrayWithVariable(input1Arr, input1.name());
    sameDiff.associateArrayWithVariable(input2Arr, input2.name());

    Map<String, INDArray> indArrays = new LinkedHashMap<>();
    indArrays.put(input1.name(), input1Arr);
    indArrays.put(input2.name(), input2Arr);
    Map<String, INDArray> outputs = sameDiff.outputAll(indArrays);
    assertEquals(3, outputs.size());

    ParallelInferenceConfig parallelInferenceConfig = ParallelInferenceConfig.defaultConfig();
    File newFile = temporary.newFile();
    sameDiff.asFlatFile(newFile);

    SameDiffModelLoader sameDiffModelLoader = new SameDiffModelLoader(newFile, Arrays.asList("input1", "input2"), Arrays.asList("output"));
    sameDiffInferenceExecutioner.initialize(sameDiffModelLoader, parallelInferenceConfig);

    INDArray[] execute = sameDiffInferenceExecutioner.execute(new INDArray[]{input1Arr, input2Arr});
    assertEquals(outputs.values().iterator().next(), execute[0]);
}
Example 8
Source File: GraphExecutionerTest.java From deeplearning4j with Apache License 2.0
@Test
@Ignore
public void testConversion() throws Exception {
    SameDiff sameDiff = SameDiff.create();
    INDArray ones = Nd4j.ones(4);
    SDVariable sdVariable = sameDiff.var("ones", ones);
    SDVariable result = sdVariable.add(1.0);
    SDVariable total = sameDiff.sum(result, Integer.MAX_VALUE);

    val executioner = new NativeGraphExecutioner();

    ByteBuffer buffer = executioner.convertToFlatBuffers(sameDiff, ExecutorConfiguration.builder()
            .profilingMode(OpExecutioner.ProfilingMode.DISABLED)
            .executionMode(ExecutionMode.SEQUENTIAL)
            .outputMode(OutputMode.IMPLICIT)
            .build());

    val offset = buffer.position();
    val array = buffer.array();

    try (val fos = new FileOutputStream("../../libnd4j/tests/resources/adam_sum.fb");
         val dos = new DataOutputStream(fos)) {
        dos.write(array, offset, array.length - offset);
    }

    //INDArray[] res = executioner.executeGraph(sameDiff);
    //assertEquals(8.0, res[0].getDouble(0), 1e-5);
    /*
    INDArray output = null;
    for (int i = 0; i < 5; i++) {
        output = sameDiff.execAndEndResult(ops);
        System.out.println("Ones " + ones);
        System.out.println(output);
    }

    assertEquals(Nd4j.valueArrayOf(4, 7), ones);
    assertEquals(28, output.getDouble(0), 1e-1);
    */
}
Example 9
Source File: TestSessions.java From deeplearning4j with Apache License 2.0
@Test
public void testInferenceSessionBasic2() {
    //So far: trivial test to check execution order
    SameDiff sd = SameDiff.create();
    SDVariable ph1 = sd.placeHolder("x", DataType.FLOAT, 3, 3);
    SDVariable ph2 = sd.placeHolder("y", DataType.FLOAT, 3, 3);

    SDVariable a = ph1.add("a", ph2);
    SDVariable b = ph1.mmul("b", ph2);
    SDVariable c = ph1.sub("c", ph2);
    SDVariable d = a.add("d", b);

    //To get array d - need to execute: a, b, d - NOT the sub op (c)
    //NOTE: normally sessions are internal and completely hidden from users
    InferenceSession is = new InferenceSession(sd);

    INDArray x = Nd4j.linspace(1, 9, 9).castTo(DataType.FLOAT).reshape(3, 3);
    INDArray y = Nd4j.linspace(0.0, 0.9, 9, DataType.DOUBLE).castTo(DataType.FLOAT).reshape(3, 3);

    INDArray aExp = x.add(y);
    INDArray bExp = x.mmul(y);
    INDArray dExp = aExp.add(bExp);

    Map<String, INDArray> m = new HashMap<>();
    m.put("x", x);
    m.put("y", y);

    Map<String, INDArray> outMap = is.output(Collections.singletonList("d"), m, null,
            Collections.<String>emptyList(), null, At.defaultAt(Operation.TRAINING));

    assertEquals(1, outMap.size());
    assertEquals(dExp, outMap.get("d"));
}
Example 10
Source File: SameDiffVerticleNd4jTest.java From konduit-serving with Apache License 2.0
@Override
public JsonObject getConfigObject() throws Exception {
    SameDiff sameDiff = SameDiff.create();
    SDVariable x = sameDiff.placeHolder("x", DataType.FLOAT, 2);
    SDVariable y = sameDiff.placeHolder("y", DataType.FLOAT, 2);
    SDVariable add = x.add("output", y);

    File tmpSameDiffFile = temporary.newFile();
    sameDiff.asFlatFile(tmpSameDiffFile);
    SameDiff values = SameDiff.fromFlatFile(tmpSameDiffFile);

    ServingConfig servingConfig = ServingConfig.builder()
            .outputDataFormat(Output.DataFormat.ND4J)
            .httpPort(port)
            .build();

    SameDiffStep modelPipelineConfig = SameDiffStep.builder()
            .path(tmpSameDiffFile.getAbsolutePath())
            .inputNames(Arrays.asList("x", "y"))
            .outputNames(Collections.singletonList("output"))
            .build();

    InferenceConfiguration inferenceConfiguration = InferenceConfiguration.builder()
            .servingConfig(servingConfig)
            .step(modelPipelineConfig)
            .build();

    return new JsonObject(inferenceConfiguration.toJson());
}
Example 11
Source File: SameDiffVerticleNumpyTest.java From konduit-serving with Apache License 2.0
@Override
public JsonObject getConfigObject() throws Exception {
    SameDiff sameDiff = SameDiff.create();
    SDVariable x = sameDiff.placeHolder("x", DataType.FLOAT, 2);
    SDVariable y = sameDiff.placeHolder("y", DataType.FLOAT, 2);
    SDVariable add = x.add("output", y);

    File tmpSameDiffFile = temporary.newFile();
    sameDiff.asFlatFile(tmpSameDiffFile);

    ServingConfig servingConfig = ServingConfig.builder()
            .outputDataFormat(Output.DataFormat.NUMPY)
            .httpPort(port)
            .build();

    SameDiffStep config = SameDiffStep.builder()
            .path(tmpSameDiffFile.getAbsolutePath())
            .inputNames(Arrays.asList("x", "y"))
            .outputNames(Collections.singletonList("output"))
            .build();

    InferenceConfiguration inferenceConfiguration = InferenceConfiguration.builder()
            .servingConfig(servingConfig)
            .step(config)
            .build();

    return new JsonObject(inferenceConfiguration.toJson());
}
Example 12
Source File: SameDiffVerticleClassificationMetricsTest.java From konduit-serving with Apache License 2.0
@Override
public JsonObject getConfigObject() throws Exception {
    SameDiff sameDiff = SameDiff.create();
    SDVariable x = sameDiff.placeHolder("x", DataType.FLOAT, 2);
    SDVariable y = sameDiff.placeHolder("y", DataType.FLOAT, 2);
    SDVariable add = x.add("output", y);

    File tmpSameDiffFile = temporary.newFile();
    sameDiff.asFlatFile(tmpSameDiffFile);
    SameDiff values = SameDiff.fromFlatFile(tmpSameDiffFile);

    ServingConfig servingConfig = ServingConfig.builder()
            .outputDataFormat(Output.DataFormat.ND4J)
            .metricsConfigurations(Collections.singletonList(ClassificationMetricsConfig.builder()
                    .classificationLabels(Arrays.asList("0", "1")).build()))
            .metricTypes(Collections.singletonList(MetricType.CLASSIFICATION))
            .httpPort(port)
            .build();

    SameDiffStep modelPipelineConfig = SameDiffStep.builder()
            .path(tmpSameDiffFile.getAbsolutePath())
            .inputNames(Arrays.asList("x", "y"))
            .outputNames(Collections.singletonList("output"))
            .build();

    InferenceConfiguration inferenceConfiguration = InferenceConfiguration.builder()
            .servingConfig(servingConfig)
            .step(modelPipelineConfig)
            .build();

    return new JsonObject(inferenceConfiguration.toJson());
}
Example 13
Source File: GradCheckMisc.java From nd4j with Apache License 2.0
@Test
public void testGradientAutoBroadcast2() {
    Nd4j.getRandom().setSeed(12345);

    List<String> allFailed = new ArrayList<>();

    for (int[] dim_sz1s : new int[][]{{0, 1}, {0, 2}, {1, 2}, {0, 1, 2}}) {

        int[] otherShape = {3, 4, 5};
        otherShape[dim_sz1s[0]] = 1;
        otherShape[dim_sz1s[1]] = 1;
        if (dim_sz1s.length == 3) {
            otherShape[dim_sz1s[2]] = 1;
        }

        //NOTE: the loop bound is 6, so the floordiv (case 6) and floormod (case 7) branches below are never exercised
        for (int i = 0; i < 6; i++) {
            SameDiff sd = SameDiff.create();

            SDVariable in3 = sd.var("in3", new int[]{3, 4, 5});
            SDVariable in2 = sd.var("inToBc", otherShape);

            String name;
            SDVariable bcOp;
            switch (i) {
                case 0:
                    bcOp = in3.add(in2);
                    name = "add";
                    break;
                case 1:
                    bcOp = in3.sub(in2);
                    name = "sub";
                    break;
                case 2:
                    bcOp = in3.mul(in2);
                    name = "mul";
                    break;
                case 3:
                    bcOp = in3.div(in2);
                    name = "div";
                    break;
                case 4:
                    bcOp = in3.rsub(in2);
                    name = "rsub";
                    break;
                case 5:
                    bcOp = in3.rdiv(in2);
                    name = "rdiv";
                    break;
                case 6:
                    bcOp = sd.f().floorDiv(in3, in2);
                    name = "floordiv";
                    break;
                case 7:
                    bcOp = sd.f().floorMod(in3, in2);
                    name = "floormod";
                    break;
                default:
                    throw new RuntimeException();
            }

            SDVariable outVar = sd.sum(bcOp);

            String msg = "(test " + i + ": " + name + ", dimensions=" + Arrays.toString(dim_sz1s) + ")";
            log.info("*** Starting test: " + msg);

            INDArray in3Arr = Nd4j.randn(new int[]{3, 4, 5}).muli(100);
            INDArray in2Arr = Nd4j.randn(otherShape).muli(100);

            sd.associateArrayWithVariable(in3Arr, in3);
            sd.associateArrayWithVariable(in2Arr, in2);

            try {
                INDArray out = sd.execAndEndResult();
                assertNotNull(out);
                assertArrayEquals(new long[]{1, 1}, out.shape());

                //System.out.println(sd.asFlatPrint());

                boolean ok = GradCheckUtil.checkGradients(sd);
                if (!ok) {
                    allFailed.add(msg);
                }
            } catch (Exception e) {
                e.printStackTrace();
                allFailed.add(msg + " - EXCEPTION");
            }
        }
    }

    assertEquals("Failed: " + allFailed, 0, allFailed.size());
}
Example 14
Source File: GradCheckMisc.java From nd4j with Apache License 2.0
@Test
public void testGradientAutoBroadcast1() {
    Nd4j.getRandom().setSeed(12345);

    List<String> allFailed = new ArrayList<>();

    for (int dim_sz1 : new int[]{0, 1, 2}) {

        int[] in2Shape = {3, 4, 5};
        in2Shape[dim_sz1] = 1;

        //NOTE: these loop bounds mean only case 2 (mul) is actually exercised here
        for (int i = 2; i < 3; i++) {
            SameDiff sd = SameDiff.create();

            SDVariable in3 = sd.var("in3", Nd4j.rand(new int[]{3, 4, 5}));
            SDVariable in2 = sd.var("in2", in2Shape);

            SDVariable bcOp;
            String name;
            switch (i) {
                case 0:
                    bcOp = in3.add(in2);
                    name = "add";
                    break;
                case 1:
                    bcOp = in3.sub(in2);
                    name = "sub";
                    break;
                case 2:
                    bcOp = in3.mul(in2);
                    name = "mul";
                    break;
                case 3:
                    bcOp = in3.div(in2);
                    name = "div";
                    break;
                case 4:
                    bcOp = in3.rsub(in2);
                    name = "rsub";
                    break;
                case 5:
                    bcOp = in3.rdiv(in2);
                    name = "rdiv";
                    break;
                case 6:
                    bcOp = sd.f().floorDiv(in3, in2);
                    name = "floordiv";
                    break;
                case 7:
                    bcOp = sd.f().floorMod(in3, in2);
                    name = "floormod";
                    break;
                default:
                    throw new RuntimeException();
            }

            SDVariable outVar = sd.sum(bcOp);

            String msg = "(test " + i + ": " + name + ", dimension=" + dim_sz1 + ")";
            log.info("*** Starting test: " + msg);

            INDArray in3Arr = Nd4j.randn(new int[]{3, 4, 5}).muli(100);
            INDArray in2Arr = Nd4j.randn(in2Shape).muli(100);

            sd.associateArrayWithVariable(in3Arr, in3);
            sd.associateArrayWithVariable(in2Arr, in2);

            try {
                INDArray out = sd.execAndEndResult();
                assertNotNull(out);
                assertArrayEquals(new long[]{1, 1}, out.shape());

                //System.out.println(sd.asFlatPrint());

                boolean ok = GradCheckUtil.checkGradients(sd);
                if (!ok) {
                    allFailed.add(msg);
                }
            } catch (Exception e) {
                e.printStackTrace();
                allFailed.add(msg + " - EXCEPTION");
            }
        }
    }

    assertEquals("Failed: " + allFailed, 0, allFailed.size());
}
Example 15
Source File: MiscOpValidation.java From deeplearning4j with Apache License 2.0
@Test
public void testGradientAutoBroadcast1() {
    Nd4j.getRandom().setSeed(12345);

    List<String> failed = new ArrayList<>();

    for (int dim_sz1 : new int[]{0, 1, 2}) {

        int[] in2Shape = {3, 4, 5};
        in2Shape[dim_sz1] = 1;

        for (int i = 0; i < 8; i++) {
            SameDiff sd = SameDiff.create();

            SDVariable in3 = sd.var("in3", Nd4j.rand(new int[]{3, 4, 5}));
            SDVariable in2 = sd.var("in2", in2Shape);

            SDVariable bcOp;
            String name;
            switch (i) {
                case 0:
                    bcOp = in3.add(in2);
                    name = "add";
                    break;
                case 1:
                    bcOp = in3.sub(in2);
                    name = "sub";
                    break;
                case 2:
                    bcOp = in3.mul(in2);
                    name = "mul";
                    break;
                case 3:
                    bcOp = in3.div(in2);
                    name = "div";
                    break;
                case 4:
                    bcOp = in3.rsub(in2);
                    name = "rsub";
                    break;
                case 5:
                    bcOp = in3.rdiv(in2);
                    name = "rdiv";
                    break;
                case 6:
                    //bcOp = sd.scalarFloorDiv(in3, in2);
                    bcOp = new FloorDivOp(sd, in3, in2).outputVariable();
                    name = "floordiv";
                    break;
                case 7:
                    //bcOp = sd.scalarFloorMod(in3, in2);
                    bcOp = new FloorModOp(sd, in3, in2).outputVariable();
                    name = "floormod";
                    if (OpValidationSuite.IGNORE_FAILING) {
                        //https://github.com/deeplearning4j/deeplearning4j/issues/5976
                        continue;
                    }
                    break;
                default:
                    throw new RuntimeException();
            }

            SDVariable outVar = sd.sum(bcOp);

            String msg = "(test " + i + ": " + name + ", dimension=" + dim_sz1 + ")";
            log.info("*** Starting test: " + msg);

            INDArray in3Arr = Nd4j.randn(new int[]{3, 4, 5}).muli(100);
            INDArray in2Arr = Nd4j.randn(in2Shape).muli(100);

            sd.associateArrayWithVariable(in3Arr, in3);
            sd.associateArrayWithVariable(in2Arr, in2);

            TestCase tc = new TestCase(sd);

            String error = OpValidation.validate(tc);
            if (error != null) {
                failed.add(name);
            }
        }
    }

    assertEquals("Failed: " + failed, 0, failed.size());
}
Example 16
Source File: MiscOpValidation.java From deeplearning4j with Apache License 2.0
@Test
public void testGradientAutoBroadcast2() {
    Nd4j.getRandom().setSeed(12345);

    List<String> failed = new ArrayList<>();

    for (int[] dim_sz1s : new int[][]{{0, 1}, {0, 2}, {1, 2}, {0, 1, 2}}) {

        long[] otherShape = {3, 4, 5};
        otherShape[dim_sz1s[0]] = 1;
        otherShape[dim_sz1s[1]] = 1;
        if (dim_sz1s.length == 3) {
            otherShape[dim_sz1s[2]] = 1;
        }

        for (int i = 0; i < 8; i++) {
            SameDiff sd = SameDiff.create();

            SDVariable in3 = sd.var("in3", DataType.DOUBLE, 3, 4, 5);
            SDVariable in2 = sd.var("inToBc", DataType.DOUBLE, otherShape);

            String name;
            SDVariable bcOp;
            switch (i) {
                case 0:
                    bcOp = in3.add(in2);
                    name = "add";
                    break;
                case 1:
                    bcOp = in3.sub(in2);
                    name = "sub";
                    break;
                case 2:
                    bcOp = in3.mul(in2);
                    name = "mul";
                    break;
                case 3:
                    bcOp = in3.div(in2);
                    name = "div";
                    break;
                case 4:
                    bcOp = in3.rsub(in2);
                    name = "rsub";
                    break;
                case 5:
                    bcOp = in3.rdiv(in2);
                    name = "rdiv";
                    break;
                case 6:
                    //bcOp = sd.scalarFloorDiv(in3, in2);
                    bcOp = new FloorDivOp(sd, in3, in2).outputVariable();
                    name = "floordiv";
                    break;
                case 7:
                    //bcOp = sd.scalarFloorMod(in3, in2);
                    bcOp = new FloorModOp(sd, in3, in2).outputVariable();
                    name = "floormod";
                    if (OpValidationSuite.IGNORE_FAILING) {
                        //https://github.com/deeplearning4j/deeplearning4j/issues/5976
                        continue;
                    }
                    break;
                default:
                    throw new RuntimeException();
            }

            SDVariable outVar = sd.sum(bcOp);

            String msg = "(test " + i + ": " + name + ", dimensions=" + Arrays.toString(dim_sz1s) + ")";
            log.info("*** Starting test: " + msg);

            INDArray in3Arr = Nd4j.randn(DataType.DOUBLE, 3, 4, 5).muli(100);
            INDArray in2Arr = Nd4j.randn(DataType.DOUBLE, otherShape).muli(100);

            sd.associateArrayWithVariable(in3Arr, in3);
            sd.associateArrayWithVariable(in2Arr, in2);

            TestCase tc = new TestCase(sd);
            String error = OpValidation.validate(tc);
            if (error != null) {
                failed.add(name);
            }
        }
    }

    assertEquals("Failed: " + failed, 0, failed.size());
}
Example 17
Source File: MiscOpValidation.java From deeplearning4j with Apache License 2.0
@Test
public void testGradientAutoBroadcast3() {
    //These tests: output size > input sizes

    Nd4j.getRandom().setSeed(12345);

    List<String> failed = new ArrayList<>();

    //Test cases: in1Shape, in2Shape, shapeOf(op(in1,in2))
    List<Triple<long[], long[], long[]>> testCases = new ArrayList<>();
    testCases.add(new Triple<>(new long[]{3, 1}, new long[]{1, 4}, new long[]{3, 4}));
    testCases.add(new Triple<>(new long[]{3, 1}, new long[]{3, 4}, new long[]{3, 4}));
    testCases.add(new Triple<>(new long[]{3, 4}, new long[]{1, 4}, new long[]{3, 4}));
    testCases.add(new Triple<>(new long[]{3, 4, 1}, new long[]{1, 1, 5}, new long[]{3, 4, 5}));
    testCases.add(new Triple<>(new long[]{3, 4, 1}, new long[]{3, 1, 5}, new long[]{3, 4, 5}));
    testCases.add(new Triple<>(new long[]{3, 1, 5}, new long[]{1, 4, 1}, new long[]{3, 4, 5}));
    testCases.add(new Triple<>(new long[]{3, 1, 5}, new long[]{1, 4, 5}, new long[]{3, 4, 5}));
    testCases.add(new Triple<>(new long[]{3, 1, 5}, new long[]{3, 4, 5}, new long[]{3, 4, 5}));
    testCases.add(new Triple<>(new long[]{3, 1, 1, 1}, new long[]{1, 4, 5, 6}, new long[]{3, 4, 5, 6}));
    testCases.add(new Triple<>(new long[]{1, 1, 1, 6}, new long[]{3, 4, 5, 6}, new long[]{3, 4, 5, 6}));
    testCases.add(new Triple<>(new long[]{1, 4, 5, 1}, new long[]{3, 1, 1, 6}, new long[]{3, 4, 5, 6}));
    if (!OpValidationSuite.IGNORE_FAILING) {
        testCases.add(new Triple<>(new long[]{1, 6}, new long[]{3, 4, 5, 1}, new long[]{3, 4, 5, 6}));
    }

    for (val p : testCases) {

        for (int i = 0; i < 8; i++) {
            SameDiff sd = SameDiff.create();

            SDVariable in3 = sd.var("in1", DataType.DOUBLE, p.getFirst());
            SDVariable in2 = sd.var("in2", DataType.DOUBLE, p.getSecond());

            String name;
            SDVariable bcOp;
            switch (i) {
                case 0:
                    bcOp = in3.add(in2);
                    name = "add";
                    break;
                case 1:
                    bcOp = in3.sub(in2);
                    name = "sub";
                    break;
                case 2:
                    bcOp = in3.mul(in2);
                    name = "mul";
                    break;
                case 3:
                    bcOp = in3.div(in2);
                    name = "div";
                    break;
                case 4:
                    bcOp = in3.rsub(in2);
                    name = "rsub";
                    break;
                case 5:
                    bcOp = in3.rdiv(in2);
                    name = "rdiv";
                    break;
                case 6:
                    //bcOp = sd.scalarFloorDiv(in3, in2);
                    bcOp = new FloorDivOp(sd, in3, in2).outputVariable();
                    name = "floordiv";
                    break;
                case 7:
                    //bcOp = sd.scalarFloorMod(in3, in2);
                    bcOp = new FloorModOp(sd, in3, in2).outputVariable();
                    name = "floormod";
                    if (OpValidationSuite.IGNORE_FAILING) {
                        //https://github.com/deeplearning4j/deeplearning4j/issues/5976
                        continue;
                    }
                    break;
                default:
                    throw new RuntimeException();
            }

            SDVariable outVar = sd.sum(bcOp);

            String msg = "(test " + i + ": " + name + ", array 1 size =" + Arrays.toString(p.getFirst())
                    + ", array 2 size = " + Arrays.toString(p.getSecond()) + ")";
            log.info("*** Starting test: " + msg);

            INDArray in3Arr = Nd4j.rand(DataType.DOUBLE, p.getFirst()).muli(100);
            INDArray in2Arr = Nd4j.rand(DataType.DOUBLE, p.getSecond()).muli(100);

            sd.associateArrayWithVariable(in3Arr, in3);
            sd.associateArrayWithVariable(in2Arr, in2);

            TestCase tc = new TestCase(sd);
            String error = OpValidation.validate(tc);
            if (error != null) {
                failed.add(name + " " + i + " - " + error);
            }
        }
    }

    assertEquals("Failed: " + failed, 0, failed.size());
}
Example 18
Source File: ShapeOpValidation.java From deeplearning4j with Apache License 2.0
@Test
public void testMeshGrid() {
    List<String> failed = new ArrayList<>();

    for (int rank = 2; rank <= 4; rank++) {
        SameDiff sd = SameDiff.create();

        SDVariable[] arr = new SDVariable[rank];
        String[] names = new String[rank];
        for (int i = 0; i < rank; i++) {
            INDArray in = Nd4j.linspace(1, 3 + i, 3 + i).reshape(3 + i).castTo(DataType.DOUBLE);
            arr[i] = sd.var("in" + i, in);
            names[i] = "meshgrid-" + i;
        }

        SDVariable[] meshgrid = sd.math().meshgrid(names, arr, false);

        TestCase tc = new TestCase(sd);

        long[] shape;
        if (rank == 2) {
            shape = new long[]{3, 4};
        } else if (rank == 3) {
            shape = new long[]{3, 4, 5};
        } else {
            shape = new long[]{3, 4, 5, 6};
        }
        INDArray[] exp = new INDArray[shape.length];    //Nd4j.create(shape);
        for (int i = 0; i < exp.length; i++) {
            exp[i] = Nd4j.create(DataType.DOUBLE, shape);
            long nTensors = exp[i].tensorsAlongDimension(i);
            for (long j = 0; j < nTensors; j++) {
                INDArray tad = exp[i].tensorAlongDimension((int) j, i);
                tad.assign(arr[i].getArr());
            }
            tc.expected(meshgrid[i], exp[i]);
        }

        SDVariable loss = null;
        for (int i = 0; i < rank; i++) {
            if (i == 0)
                loss = meshgrid[i].std(true);
            else {
                loss = loss.add("loss-" + i, meshgrid[i].std(true));
            }
        }

        String err = OpValidation.validate(tc, true);
        if (err != null)
            failed.add(err);
    }

    assertEquals(failed.toString(), 0, failed.size());
}
Example 19
Source File: TransformOpValidation.java From deeplearning4j with Apache License 2.0
@Test
public void testScalarOps() {
    int d0 = 2;
    int d1 = 3;
    int d2 = 4;

    int n = d0 * d1 * d2;
    List<String> failed = new ArrayList<>();

    for (int i = 0; i < 11; i++) {
        for (char inOrder : new char[]{'c', 'f'}) {
            SameDiff sd = SameDiff.create();

            INDArray inArr = Nd4j.linspace(1, n, n, DataType.DOUBLE).reshape('c', d0, d1, d2).dup(inOrder);
            SDVariable in = sd.var("in", inArr);
            TestCase tc = new TestCase(sd).gradientCheck(true);

            SDVariable out;
            String msg;
            switch (i) {
                case 0:
                    out = in.mul(2);
                    tc.expectedOutput(out.name(), inArr.mul(2));
                    msg = "mul - " + inOrder;
                    break;
                case 1:
                    out = in.div(2);
                    tc.expectedOutput(out.name(), inArr.div(2));
                    msg = "div - " + inOrder;
                    break;
                case 2:
                    out = in.add(2);
                    tc.expectedOutput(out.name(), inArr.add(2));
                    msg = "add - " + inOrder;
                    break;
                case 3:
                    out = in.sub(2);
                    tc.expectedOutput(out.name(), inArr.sub(2));
                    msg = "sub - " + inOrder;
                    break;
                case 4:
                    out = in.rdiv(2);
                    tc.expectedOutput(out.name(), inArr.rdiv(2));
                    msg = "rdiv - " + inOrder;
                    break;
                case 5:
                    out = in.rsub(2);
                    tc.expectedOutput(out.name(), inArr.rsub(2));
                    msg = "rsub - " + inOrder;
                    break;
                case 6:
                    out = sd.math().pow(in, 2);
                    tc.expectedOutput(out.name(), Transforms.pow(inArr, 2));
                    msg = "pow - " + inOrder;
                    break;
                case 7:
                    inArr.assign(Nd4j.rand(inArr.dataType(), inArr.shape()).muli(5).subi(2.5));
                    out = sd.math().floorMod(in, 2.0);
                    tc.expected(out, Nd4j.getExecutioner().exec(new ScalarFMod(inArr.dup(), 2.0)));
                    msg = "scalarFloorMod - " + inOrder;
                    break;
                case 8:
                    inArr.assign(Nd4j.rand(inArr.shape()));
                    out = sd.scalarMax(in, 0.5);
                    tc.expected(out, Transforms.max(inArr.dup(), 0.5));
                    msg = "scalarMax - " + inOrder;
                    break;
                case 9:
                    inArr.assign(Nd4j.rand(inArr.shape()));
                    out = sd.scalarMin(in, 0.5);
                    tc.expected(out, Transforms.min(inArr.dup(), 0.5));
                    msg = "scalarMin - " + inOrder;
                    break;
                case 10:
                    out = in.assign(0.5);
                    tc.expected(out, Nd4j.valueArrayOf(inArr.shape(), 0.5));
                    msg = "scalarSet - " + inOrder;
                    break;
                default:
                    throw new RuntimeException();
            }

            tc.testName(msg);

            SDVariable loss = sd.standardDeviation(out, true);

            log.info("Starting test: " + msg);
            String err = OpValidation.validate(tc, true);
            if (err != null) {
                failed.add(err);
            }
        }
    }
    assertEquals(failed.toString(), 0, failed.size());
}
Example 20
Source File: TestSessions.java From deeplearning4j with Apache License 2.0
@Test
public void testInferenceSessionBasic() {
    //So far: trivial test to check execution order
    SameDiff sd = SameDiff.create();
    SDVariable ph1 = sd.placeHolder("x", DataType.FLOAT, 3, 4);
    SDVariable ph2 = sd.placeHolder("y", DataType.FLOAT, 1, 4);

    SDVariable out = ph1.add("out", ph2);

    //NOTE: normally sessions are internal and completely hidden from users
    InferenceSession is = new InferenceSession(sd);

    INDArray x = Nd4j.linspace(1, 12, 12).castTo(DataType.FLOAT).reshape(3, 4);
    INDArray y = Nd4j.linspace(0.1, 0.4, 4, DataType.DOUBLE).castTo(DataType.FLOAT).reshape(1, 4);

    INDArray outExp = x.addRowVector(y);

    Map<String, INDArray> m = new HashMap<>();
    m.put("x", x);
    m.put("y", y);

    Map<String, INDArray> outMap = is.output(Collections.singletonList("out"), m, null,
            Collections.<String>emptyList(), null, At.defaultAt(Operation.TRAINING));

    assertEquals(1, outMap.size());
    assertEquals(outExp, outMap.get("out"));
}