Java Code Examples for org.nd4j.autodiff.samediff.SameDiff#summary()
The following examples show how to use org.nd4j.autodiff.samediff.SameDiff#summary().
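For orientation, here is a minimal, self-contained sketch (a hypothetical SummaryDemo class; it mirrors the linear-layer setup from Example 1 below) illustrating that summary() returns a printable String describing the graph:

import org.nd4j.autodiff.samediff.SDVariable;
import org.nd4j.autodiff.samediff.SameDiff;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;

public class SummaryDemo {
    public static void main(String[] args) {
        SameDiff sd = SameDiff.create();

        // Build a tiny graph: out = input * weights + bias
        INDArray input = Nd4j.rand(new long[]{2, 3});
        INDArray weights = Nd4j.rand(new long[]{3, 4});
        INDArray bias = Nd4j.rand(new long[]{4});

        SDVariable in = sd.var("input", input);
        SDVariable w = sd.var("weights", weights);
        SDVariable b = sd.var("bias", bias);
        SDVariable out = sd.nn().linear(in, w, b);

        // summary() returns a human-readable String listing the graph's
        // variables, ops, and their connections
        System.out.println(sd.summary());
    }
}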
Example 1
Source File: LayerOpValidation.java from deeplearning4j, Apache License 2.0
@Test
public void testXwPlusB() {
    Nd4j.getRandom().setSeed(12345);
    SameDiff sameDiff = SameDiff.create();
    INDArray input = Nd4j.rand(new long[]{2, 3});
    INDArray weights = Nd4j.rand(new long[]{3, 4});
    INDArray b = Nd4j.rand(new long[]{4});

    SDVariable sdInput = sameDiff.var("input", input);
    SDVariable sdWeights = sameDiff.var("weights", weights);
    SDVariable sdBias = sameDiff.var("bias", b);

    SDVariable res = sameDiff.nn().linear(sdInput, sdWeights, sdBias);
    SDVariable loss = sameDiff.standardDeviation(res, true);

    INDArray exp = input.mmul(weights).addiRowVector(b);

    TestCase tc = new TestCase(sameDiff)
            .gradientCheck(true)
            .expectedOutput(res.name(), exp);

    // System.out.println(sameDiff.summary());
    // System.out.println("============================");
    sameDiff.summary();
    sameDiff.createGradFunction();
    // System.out.println(sameDiff.getFunction("grad").summary());
    sameDiff.getFunction("grad").summary();

    String err = OpValidation.validate(tc);
    assertNull(err);
}
Example 2
Source File: TensorFlowImportTest.java from deeplearning4j, Apache License 2.0
@Test
public void testIntermediateReduction() throws Exception {
    Nd4j.create(1);
    SameDiff tg = TFGraphMapper.importGraph(new ClassPathResource("tf_graphs/reduce_dim.pb.txt").getInputStream());
    SDVariable sumResultVar = tg.getVariable("Sum");

    /*  val func = tg.getFunctionForVertexId(sumResultVar.getVertexId());
        assertEquals(0, func.getDimensions()[0]);
        assertEquals(3, tg.variables().size());
        assertNotNull(sumResultVar);
        assertNotNull(tg.getFunctionForVertexId(sumResultVar.getVertexId()));

        System.out.println(tg.variables());

        assertNotNull(func.getDimensions());
        assertEquals(0, func.getDimensions()[0]);  */

    ByteBuffer fb = tg.asFlatBuffers(true);
    assertNotNull(fb);

    FlatGraph graph = FlatGraph.getRootAsFlatGraph(fb);
    assertEquals(1, graph.nodesLength());
    assertEquals(2, graph.variablesLength());

    assertEquals("Sum", graph.nodes(0).name());

    FlatNode nodeSum = graph.nodes(0);
    assertEquals(2, nodeSum.inputPairedLength());

    // we expect these inputs to be 1:0 and 2:0 respectively
    // where 1 (or 2) is a graph node id
    // and :0 is graph node output index, which is 0 because that's predefined variables
    val in0 = nodeSum.inputPaired(0);
    val in1 = nodeSum.inputPaired(1);

    assertEquals(1, in0.first());
    assertEquals(0, in0.second());

    assertEquals(2, in1.first());
    assertEquals(0, in1.second());

    // System.out.println(tg.summary());
    tg.summary();

    int dimensionsLength = nodeSum.dimensionsLength();
    assertEquals(1, dimensionsLength);
    int d = nodeSum.dimensions(0);
    assertEquals(1, d);

    // log.info("nodeSum inputs length: {}; inputPaired length: {}", nodeSum.inputLength(), nodeSum.inputPairedLength());

    tg.asFlatFile(new File("../../../libnd4j/tests_cpu/resources/reduce_dim.fb"));

    /*  val executioner = new NativeGraphExecutioner();

        val exp = Nd4j.create(3, 1).assign(3);

        val results = executioner.executeGraph(tg, configuration);

        assertNotNull(results);
        assertEquals(1, results.length);
        assertEquals(exp, results[0]);  */
}