org.nd4j.autodiff.execution.NativeGraphExecutioner Java Examples

The following examples show how to use org.nd4j.autodiff.execution.NativeGraphExecutioner. Each example is taken from an open-source project; the source file and license are noted above it.
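Before the project examples, here is a minimal, self-contained sketch of the pattern they all share: build or import a SameDiff graph, construct a NativeGraphExecutioner, and call executeGraph(...) to run the whole graph through libnd4j and get the outputs back as an INDArray[]. The tiny graph built below (the var/sum calls) is illustrative only, and the exact SameDiff builder methods vary somewhat between nd4j versions.

import org.nd4j.autodiff.execution.NativeGraphExecutioner;
import org.nd4j.autodiff.samediff.SDVariable;
import org.nd4j.autodiff.samediff.SameDiff;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;

public class NativeGraphExecutionerSketch {
    public static void main(String[] args) {
        // Build a tiny SameDiff graph: a 2x2 input summed along dimension 1
        SameDiff sd = SameDiff.create();
        SDVariable in = sd.var("in", Nd4j.linspace(1, 4, 4).reshape(2, 2));
        SDVariable sum = sd.sum("sum", in, 1);

        // Hand the whole graph to the native (libnd4j) executioner;
        // executeGraph(...) returns the graph outputs as an INDArray[]
        NativeGraphExecutioner executioner = new NativeGraphExecutioner();
        INDArray[] outputs = executioner.executeGraph(sd);

        System.out.println("Native execution result: " + outputs[0]);
    }
}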
Example #1
Source File: SameDiffInferenceExecutioner.java    From konduit-serving with Apache License 2.0
@Override
public void initialize(ModelLoader<SameDiff> model, ParallelInferenceConfig config) {
    nativeGraphExecutioner = new NativeGraphExecutioner();
    this.modelLoader = model;
    this.model = model();
    SameDiffModelLoader sameDiffModelLoader = (SameDiffModelLoader) model;
    this.inputs = sameDiffModelLoader.getInputNames();
    this.outputs = sameDiffModelLoader.getOutputNames();
    log.info("Inference execution loaded with inputs " + inputs + " and outputs " + outputs);
}
 
Example #2
Source File: LoadTensorFlowMNISTMLP.java    From dl4j-tutorials with MIT License
public static void main(String[] args) throws Exception {
    final String FROZEN_MLP = new ClassPathResource(BASE_DIR + "/frozen_model.pb").getFile().getPath();

    //Load placeholder inputs and corresponding predictions generated from tensorflow
    Map<String, INDArray> inputsPredictions = readPlaceholdersAndPredictions();

    //Load the graph into samediff
    SameDiff graph = TFGraphMapper.getInstance().importGraph(new File(FROZEN_MLP));
    //libnd4j executor
    //running with input_a array expecting to get prediction_a
    graph.associateArrayWithVariable(inputsPredictions.get("input_a"), graph.variableMap().get("input"));
    NativeGraphExecutioner executioner = new NativeGraphExecutioner();
    INDArray[] results = executioner.executeGraph(graph); //returns an array of the outputs
    INDArray libnd4jPred = results[0];
    System.out.println("LIBND4J exec prediction for input_a:\n" + libnd4jPred);
    if (libnd4jPred.equals(inputsPredictions.get("prediction_a"))) {
        //this holds: the libnd4j prediction matches the prediction generated from TensorFlow
        System.out.println("Predictions are equal to tensorflow");
    } else {
        throw new RuntimeException("Predictions don't match!");
    }

    //Now to run with the samediff executor, with input_b array expecting to get prediction_b
    SameDiff graphSD = TFGraphMapper.getInstance().importGraph(new File(FROZEN_MLP)); //Reimport the graph here; necessary for the 1.0 alpha release
    graphSD.associateArrayWithVariable(inputsPredictions.get("input_b"), graphSD.variableMap().get("input"));
    INDArray samediffPred = graphSD.execAndEndResult();
    System.out.println("SameDiff exec prediction for input_b:\n" + samediffPred);
    if (samediffPred.equals(inputsPredictions.get("prediction_b"))) {
        //this holds: the SameDiff prediction matches the prediction generated from TensorFlow
        System.out.println("Predictions are equal to tensorflow");
    }
    //Add a new op to the imported graph to demonstrate PyTorch-like (define-by-run) capability
    System.out.println("Adding new op to graph..");
    SDVariable linspaceConstant = graphSD.var("linspace", Nd4j.linspace(1, 10, 10));
    SDVariable totalOutput = graphSD.getVariable("output").add(linspaceConstant);
    INDArray totalOutputArr = totalOutput.eval();
    System.out.println(totalOutputArr);

}
 
Example #3
Source File: TensorFlowImportTest.java    From nd4j with Apache License 2.0
@Test
public void testIntermediateReduction() throws Exception {
    Nd4j.create(1);
    val tg = TFGraphMapper.getInstance().importGraph(new ClassPathResource("tf_graphs/reduce_dim.pb.txt").getInputStream());
    val sumResultVar = tg.getVariable("Sum");

  /*  val func = tg.getFunctionForVertexId(sumResultVar.getVertexId());
    assertEquals(0,func.getDimensions()[0]);
    assertEquals(3,tg.variables().size());
    assertNotNull(sumResultVar);
    assertNotNull(tg.getFunctionForVertexId(sumResultVar.getVertexId()));
    System.out.println(tg.variables());

    assertNotNull(func.getDimensions());
    assertEquals(0,func.getDimensions()[0]);*/

    val fb = tg.asFlatBuffers();
    assertNotNull(fb);

    val graph = FlatGraph.getRootAsFlatGraph(fb);
    assertEquals(1, graph.nodesLength());
    assertEquals(2, graph.variablesLength());

    assertEquals("Sum", graph.nodes(0).name());

    val nodeSum = graph.nodes(0);
    assertEquals(2, nodeSum.inputPairedLength());


    // we expect these inputs to be 1:0 and 2:0 respectively,
    // where 1 (or 2) is a graph node id
    // and :0 is that node's output index, which is 0 here because the inputs are predefined variables
    val in0 = nodeSum.inputPaired(0);
    val in1 = nodeSum.inputPaired(1);

    assertEquals(1, in0.first());
    assertEquals(0, in0.second());

    assertEquals(2, in1.first());
    assertEquals(0, in1.second());


    assertEquals(1, nodeSum.dimensions(1));


    //log.info("nodeSum inputs length: {}; inputPaired length: {}",nodeSum.inputLength(), nodeSum.inputPairedLength());

    //tg.asFlatFile(new File("../../../libnd4j/tests_cpu/resources/reduce_dim.fb"));
    val executioner = new NativeGraphExecutioner();

    val exp = Nd4j.create(3, 1).assign(3);

    val results = executioner.executeGraph(tg, configuration);

    assertNotNull(results);
    assertEquals(1, results.length);
    assertEquals(exp, results[0]);
}
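Note that the configuration passed to executeGraph(tg, configuration) above is a field defined elsewhere in the test class and is not shown in this excerpt. As a rough sketch of what such a field might look like (the specific builder options below are assumptions, not copied from the test), an ExecutorConfiguration is typically assembled with its builder:

import org.nd4j.autodiff.execution.conf.ExecutionMode;
import org.nd4j.autodiff.execution.conf.ExecutorConfiguration;
import org.nd4j.autodiff.execution.conf.OutputMode;
import org.nd4j.linalg.api.ops.executioner.OpExecutioner;

// Illustrative sketch only: a plausible executioner configuration for a test like the one above
private ExecutorConfiguration configuration = ExecutorConfiguration.builder()
        .executionMode(ExecutionMode.SEQUENTIAL)              // run ops one at a time
        .profilingMode(OpExecutioner.ProfilingMode.DISABLED)  // no op-level profiling
        .outputMode(OutputMode.IMPLICIT)                      // return the graph's implicit outputs
        .build();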