org.nd4j.autodiff.samediff.SameDiff Java Examples
The following examples show how to use
org.nd4j.autodiff.samediff.SameDiff.
Example #1
Source File: LayerOpValidation.java from deeplearning4j, Apache License 2.0
@Test(expected = IllegalArgumentException.class)
public void exceptionThrown_WhenConv1DConfigInvalid() {
    int nIn = 3;
    int nOut = 4;
    int k = 2;
    int mb = 3;
    int img = 28;

    SameDiff sd = SameDiff.create();
    INDArray wArr = Nd4j.create(k, nIn, nOut);
    INDArray inArr = Nd4j.create(mb, nIn, img);

    SDVariable in = sd.var("in", inArr);
    SDVariable w = sd.var("W", wArr);

    SDVariable[] vars = new SDVariable[]{in, w};

    // Negative padding and zero stride are invalid, so this config must trigger the expected exception
    Conv1DConfig conv1DConfig = Conv1DConfig.builder()
            .k(k).p(-1).s(0)
            .paddingMode(PaddingMode.VALID)
            .build();

    SDVariable out = sd.cnn().conv1d(in, w, conv1DConfig);
}
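For contrast, a minimal sketch of a configuration the same builder should accept, reusing the variables above (validConfig is an illustrative name; the assumption that p(0)/s(1) are legal follows from the test rejecting the negative padding and zero stride):

Conv1DConfig validConfig = Conv1DConfig.builder()
        .k(k).p(0).s(1)                  // kernel size 2, no padding, stride 1
        .paddingMode(PaddingMode.VALID)
        .build();
SDVariable out = sd.cnn().conv1d(in, w, validConfig); // no exception expected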
Example #2
Source File: StridedSlice.java from nd4j, Apache License 2.0
public StridedSlice(SameDiff sameDiff, SDVariable in, @NonNull int[] begin, @NonNull int[] end,
                    @NonNull int[] strides, int beginMask, int endMask, int ellipsisMask,
                    int newAxisMask, int shrinkAxisMask) {
    super(null, sameDiff, new SDVariable[]{in});
    this.begin = begin;
    this.end = end;
    this.strides = strides;
    this.beginMask = beginMask;
    this.endMask = endMask;
    this.ellipsisMask = ellipsisMask;
    this.newAxisMask = newAxisMask;
    this.shrinkAxisMask = shrinkAxisMask;

    // The integer-argument order must match the native op; see:
    // https://github.com/deeplearning4j/libnd4j/blob/master/include/ops/declarable/generic/parity_ops/strided_slice.cpp#L279
    addIArgument(beginMask);
    addIArgument(ellipsisMask);
    addIArgument(endMask);
    addIArgument(newAxisMask);
    addIArgument(shrinkAxisMask);
    addIArgument(begin);
    addIArgument(end);
    addIArgument(strides);
}
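A hypothetical construction sketch against this signature (sd and in stand for an existing SameDiff instance and a 2D input variable; leaving every mask at 0 disables it):

StridedSlice slice = new StridedSlice(sd, in,
        new int[]{0, 0},   // begin index per dimension
        new int[]{2, 2},   // end index per dimension (exclusive)
        new int[]{1, 1},   // stride per dimension
        0, 0, 0, 0, 0);    // beginMask, endMask, ellipsisMask, newAxisMask, shrinkAxisMask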
Example #3
Source File: MiscOpValidation.java from deeplearning4j, Apache License 2.0
@Test
public void testFlatten() {
    SameDiff sameDiff = SameDiff.create();
    INDArray x = Nd4j.linspace(DataType.DOUBLE, 1, 27, 1).reshape(3, 3, 3);
    SDVariable sdx = sameDiff.var(x);

    // Flattening the 3x3x3 array in 'c' order should recover the original linspace sequence
    INDArray expected = Nd4j.linspace(DataType.DOUBLE, 1, 27, 1);

    SDVariable output = new Flatten(sameDiff, 'c', sdx).outputVariable();
    SDVariable loss = sameDiff.standardDeviation(sdx, true);
    sameDiff.addLossVariable(loss);

    TestCase tc = new TestCase(sameDiff)
            .gradientCheck(true)
            .expectedOutput(output.name(), expected);

    String err = OpValidation.validate(tc);
    assertNull(err);
}
Example #4
Source File: AvgPooling2D.java from nd4j, Apache License 2.0
@Builder(builderMethodName = "builder") public AvgPooling2D(SameDiff sameDiff, SDVariable[] inputs, INDArray[] arrayInputs, INDArray[] arrayOutputs, Pooling2DConfig config) { super(null,sameDiff, inputs, false); if(arrayInputs != null) { addInputArgument(arrayInputs); } if(arrayOutputs != null) { addOutputArgument(arrayOutputs); } config.setType(Pooling2D.Pooling2DType.AVG); this.sameDiff = sameDiff; this.config = config; addArgs(); }
Example #5
Source File: TransformOpValidation.java from deeplearning4j, Apache License 2.0
@Ignore("12/16/2019 https://github.com/eclipse/deeplearning4j/issues/8540") @Test public void testPad() { INDArray in = Nd4j.valueArrayOf(new long[]{5}, 1.0); INDArray pad = Nd4j.create(new double[]{1, 1}, new long[]{1, 2}).castTo(DataType.LONG); INDArray value = Nd4j.scalar(10.0); INDArray out = Nd4j.create(new long[]{7}); DynamicCustomOp op = DynamicCustomOp.builder("pad") .addInputs(in, pad, value) //.addInputs(in, pad) //Also doesn't work .addOutputs(out) .addIntegerArguments(0) //0 = CONSTANT .build(); INDArray exp = Nd4j.create(new double[]{10, 1, 1, 1, 1, 1, 10}); OpValidation.validate(new OpTestCase(op) .expectedOutput(0, exp)); SameDiff sd = SameDiff.create(); SDVariable s = sd.var("in", in); SDVariable padded = sd.nn().pad(s, sd.constant(pad), 10.0); String err2 = OpValidation.validate(new TestCase(sd).expected(padded, exp).gradientCheck(false)); assertNull(err2); }
Example #6
Source File: BaseBroadcastOp.java from deeplearning4j, Apache License 2.0
public BaseBroadcastOp(SameDiff sameDiff, SDVariable i_v, long[] shape, boolean inPlace,
                       int[] dimension, Object[] extraArgs) {
    super(sameDiff, inPlace, extraArgs);
    this.dimension = dimension;
    if (i_v != null) {
        SameDiffUtils.validateDifferentialFunctionSameDiff(sameDiff, i_v, this);
        sameDiff.addArgsFor(new SDVariable[]{i_v}, this);
    } else {
        throw new IllegalArgumentException("Input variable must not be null.");
    }
}
Example #7
Source File: SameDiffLambdaVertex.java from deeplearning4j, Apache License 2.0
@Override
public SDVariable defineVertex(SameDiff sameDiff, Map<String, SDVariable> layerInput,
                               Map<String, SDVariable> paramTable, Map<String, SDVariable> maskVars) {
    VertexInputs vi = getInputs(sameDiff);
    int i = 0;
    if (vi.map.size() == 0 && layerInput.size() > 0) {
        for (SDVariable v : layerInput.values()) {
            vi.map.put(i++, v);
        }
    }
    return defineVertex(sameDiff, getInputs(sameDiff));
}
Example #8
Source File: TFGraphMapper.java from deeplearning4j, Apache License 2.0
/**
 * Import a frozen TensorFlow protobuf (.pb) file from the specified file, with optional overrides.
 *
 * @param f              Frozen TensorFlow model pb file to import
 * @param importOverride Optional import override for specific ops, keyed by op name
 * @param opFilter       Optional filter - ops to exclude/ignore
 * @return Imported graph
 */
public static SameDiff importGraph(@NonNull File f, Map<String, TFImportOverride> importOverride,
                                   TFOpImportFilter opFilter) {
    Preconditions.checkState(f.exists(), "File does not exist: %s", f);
    try (InputStream is = new BufferedInputStream(new FileInputStream(f))) {
        return importGraph(is, importOverride, opFilter);
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
}
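A minimal usage sketch for this overload (the file name is illustrative; passing null for both optional parameters imports the graph with no overrides and no op filtering):

File frozenModel = new File("frozen_model.pb"); // hypothetical path
SameDiff imported = TFGraphMapper.importGraph(frozenModel, null, null);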
Example #9
Source File: LoadTensorFlowMNISTMLP.java from dl4j-tutorials, MIT License
public static void main(String[] args) throws Exception {
    final String FROZEN_MLP = new ClassPathResource(BASE_DIR + "/frozen_model.pb").getFile().getPath();

    //Load placeholder inputs and corresponding predictions generated from tensorflow
    Map<String, INDArray> inputsPredictions = readPlaceholdersAndPredictions();

    //Load the graph into samediff
    SameDiff graph = TFGraphMapper.getInstance().importGraph(new File(FROZEN_MLP));

    //libnd4j executor: run with the input_a array, expecting prediction_a
    graph.associateArrayWithVariable(inputsPredictions.get("input_a"), graph.variableMap().get("input"));
    NativeGraphExecutioner executioner = new NativeGraphExecutioner();
    INDArray[] results = executioner.executeGraph(graph); //returns an array of the outputs
    INDArray libnd4jPred = results[0];
    System.out.println("LIBND4J exec prediction for input_a:\n" + libnd4jPred);
    if (libnd4jPred.equals(inputsPredictions.get("prediction_a"))) {
        System.out.println("Predictions are equal to tensorflow");
    } else {
        throw new RuntimeException("Predictions don't match!");
    }

    //Now run with the samediff executor, with the input_b array, expecting prediction_b.
    //Reimport the graph here; necessary for the 1.0 alpha release.
    SameDiff graphSD = TFGraphMapper.getInstance().importGraph(new File(FROZEN_MLP));
    //Associate the array with the re-imported graph (graphSD), not the first graph
    graphSD.associateArrayWithVariable(inputsPredictions.get("input_b"), graphSD.variableMap().get("input"));
    INDArray samediffPred = graphSD.execAndEndResult();
    System.out.println("SameDiff exec prediction for input_b:\n" + samediffPred);
    if (samediffPred.equals(inputsPredictions.get("prediction_b"))) {
        System.out.println("Predictions are equal to tensorflow");
    }

    //Add to the graph to demonstrate pytorch-like define-by-run capability
    System.out.println("Adding new op to graph..");
    SDVariable linspaceConstant = graphSD.var("linspace", Nd4j.linspace(1, 10, 10));
    SDVariable totalOutput = graphSD.getVariable("output").add(linspaceConstant);
    INDArray totalOutputArr = totalOutput.eval();
    System.out.println(totalOutputArr);
}
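The executor API used above (TFGraphMapper.getInstance(), execAndEndResult()) is from the 1.0-alpha release this tutorial targets. As a rough sketch of the same run on later SameDiff versions, where import is a static method and outputs are fetched by a placeholder map (method names assumed from the current API, so treat this as an approximation rather than a drop-in replacement):

SameDiff graphSD = TFGraphMapper.importGraph(new File(FROZEN_MLP));
INDArray samediffPred = graphSD.outputSingle(
        Collections.singletonMap("input", inputsPredictions.get("input_b")), "output");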
Example #10
Source File: Pooling2D.java from deeplearning4j, Apache License 2.0
@Builder(builderMethodName = "sameDiffBuilder") @SuppressWarnings("Used in lombok") public Pooling2D(SameDiff sameDiff, SDVariable[] inputs, Pooling2DConfig config) { super(null, sameDiff, inputs, false); this.config = config; addArgs(); }
Example #11
Source File: DifferentialFunction.java from nd4j, Apache License 2.0
/**
 * @param sameDiff  SameDiff instance this function belongs to
 * @param extraArgs Extra arguments for the op, if any
 */
public DifferentialFunction(SameDiff sameDiff, Object[] extraArgs) {
    this.sameDiff = sameDiff;
    setInstanceId();
    this.extraArgs = extraArgs;
}
Example #12
Source File: Mmul.java from nd4j, Apache License 2.0
@Override
public void initFromOnnx(OnnxProto3.NodeProto node, SameDiff initWith,
                         Map<String, OnnxProto3.AttributeProto> attributesForNode,
                         OnnxProto3.GraphProto graph) {
    val isTransposeA = attributesForNode.containsKey("transA") && attributesForNode.get("transA").getI() > 0;
    val isTransposeB = attributesForNode.containsKey("transB") && attributesForNode.get("transB").getI() > 0;
    MMulTranspose mMulTranspose = MMulTranspose.builder()
            .transposeA(isTransposeA).transposeB(isTransposeB)
            .build();
    this.mMulTranspose = mMulTranspose;
}
Example #13
Source File: ScatterAdd.java from deeplearning4j, Apache License 2.0
@Override
public void initFromTensorFlow(NodeDef nodeDef, SameDiff initWith,
                               Map<String, AttrValue> attributesForNode, GraphDef graph) {
    TFGraphMapper.initFunctionFromProperties(nodeDef.getOp(), this, attributesForNode, nodeDef, graph);

    if (nodeDef.containsAttr("use_locking")) {
        bArguments.add(nodeDef.getAttrOrThrow("use_locking").getB());
    } else {
        bArguments.add(false);
    }
}
Example #14
Source File: LayerOpValidation.java from deeplearning4j, Apache License 2.0
@Test
public void testReluLayer() {
    Nd4j.getRandom().setSeed(12345);

    SameDiff sameDiff = SameDiff.create();
    INDArray input = Nd4j.rand(new long[]{2, 3});
    INDArray weights = Nd4j.rand(new long[]{3, 4});
    INDArray b = Nd4j.rand(new long[]{4});

    SDVariable sdInput = sameDiff.var("input", input);
    SDVariable sdWeights = sameDiff.var("weights", weights);
    SDVariable sdBias = sameDiff.var("bias", b);

    SDVariable res = sameDiff.nn().reluLayer(sdInput, sdWeights, sdBias);
    SDVariable loss = sameDiff.standardDeviation(res, true);

    // Expected output: relu(input * weights + bias)
    INDArray exp = input.mmul(weights).addiRowVector(b);
    Transforms.relu(exp, false);

    TestCase tc = new TestCase(sameDiff)
            .gradientCheck(true)
            .expectedOutput(res.name(), exp);

    String err = OpValidation.validate(tc);
    assertNull(err);
}
Example #15
Source File: LossFunctions.java from nd4j, Apache License 2.0
/**
 * L1 loss - sum of absolute errors. L = sum_i abs(predicted_i - actual_i)
 *
 * @param outputName  Name for the output (loss) variable
 * @param predictions Predictions variable
 * @param label       Label variable
 * @param weights     Weights variable; if null, a scalar weight of 1.0 is used
 * @param reduction   Type of reduction to apply to the loss
 * @param dimensions  Dimension(s) to reduce over
 * @return LossInfo for the calculated loss
 */
public static LossInfo l1(String outputName, SDVariable predictions, SDVariable label,
                          SDVariable weights, Reduction reduction, int... dimensions) {
    LossInfo.Builder b = validate("l1", predictions, label, reduction);
    SameDiff sd = predictions.getSameDiff();

    if (weights == null) {
        weights = sd.one("l1_loss_weights", SCALAR);
    }

    String name = (reduction == Reduction.NONE ? outputName : null);
    SDVariable preReduceLoss = sd.abs(predictions.sub(label)).mul(name, weights);

    return doReduce(sd, outputName, false, b, reduction, preReduceLoss, label, weights, dimensions);
}
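A minimal call sketch for this method (the SameDiff setup and variable names are illustrative; Reduction.NONE is chosen because it is the only reduction mode referenced in the snippet above):

SameDiff sd = SameDiff.create();
SDVariable predictions = sd.var("predictions", Nd4j.rand(3, 4));
SDVariable labels = sd.var("labels", Nd4j.rand(3, 4));
// weights == null -> defaults to a scalar weight of 1.0, per the method body
LossInfo l1Loss = LossFunctions.l1("l1Loss", predictions, labels, null, Reduction.NONE, 1);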
Example #16
Source File: ScatterUpdate.java from deeplearning4j, Apache License 2.0
@Override
public void initFromTensorFlow(NodeDef nodeDef, SameDiff initWith,
                               Map<String, AttrValue> attributesForNode, GraphDef graph) {
    if (nodeDef.containsAttr("use_locking")) {
        bArguments.add(nodeDef.getAttrOrThrow("use_locking").getB());
    } else {
        bArguments.add(false);
    }
}
Example #17
Source File: ReductionOpValidation.java from deeplearning4j, Apache License 2.0
@Test
public void testMoments() {
    for (int[] axes : new int[][]{{0}, {1}, {0, 1}}) {
        INDArray input = Nd4j.linspace(1, 12, 12).reshape(3, 4);

        SameDiff sd = SameDiff.create();
        SDVariable in = sd.var("in", input);
        SDVariable[] moments = sd.math().moments(in, axes);
        INDArray expMean = input.mean(axes);
        INDArray expVar = input.var(false, axes);

        SDVariable loss;
        if (axes.length < 2) {
            loss = moments[0].add(moments[1]).std(true);
        } else {
            loss = moments[0].add(moments[1]).mean();
        }

        String msg = Arrays.toString(axes);

        TestCase tc = new TestCase(sd)
                .testName(msg)
                .expected(moments[0], expMean)
                .expected(moments[1], expVar);

        String err = OpValidation.validate(tc);
        assertNull(err);
    }
}
Example #18
Source File: ShapeOpValidation.java from deeplearning4j, Apache License 2.0
@Test
public void testGatherSimple() {
    SameDiff sameDiff = SameDiff.create();
    INDArray arr = Nd4j.create(new float[]{1, 2, 3, 4}, new long[]{2, 2});
    SDVariable x = sameDiff.var("x", arr);
    SDVariable result = sameDiff.gather(x, new int[]{1, 0}, 1);
    // Gathering indices {1, 0} along dimension 1 swaps the two columns of [[1, 2], [3, 4]]
    INDArray expected = Nd4j.create(new float[]{2, 1, 4, 3}, new long[]{2, 2});
    assertEquals(expected, result.eval());
}
Example #19
Source File: MaxPooling2D.java from deeplearning4j, Apache License 2.0
@Builder(builderMethodName = "sameDiffBuilder") @SuppressWarnings("Used in lombok") public MaxPooling2D(SameDiff sameDiff, SDVariable input, Pooling2DConfig config) { super(null, sameDiff, new SDVariable[]{input}, false); config.setType(Pooling2D.Pooling2DType.MAX); this.config = config; addArgs(); }
Example #20
Source File: Transpose.java from deeplearning4j, Apache License 2.0
@Override
public void initFromOnnx(Onnx.NodeProto node, SameDiff initWith,
                         Map<String, Onnx.AttributeProto> attributesForNode, Onnx.GraphProto graph) {
    if (attributesForNode.containsKey("perm")) {
        this.permuteDims = Ints.toArray(attributesForNode.get("perm").getIntsList());
    }
}
Example #21
Source File: Conv2D.java from deeplearning4j, Apache License 2.0
@Builder(builderMethodName = "sameDiffBuilder") public Conv2D(SameDiff sameDiff, SDVariable[] inputFunctions, Conv2DConfig config) { super(sameDiff, inputFunctions); initConfig(config); }
Example #22
Source File: DifferentialFunction.java from deeplearning4j, Apache License 2.0
/**
 * @param sameDiff  SameDiff instance this function belongs to
 * @param inPlace   Whether the operation is applied in place
 * @param extraArgs Extra arguments for the op, if any
 */
public DifferentialFunction(SameDiff sameDiff, boolean inPlace, Object[] extraArgs) {
    this.sameDiff = sameDiff;
    this.inPlace = inPlace;
    setInstanceId();
    this.extraArgs = extraArgs;
}
Example #23
Source File: Split.java from deeplearning4j, Apache License 2.0
@Override
public void initFromTensorFlow(NodeDef nodeDef, SameDiff initWith,
                               Map<String, AttrValue> attributesForNode, GraphDef graph) {
    val numSplits = (int) attributesForNode.get("num_split").getI();
    this.numSplit = numSplits;
    addIArgument(numSplits);

    val splitDim = TFGraphMapper.getArrayFrom(TFGraphMapper.getNodeWithNameFromGraph(graph, nodeDef.getInput(0)), graph);
    if (splitDim != null) {
        this.splitDim = splitDim.getInt(0);
        addIArgument(splitDim.getInt(0));
    }
}
Example #24
Source File: LeakyReLUDerivative.java from nd4j, Apache License 2.0
public LeakyReLUDerivative(SameDiff sameDiff, SDVariable i_v1, SDVariable i_v2, boolean inPlace, double alpha) {
    super(sameDiff, i_v1, i_v2, inPlace);
    this.alpha = alpha;
    this.extraArgs = new Object[]{alpha};
}
Example #25
Source File: MergeSum.java from deeplearning4j, Apache License 2.0
@Override
public void initFromOnnx(Onnx.NodeProto node, SameDiff initWith,
                         Map<String, Onnx.AttributeProto> attributesForNode, Onnx.GraphProto graph) {
    super.initFromOnnx(node, initWith, attributesForNode, graph);
}
Example #26
Source File: SDCNN.java from deeplearning4j, Apache License 2.0
public SDCNN(SameDiff sameDiff) {
    super(sameDiff);
}
Example #27
Source File: Sqrt.java from nd4j, Apache License 2.0
public Sqrt(SameDiff sameDiff, SDVariable i_v, Object[] extraArgs) {
    super(sameDiff, i_v, extraArgs);
}
Example #28
Source File: SparseSoftmaxCrossEntropyLossWithLogits.java from deeplearning4j, Apache License 2.0
public SparseSoftmaxCrossEntropyLossWithLogits(@NonNull SameDiff sameDiff, @NonNull SDVariable logits,
                                               @NonNull SDVariable labels) {
    super(null, sameDiff, new SDVariable[]{labels, logits}, false);
}
Example #29
Source File: Cast.java from deeplearning4j, Apache License 2.0
@Override
public void initFromTensorFlow(NodeDef nodeDef, SameDiff initWith,
                               Map<String, AttrValue> attributesForNode, GraphDef graph) {
    TFGraphMapper.initFunctionFromProperties(nodeDef.getOp(), this, attributesForNode, nodeDef, graph);
    addArgs();
}
Example #30
Source File: Assign.java from deeplearning4j, Apache License 2.0
public Assign(SameDiff sameDiff, SDVariable x, SDVariable y) {
    super(null, sameDiff, new SDVariable[]{x, y});
}