Java Code Examples for org.nd4j.linalg.api.ndarray.INDArray#equals()
The following examples show how to use org.nd4j.linalg.api.ndarray.INDArray#equals(). They are drawn from open-source projects; the source file and originating project for each example are listed above its code.
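Before the project examples, here is a minimal, self-contained sketch of the contract these snippets rely on: INDArray#equals() compares array contents, so two separately created arrays with the same shape and values are equal, unlike the == reference comparison. The class name and values below are illustrative only and do not come from any of the projects listed here.
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;

public class INDArrayEqualsSketch {
    public static void main(String[] args) {
        //Two separately created arrays with the same shape and values
        INDArray a = Nd4j.create(new double[] {1, 2, 3, 4}).reshape(2, 2);
        INDArray b = Nd4j.create(new double[] {1, 2, 3, 4}).reshape(2, 2);

        System.out.println(a == b);        //false: different objects
        System.out.println(a.equals(b));   //true: same shape and contents

        //Changing a single element breaks content equality
        b.putScalar(0, 9.0);
        System.out.println(a.equals(b));   //false
    }
}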
Example 1
Source File: TestDataVecDataSetFunctions.java From deeplearning4j with Apache License 2.0 | 6 votes |
private static boolean masksEqual(INDArray m1, INDArray m2) {
    if (m1 == null && m2 == null) {
        return true;
    }
    if (m1 != null && m2 != null) {
        return m1.equals(m2);
    }
    //One is null, other is not. Null and ones mask arrays are equal though
    if (m1 != null && !m1.equals(Nd4j.ones(m1.shape()))) {
        return false;
    }
    if (m2 != null && !m2.equals(Nd4j.ones(m2.shape()))) {
        return false;
    }
    return true;
}
Example 2
Source File: BaseUnderSamplingPreProcessor.java From deeplearning4j with Apache License 2.0 | 6 votes |
private void validateData(INDArray label, INDArray labelMask) {
    if (label.rank() != 3) {
        throw new IllegalArgumentException(
                        "UnderSamplingByMaskingPreProcessor can only be applied to a time series dataset");
    }
    if (label.size(1) > 2) {
        throw new IllegalArgumentException(
                        "UnderSamplingByMaskingPreProcessor can only be applied to labels that represent binary classes. Label size was found to be "
                                        + label.size(1) + ".Expecting size=1 or size=2.");
    }
    if (label.size(1) == 2) {
        //check if label is of size one hot
        INDArray sum1 = label.sum(1).mul(labelMask);
        INDArray floatMask = labelMask.castTo(label.dataType());
        if (!sum1.equals(floatMask)) {
            throw new IllegalArgumentException("Labels of size minibatchx2xtimesteps are expected to be one hot."
                            + label.toString() + "\n is not one-hot");
        }
    }
}
Example 3
Source File: SDVariable.java From deeplearning4j with Apache License 2.0 | 6 votes |
@Override
public boolean equals(Object o) {
    if (o == this)
        return true;
    if (!(o instanceof SDVariable))
        return false;

    SDVariable s = (SDVariable) o;
    if (!varName.equals(s.varName))
        return false;
    if (variableType != s.variableType)
        return false;
    if (dataType != s.dataType)
        return false;

    if (variableType == VariableType.VARIABLE || variableType == VariableType.CONSTANT) {
        INDArray a1 = getArr();
        INDArray a2 = s.getArr();
        return a1.equals(a2);
    }
    return true;
}
Example 4
Source File: NDArrayTestsFortran.java From deeplearning4j with Apache License 2.0 | 6 votes |
@Test
public void testPermute() {
    INDArray n = Nd4j.create(Nd4j.linspace(1, 20, 20, DataType.DOUBLE).data(), new long[] {5, 4});
    INDArray transpose = n.transpose();
    INDArray permute = n.permute(1, 0);
    assertEquals(permute, transpose);
    assertEquals(transpose.length(), permute.length(), 1e-1);

    INDArray toPermute = Nd4j.create(Nd4j.linspace(0, 7, 8, DataType.DOUBLE).data(), new long[] {2, 2, 2});
    INDArray permuted = toPermute.dup().permute(2, 1, 0);
    boolean eq = toPermute.equals(permuted);
    assertNotEquals(toPermute, permuted);

    INDArray permuteOther = toPermute.permute(1, 2, 0);
    for (int i = 0; i < permuteOther.slices(); i++) {
        INDArray toPermutesliceI = toPermute.slice(i);
        INDArray permuteOtherSliceI = permuteOther.slice(i);
        permuteOtherSliceI.toString();
        assertNotEquals(toPermutesliceI, permuteOtherSliceI);
    }
    assertArrayEquals(permuteOther.shape(), toPermute.shape());
    assertNotEquals(toPermute, permuteOther);
}
Example 5
Source File: NumpyFormatTests.java From deeplearning4j with Apache License 2.0 | 6 votes |
@Test(expected = RuntimeException.class)
public void readNumpyCorruptHeader2() throws Exception {
    File f = testDir.newFolder();

    File fValid = new ClassPathResource("numpy_arrays/arange_3,4_float32.npy").getFile();
    byte[] numpyBytes = FileUtils.readFileToByteArray(fValid);
    for (int i = 1; i < 10; i++) {
        numpyBytes[i] = 0;
    }
    File fCorrupt = new File(f, "corrupt.npy");
    FileUtils.writeByteArrayToFile(fCorrupt, numpyBytes);

    INDArray exp = Nd4j.arange(12).castTo(DataType.FLOAT).reshape(3, 4);

    INDArray act1 = Nd4j.createFromNpyFile(fValid);
    assertEquals(exp, act1);

    INDArray probablyShouldntLoad = Nd4j.createFromNpyFile(fCorrupt); //Loads fine
    boolean eq = exp.equals(probablyShouldntLoad); //And is actually equal content
}
Example 6
Source File: NumpyFormatTests.java From deeplearning4j with Apache License 2.0 | 6 votes |
@Test(expected = RuntimeException.class)
public void readNumpyCorruptHeader1() throws Exception {
    File f = testDir.newFolder();

    File fValid = new ClassPathResource("numpy_arrays/arange_3,4_float32.npy").getFile();
    byte[] numpyBytes = FileUtils.readFileToByteArray(fValid);
    for (int i = 0; i < 10; i++) {
        numpyBytes[i] = 0;
    }
    File fCorrupt = new File(f, "corrupt.npy");
    FileUtils.writeByteArrayToFile(fCorrupt, numpyBytes);

    INDArray exp = Nd4j.arange(12).castTo(DataType.FLOAT).reshape(3, 4);

    INDArray act1 = Nd4j.createFromNpyFile(fValid);
    assertEquals(exp, act1);

    INDArray probablyShouldntLoad = Nd4j.createFromNpyFile(fCorrupt); //Loads fine
    boolean eq = exp.equals(probablyShouldntLoad); //And is actually equal content
}
Example 7
Source File: WordVectorSerializerTest.java From deeplearning4j with Apache License 2.0 | 5 votes |
private double arraysSimilarity(INDArray array1, INDArray array2) {
    if (array1.equals(array2))
        return 1.0;

    INDArray vector = Transforms.unitVec(array1);
    INDArray vector2 = Transforms.unitVec(array2);

    if (vector == null || vector2 == null)
        return -1;

    return Nd4j.getBlasWrapper().dot(vector, vector2);
}
Example 8
Source File: LoadTensorFlowMNISTMLP.java From dl4j-tutorials with MIT License | 5 votes |
public static void main(String[] args) throws Exception {
    final String FROZEN_MLP = new ClassPathResource(BASE_DIR + "/frozen_model.pb").getFile().getPath();

    //Load placeholder inputs and corresponding predictions generated from tensorflow
    Map<String, INDArray> inputsPredictions = readPlaceholdersAndPredictions();

    //Load the graph into samediff
    SameDiff graph = TFGraphMapper.getInstance().importGraph(new File(FROZEN_MLP));

    //libnd4j executor
    //running with input_a array expecting to get prediction_a
    graph.associateArrayWithVariable(inputsPredictions.get("input_a"), graph.variableMap().get("input"));
    NativeGraphExecutioner executioner = new NativeGraphExecutioner();
    INDArray[] results = executioner.executeGraph(graph); //returns an array of the outputs
    INDArray libnd4jPred = results[0];
    System.out.println("LIBND4J exec prediction for input_a:\n" + libnd4jPred);
    if (libnd4jPred.equals(inputsPredictions.get("prediction_a"))) {
        //this is true and therefore predictions are equal
        System.out.println("Predictions are equal to tensorflow");
    } else {
        throw new RuntimeException("Predictions don't match!");
    }

    //Now to run with the samediff executor, with input_b array expecting to get prediction_b
    SameDiff graphSD = TFGraphMapper.getInstance().importGraph(new File(FROZEN_MLP)); //Reimport graph here, necessary for the 1.0 alpha release
    graphSD.associateArrayWithVariable(inputsPredictions.get("input_b"), graph.variableMap().get("input"));
    INDArray samediffPred = graphSD.execAndEndResult();
    System.out.println("SameDiff exec prediction for input_b:\n" + samediffPred);
    if (samediffPred.equals(inputsPredictions.get("prediction_b"))) {
        //this is true and therefore predictions are equal
        System.out.println("Predictions are equal to tensorflow");
    }

    //add to graph to demonstrate pytorch like capability
    System.out.println("Adding new op to graph..");
    SDVariable linspaceConstant = graphSD.var("linspace", Nd4j.linspace(1, 10, 10));
    SDVariable totalOutput = graphSD.getVariable("output").add(linspaceConstant);
    INDArray totalOutputArr = totalOutput.eval();
    System.out.println(totalOutputArr);
}
Example 9
Source File: DataSet.java From deeplearning4j with Apache License 2.0 | 5 votes |
private static boolean equalOrBothNull(INDArray first, INDArray second) {
    if (first == null && second == null)
        return true; //Both are null: ok
    if (first == null || second == null)
        return false; //Only one is null, not both
    return first.equals(second);
}
Example 10
Source File: ConvDataFormatTests.java From deeplearning4j with Apache License 2.0 | 5 votes |
private static List<String> differentGrads(Gradient g1, Gradient g2) {
    List<String> differs = new ArrayList<>();
    Map<String, INDArray> m1 = g1.gradientForVariable();
    Map<String, INDArray> m2 = g2.gradientForVariable();
    for (String s : m1.keySet()) {
        INDArray a1 = m1.get(s);
        INDArray a2 = m2.get(s);
        if (!a1.equals(a2)) {
            differs.add(s);
        }
    }
    return differs;
}
Example 11
Source File: DataSet.java From nd4j with Apache License 2.0 | 5 votes |
private static boolean equalOrBothNull(INDArray first, INDArray second) {
    if (first == null && second == null)
        return true; //Both are null: ok
    if (first == null || second == null)
        return false; //Only one is null, not both
    return first.equals(second);
}
Example 12
Source File: MultiDataSet.java From nd4j with Apache License 2.0 | 5 votes |
private INDArray[] loadINDArrays(int numArrays, DataInputStream dis, boolean isMask) throws IOException {
    INDArray[] result = null;
    if (numArrays > 0) {
        result = new INDArray[numArrays];
        for (int i = 0; i < numArrays; i++) {
            INDArray arr = Nd4j.read(dis);
            result[i] = isMask && arr.equals(EMPTY_MASK_ARRAY_PLACEHOLDER.get()) ? null : arr;
        }
    }
    return result;
}
Example 13
Source File: ParagraphVectorsTest.java From deeplearning4j with Apache License 2.0 | 5 votes |
@Deprecated
private double arraysSimilarity(@NonNull INDArray array1, @NonNull INDArray array2) {
    if (array1.equals(array2))
        return 1.0;

    INDArray vector = Transforms.unitVec(array1);
    INDArray vector2 = Transforms.unitVec(array2);

    if (vector == null || vector2 == null)
        return -1;

    return Transforms.cosineSim(vector, vector2);
}
Example 14
Source File: GradCheckTransforms.java From nd4j with Apache License 2.0 | 5 votes |
@Test
public void testDynamicStitch() {
    SameDiff sd = SameDiff.create();

    INDArray ia = Nd4j.create(new float[]{5, 1, 3}, new int[]{1, 3});
    INDArray ib = Nd4j.create(new float[]{7, 2, 4}, new int[]{1, 3});
    INDArray indexA = Nd4j.create(new float[]{0, 1, 4}, new int[]{1, 3});
    INDArray indexB = Nd4j.create(new float[]{2, 3, 5}, new int[]{1, 3});

    INDArray expOut = Nd4j.create(new int[]{1, 6});

    DynamicCustomOp dynamicStitch = DynamicCustomOp.builder("dynamic_stitch")
            .addInputs(indexA, indexB, ia, ib)
            .addOutputs(expOut).build();
    Nd4j.getExecutioner().exec(dynamicStitch);

    SDVariable in1 = sd.var("in1", new int[]{1, 3});
    SDVariable in2 = sd.var("in2", new int[]{1, 3});
    SDVariable index1 = sd.var("index1", new int[]{1, 3});
    SDVariable index2 = sd.var("index2", new int[]{1, 3});

    sd.associateArrayWithVariable(ia, in1);
    sd.associateArrayWithVariable(ib, in2);
    sd.associateArrayWithVariable(indexA, index1);
    sd.associateArrayWithVariable(indexB, index2);

    SDVariable t = sd.dynamicStitch(new SDVariable[]{index1, index2}, new SDVariable[]{in1, in2});
    SDVariable loss = sd.mean("loss", t);

    sd.exec();
    INDArray out = t.getArr();

    if (!expOut.equals(out)) {
        log.error("forward failed");
    }
}
Example 15
Source File: GradCheckTransforms.java From nd4j with Apache License 2.0 | 5 votes |
@Test
public void testDepthToSpace() {
    Nd4j.getRandom().setSeed(1337);

    int miniBatch = 128;
    int blockSize = 4;
    String dataFormat = "NHWC";
    int isNHWC = dataFormat.equals("NHWC") ? 1 : 0;
    int[] inputShape = new int[]{miniBatch, 2, 2, blockSize * blockSize};

    INDArray input = Nd4j.randn(inputShape);
    SameDiff sd = SameDiff.create();
    SDVariable sdInput = sd.var("in", inputShape);

    INDArray expOut = Nd4j.create(miniBatch, 2 * blockSize, 2 * blockSize, 1);
    DynamicCustomOp op = DynamicCustomOp.builder("depth_to_space")
            .addInputs(input)
            .addIntegerArguments(blockSize, isNHWC)
            .addOutputs(expOut).build();
    Nd4j.getExecutioner().exec(op);

    sd.associateArrayWithVariable(input, sdInput);

    SDVariable t = sd.depthToSpace(sdInput, blockSize, dataFormat);
    SDVariable loss = sd.mean("loss", t);

    sd.exec();
    INDArray out = t.getArr();

    if (!expOut.equals(out)) {
        log.info("depth to space failed on forward");
    }

    try {
        GradCheckUtil.checkGradients(sd);
    } catch (Exception e) {
        e.printStackTrace();
    }
}
Example 16
Source File: GradCheckTransforms.java From nd4j with Apache License 2.0 | 5 votes |
@Test
public void testSpaceToDepth() {
    Nd4j.getRandom().setSeed(1337);

    int miniBatch = 128;
    int blockSize = 4;
    String dataFormat = "NHWC";
    int isNHWC = dataFormat.equals("NHWC") ? 1 : 0;
    int[] inputShape = new int[]{miniBatch, 2 * blockSize, 2 * blockSize, 1};

    INDArray input = Nd4j.randn(inputShape);
    SameDiff sd = SameDiff.create();
    SDVariable sdInput = sd.var("in", inputShape);

    INDArray expOut = Nd4j.create(miniBatch, 2, 2, blockSize * blockSize);
    DynamicCustomOp op = DynamicCustomOp.builder("space_to_depth")
            .addInputs(input)
            .addIntegerArguments(blockSize, isNHWC)
            .addOutputs(expOut).build();
    Nd4j.getExecutioner().exec(op);

    sd.associateArrayWithVariable(input, sdInput);

    SDVariable t = sd.spaceToDepth(sdInput, blockSize, dataFormat);
    SDVariable loss = sd.mean("loss", t);

    sd.exec();
    INDArray out = t.getArr();

    if (!expOut.equals(out)) {
        log.info("space to depth failed on forward");
    }

    try {
        GradCheckUtil.checkGradients(sd);
    } catch (Exception e) {
        e.printStackTrace();
    }
}
Example 17
Source File: GradCheckTransforms.java From nd4j with Apache License 2.0 | 5 votes |
@Test
public void testCross() {
    INDArray a = Nd4j.create(new float[]{4, 2, 1}, new int[]{1, 3});
    INDArray b = Nd4j.create(new float[]{1, 3, 4}, new int[]{1, 3});

    INDArray expOut = Nd4j.create(1, 3);

    DynamicCustomOp op = DynamicCustomOp.builder("cross").addInputs(a, b).addOutputs(expOut).build();
    Nd4j.getExecutioner().exec(op);

    SameDiff sd = SameDiff.create();
    SDVariable sdA = sd.var("a", expOut.shape());
    SDVariable sdB = sd.var("b", expOut.shape());

    sd.associateArrayWithVariable(a, sdA);
    sd.associateArrayWithVariable(b, sdB);

    SDVariable t = sd.cross(sdA, sdB);
    SDVariable loss = sd.mean("loss", t);

    sd.exec();
    INDArray out = t.getArr();

    if (!expOut.equals(out)) {
        log.info("cross failed on forward");
    }

    try {
        GradCheckUtil.checkGradients(sd);
    } catch (Exception e) {
        e.printStackTrace();
    }
}
Example 18
Source File: TFGraphTestAllHelper.java From deeplearning4j with Apache License 2.0 | 4 votes |
public static void checkIntermediate(Map<String, INDArray> inputs, String modelName, String baseDir, String modelFileName,
                                     ExecuteWith execType, BiFunction<File, String, SameDiff> loader,
                                     Double maxRelErrorOverride, Double minAbsErrorOverride, File localTestDir,
                                     boolean printArraysDebugging) throws IOException {
    Preconditions.checkArgument((maxRelErrorOverride == null) == (minAbsErrorOverride == null),
            "Both maxRelErrorOverride and minAbsErrorOverride must be null or both must be provided");
    Nd4j.EPS_THRESHOLD = 1e-3;
    OpExecOrderListener listener = new OpExecOrderListener();       //Used to collect exec order
    Pair<SameDiff, Map<String, INDArray>> p = getGraphAfterExec(baseDir, modelFileName, modelName, inputs, execType,
            loader, Collections.singletonList(listener), null, printArraysDebugging);
    SameDiff graph = p.getFirst();
    Map<String, INDArray> sdPredictions = p.getSecond();

    //Collect coverage info about ops
    OpValidation.collectTensorflowImportCoverage(graph);

    if (!execType.equals(ExecuteWith.JUST_PRINT)) {
        int count = 0;
        //Evaluate the nodes in their execution order - this is useful for debugging (as we want the *first* failure
        // to be detected before later failures)
        List<String> varNames = new ArrayList<>();
        Map<String, SameDiffOp> fns = graph.getOps();
        List<String> execOrder = listener.getOpNamesList();
        for (String opName : execOrder) {
            String[] outputs = graph.getOutputsForOp(fns.get(opName).getOp());
            Collections.addAll(varNames, outputs);
        }

        for (String varName : varNames) {
            if (!inputs.containsKey(varName)) { //avoiding placeholders
                INDArray tfValue = intermediateVars(modelName, baseDir, varName, localTestDir);
                if (tfValue == null) {
                    continue;
                }
                log.info("Starting check: variable {}", varName);
                if (skipNode(modelName, varName)) {
                    log.info("\n\tFORCING no check on " + varName);
                } else {
                    assertArrayEquals("Shape not equal on node " + varName, tfValue.shape(), graph.getVariable(varName).getShape());
                    INDArray sdVal = sdPredictions.get(varName);
                    if (maxRelErrorOverride != null) {
                        INDArray diff = Transforms.abs(tfValue.sub(sdVal), false);
                        INDArray absErrorMask = diff.gte(minAbsErrorOverride);  //value 1 if x[i] > minAbsError; value 0 otherwise. Used to get rid of 1e-30 vs. 1e-29 type failures
                        INDArray sumAbs = Transforms.abs(tfValue, true).addi(Transforms.abs(sdVal, true));
                        BooleanIndexing.replaceWhere(sumAbs, 1.0, Conditions.equals(0.0));  //Can only get 0.0 if both are zeros - need to avoid 0/0=NaN
                        INDArray relError = diff.divi(sumAbs);
                        relError.muli(absErrorMask);

                        int countExceeds = Nd4j.getExecutioner().exec(new MatchCondition(relError, Conditions.greaterThan(maxRelErrorOverride))).getInt(0);

                        double maxRE = -1;
                        //Mainly used for analysis in debugger:
                        DifferentialFunction op = null;
                        String[] opInputs = null;
                        if (countExceeds > 0) {
                            maxRE = relError.maxNumber().doubleValue();
                            //Find the op that this variable is produced by
                            op = graph.getVariableOutputOp(varName);
                            opInputs = graph.getInputsForOp(op);
                        }

                        assertEquals(varName + ": " + countExceeds + " values exceed maxRelError=" + maxRelErrorOverride
                                + " with minAbsError=" + minAbsErrorOverride + "; largest observed relError=" + maxRE, 0, countExceeds);
                    } else {
//                        assertEquals("Value not equal on node " + varName, tfValue, sdVal);
                        if (tfValue.equals(sdVal)) {
                            System.out.println("Pass: " + varName);
                        } else {
                            System.out.println("FAIL: " + varName);
                            System.out.println("TF:\n" + tfValue);
                            System.out.println("SD:\n" + sdVal);
                        }
                    }
                    log.info("Values and shapes equal for {}", varName);
                    count++;
                }
            }
        }

        assertTrue("No intermediate variables were checked", count > 0);
    }

    Nd4j.EPS_THRESHOLD = 1e-5;
}
Example 19
Source File: SpTree.java From deeplearning4j with Apache License 2.0 | 4 votes |
private boolean insert(int index) {
    /*MemoryWorkspace workspace =
            workspaceMode == WorkspaceMode.NONE ? new DummyWorkspace()
                    : Nd4j.getWorkspaceManager().getWorkspaceForCurrentThread(
                            workspaceConfigurationExternal, workspaceExternal);
    try (MemoryWorkspace ws = workspace.notifyScopeEntered())*/ {

        INDArray point = data.slice(index);
        /*boolean contains = false;
        SpTreeCell op = new SpTreeCell(boundary.corner(), boundary.width(), point, N, contains);
        Nd4j.getExecutioner().exec(op);
        op.getOutputArgument(0).getScalar(0);
        if (!contains)
            return false;*/
        if (!boundary.contains(point))
            return false;

        cumSize++;
        double mult1 = (double) (cumSize - 1) / (double) cumSize;
        double mult2 = 1.0 / (double) cumSize;
        centerOfMass.muli(mult1);
        centerOfMass.addi(point.mul(mult2));

        // If there is space in this quad tree and it is a leaf, add the object here
        if (isLeaf() && size < nodeCapacity) {
            this.index[size] = index;
            indices.add(point);
            size++;
            return true;
        }

        for (int i = 0; i < size; i++) {
            INDArray compPoint = data.slice(this.index[i]);
            if (compPoint.equals(point))
                return true;
        }

        if (isLeaf())
            subDivide();

        // Find out where the point can be inserted
        for (int i = 0; i < numChildren; i++) {
            if (children[i].insert(index))
                return true;
        }

        throw new IllegalStateException("Shouldn't reach this state");
    }
}
Example 20
Source File: ND4JTestUtils.java From deeplearning4j with Apache License 2.0 | 4 votes |
@Override
public Boolean apply(INDArray i1, INDArray i2) {
    return i1.equals(i2);
}