Java Code Examples for org.nd4j.linalg.api.ndarray.INDArray#close()
The following examples show how to use org.nd4j.linalg.api.ndarray.INDArray#close().
Each example is taken from an open source project; the originating project, source file, and license are noted above each snippet.
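Before the project examples, here is a minimal sketch of the basic pattern they all follow: close() immediately deallocates an array's off-heap buffer, and closeable() reports whether the array owns that buffer (views and workspace-attached arrays typically do not). The class name CloseSketch is hypothetical and only for illustration.

import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;

public class CloseSketch {
    public static void main(String[] args) {
        INDArray arr = Nd4j.rand(3, 4);            // allocates an off-heap buffer
        // ... use the array ...
        if (arr.closeable()) {                     // views / workspace-attached arrays are not closeable
            arr.close();                           // release native memory immediately, without waiting for GC
        }
        System.out.println(arr.wasClosed());       // true once the buffer has been deallocated
    }
}

Using an array after it has been closed is invalid; the examples below therefore close arrays only after their last use, often combined with destroying workspaces and calling System.gc() to release all remaining native memory.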
Example 1
Source File: MultiLayerNetwork.java From deeplearning4j with Apache License 2.0
/**
 * Close the network and deallocate all native memory, including: parameters, gradients,
 * updater memory and workspaces.
 * Note that the network should not be used again for any purpose after it has been closed
 */
@Override
public void close(){
    //Close the INDArray and dealloc
    if(flattenedParams.closeable())
        flattenedParams.close();

    if(flattenedGradients != null && flattenedGradients.closeable())
        flattenedGradients.close();

    Updater u = getUpdater(false);
    if(u != null && u.getStateViewArray() != null) {
        INDArray state = u.getStateViewArray();
        if(state.closeable())
            state.close();
    }

    Nd4j.getWorkspaceManager().destroyAllWorkspacesForCurrentThread();
    System.gc();
}
Example 2
Source File: TestInstantiation.java From deeplearning4j with Apache License 2.0
public void testInitPretrained(ZooModel model, long[] inShape, long[] outShape) throws Exception {
    ignoreIfCuda();
    assertTrue(model.pretrainedAvailable(PretrainedType.IMAGENET));

    ComputationGraph initializedModel = (ComputationGraph) model.initPretrained();
    INDArray[] result = initializedModel.output(Nd4j.rand(inShape));
    assertArrayEquals(result[0].shape(), outShape);

    // clean up for current model
    Nd4j.getWorkspaceManager().destroyAllWorkspacesForCurrentThread();
    initializedModel.params().close();
    for(INDArray arr : result){
        arr.close();
    }
    System.gc();
}
Example 3
Source File: TestInstantiation.java From deeplearning4j with Apache License 2.0
public void testInitRandomModel(ZooModel model, long[] inShape, long[] outShape){
    ignoreIfCuda();

    //Test initialization of NON-PRETRAINED models
    log.info("Testing {}", model.getClass().getSimpleName());
    ComputationGraph initializedModel = model.init();
    INDArray f = Nd4j.rand(DataType.FLOAT, inShape);
    INDArray[] result = initializedModel.output(f);
    assertArrayEquals(result[0].shape(), outShape);
    INDArray l = outShape.length == 2 ? TestUtils.randomOneHot(1, (int)outShape[1], 12345) : Nd4j.rand(DataType.FLOAT, outShape);
    initializedModel.fit(new org.nd4j.linalg.dataset.DataSet(f, l));

    // clean up for current model
    Nd4j.getWorkspaceManager().destroyAllWorkspacesForCurrentThread();
    f.close();
    l.close();
    initializedModel.params().close();
    initializedModel.getFlattenedGradients().close();
    System.gc();
}
Example 4
Source File: PipelineExecutioner.java From konduit-serving with Apache License 2.0
private void timedResponse(RoutingContext ctx,
                           Output.DataFormat outputDataFormat,
                           String batchId,
                           INDArray[] arrays,
                           Map<String, BatchOutput> batchOutputMap) {
    long startTime = System.nanoTime();
    writeResponse(batchOutputMap, outputDataFormat, batchId, ctx);
    logTimings(startTime);
    for (INDArray array : arrays) {
        if (array.closeable())
            array.close();
    }
}
Example 5
Source File: ArrayCloseMemoryMgr.java From deeplearning4j with Apache License 2.0
@Override
public void release(@NonNull INDArray array) {
    if (!array.wasClosed() && array.closeable()) {
        array.close();
        log.trace("Closed array (deallocated) - id={}", array.getId());
    }
}
Example 6
Source File: Nd4jValidator.java From deeplearning4j with Apache License 2.0
/**
 * Validate whether the file represents a valid Numpy .npz file to be read with {@link Nd4j#createFromNpzFile(File)}
 *
 * @param f File that should represent a Numpy .npz file written with Numpy savez method
 * @return Result of validation
 */
public static ValidationResult validateNpzFile(@NonNull File f) {
    ValidationResult vr = Nd4jCommonValidator.isValidFile(f, "Numpy .npz File", false);
    if (vr != null && !vr.isValid())
        return vr;

    Map<String, INDArray> m = null;
    try {
        m = Nd4j.createFromNpzFile(f);
    } catch (Throwable t) {
        return ValidationResult.builder()
                .valid(false)
                .formatType("Numpy .npz File")
                .path(Nd4jCommonValidator.getPath(f))
                .issues(Collections.singletonList("File may be corrupt or is not a Numpy .npz file"))
                .exception(t)
                .build();
    } finally {
        //Deallocate immediately
        if (m != null) {
            for (INDArray arr : m.values()) {
                if (arr != null) {
                    arr.close();
                }
            }
        }
    }

    return ValidationResult.builder()
            .valid(true)
            .formatType("Numpy .npz File")
            .path(Nd4jCommonValidator.getPath(f))
            .build();
}
Example 7
Source File: NonInplaceValidationListener.java From deeplearning4j with Apache License 2.0
@Override
public void opExecution(SameDiff sd, At at, MultiDataSet batch, SameDiffOp op, OpContext opContext, INDArray[] outputs) {
    if(op.getOp().isInPlace()){
        //Don't check inplace op
        return;
    }
    MessageDigest md;
    try {
        md = MessageDigest.getInstance("MD5");
    } catch (Throwable t){
        throw new RuntimeException(t);
    }
    for( int i=0; i<opInputs.length; i++ ){
        if(opInputs[i].isEmpty())
            continue;

        //Need to hash - to ensure zero changes to input array
        byte[] before = opInputs[i].data().asBytes();
        INDArray after = this.opInputsOrig[i];
        boolean dealloc = false;
        if(opInputs[i].ordering() != opInputsOrig[i].ordering()
                || !Arrays.equals(opInputs[i].stride(), opInputsOrig[i].stride())
                || opInputs[i].elementWiseStride() != opInputsOrig[i].elementWiseStride()){
            //Clone if required (otherwise fails for views etc)
            after = opInputsOrig[i].dup();
            dealloc = true;
        }
        byte[] afterB = after.data().asBytes();
        byte[] hash1 = md.digest(before);
        byte[] hash2 = md.digest(afterB);

        boolean eq = Arrays.equals(hash1, hash2);
        if(eq){
            passCounter.addAndGet(1);
        } else {
            failCounter.addAndGet(1);
        }

        Preconditions.checkState(eq, "Input array for non-inplace op was modified during execution " +
                "for op %s - input %s", op.getOp().getClass(), i);

        //Deallocate:
        if(dealloc && after.closeable()){
            after.close();
        }
        if(opInputs[i].closeable()){
            opInputs[i].close();
        }
    }
}
Example 8
Source File: ArrayCacheMemoryMgr.java From deeplearning4j with Apache License 2.0
@Override
public void release(@NonNull INDArray array) {
    //Check for multiple releases of the array
    long id = array.getId();
    Preconditions.checkState(!lruCache.contains(id), "Array was released multiple times: id=%s, shape=%ndShape", id, array);

    DataType dt = array.dataType();
    long thisBytes = array.data().length() * dt.width();
    if(array.dataType() == DataType.UTF8) {
        //Don't cache string arrays due to variable length buffers
        if(array.closeable())
            array.close();
    } else if (currentCacheSize + thisBytes > maxCacheBytes) {
        if(thisBytes > maxCacheBytes){
            //Can't store even if we clear everything - too large
            if(array.closeable())
                array.close();
            return;
        }

        //Need to deallocate some arrays to stay under limit - do in "oldest first" order
        Iterator<Long> iter = lruCache.iterator();
        while(currentCacheSize + thisBytes > maxCacheBytes){
            long next = iter.next();
            iter.remove();
            INDArray nextOldest = lruCacheValues.remove(next);
            DataType ndt = nextOldest.dataType();
            long nextBytes = ndt.width() * nextOldest.data().length();
            arrayStores.get(ndt).removeObject(nextOldest);
            currentCacheSize -= nextBytes;

            if(nextOldest.closeable())
                nextOldest.close();
        }

        //After clearing space - can now cache
        cacheArray(array);
    } else {
        //OK to cache
        cacheArray(array);
    }

    //Store in LRU cache for "last used" removal if we exceed cache size
    lruCache.add(array.getId());
    lruCacheValues.put(array.getId(), array);
}
Example 9
Source File: ArraySavingListener.java From deeplearning4j with Apache License 2.0
public static void compare(File dir1, File dir2, double eps) throws Exception {
    File[] files1 = dir1.listFiles();
    File[] files2 = dir2.listFiles();
    Preconditions.checkNotNull(files1, "No files in directory 1: %s", dir1);
    Preconditions.checkNotNull(files2, "No files in directory 2: %s", dir2);
    Preconditions.checkState(files1.length == files2.length, "Different number of files: %s vs %s", files1.length, files2.length);

    Map<String,File> m1 = toMap(files1);
    Map<String,File> m2 = toMap(files2);

    for(File f : files1){
        String name = f.getName();
        String varName = name.substring(name.indexOf('_') + 1, name.length()-4);   //Strip "x_" and ".bin"
        File f2 = m2.get(varName);

        INDArray arr1 = Nd4j.readBinary(f);
        INDArray arr2 = Nd4j.readBinary(f2);

        //TODO String arrays won't work here!
        boolean eq = arr1.equalsWithEps(arr2, eps);
        if(eq){
            System.out.println("Equals: " + varName.replaceAll("__", "/"));
        } else {
            if(arr1.dataType() == DataType.BOOL){
                INDArray xor = Nd4j.exec(new Xor(arr1, arr2));
                int count = xor.castTo(DataType.INT).sumNumber().intValue();
                System.out.println("FAILS: " + varName.replaceAll("__", "/") + " - boolean, # differences = " + count);
                System.out.println("\t" + f.getAbsolutePath());
                System.out.println("\t" + f2.getAbsolutePath());
                xor.close();
            } else {
                INDArray sub = arr1.sub(arr2);
                INDArray diff = Nd4j.math.abs(sub);
                double maxDiff = diff.maxNumber().doubleValue();
                System.out.println("FAILS: " + varName.replaceAll("__", "/") + " - max difference = " + maxDiff);
                System.out.println("\t" + f.getAbsolutePath());
                System.out.println("\t" + f2.getAbsolutePath());
                sub.close();
                diff.close();
            }
        }
        arr1.close();
        arr2.close();
    }
}
Example 10
Source File: SameDiffOutputLayer.java From deeplearning4j with Apache License 2.0
@Override
public Pair<Gradient, INDArray> backpropGradient(INDArray epsilon, LayerWorkspaceMgr workspaceMgr) {
    assertInputSet(true);
    Preconditions.checkState(!layerConf().labelsRequired() || labels != null, "Cannot execute backprop: Labels are not set. " +
            "If labels are not required for this SameDiff output layer, override SameDiffOutputLayer.labelsRequired()" +
            " to return false instead");
    Gradient g = new DefaultGradient();

    INDArray dLdIn;

    try(MemoryWorkspace ws = Nd4j.getWorkspaceManager().scopeOutOfWorkspaces()) {
        if (sameDiff == null) {
            //Usually doInit will be called in forward pass; not necessarily the case in output layers
            // (for efficiency, we skip output layer forward pass in MultiLayerNetwork/ComputationGraph)
            doInit();
        }
        if(sameDiff.getFunction("grad") == null)
            sameDiff.createGradFunction(INPUT_KEY);
    }

    //Configure memory management for SameDiff instance - use DL4J workspaces
    Map<Long,InferenceSession> sessionMap = sameDiff.getFunction("grad").getSessions();
    if(!sessionMap.containsKey(Thread.currentThread().getId())){
        sessionMap.put(Thread.currentThread().getId(), new InferenceSession(sameDiff.getFunction("grad")));
    }
    String wsNameWorking = workspaceMgr.getWorkspaceName(ArrayType.BP_WORKING_MEM);
    String wsNameActGrad = workspaceMgr.getWorkspaceName(ArrayType.ACTIVATION_GRAD);
    WorkspaceConfiguration confWorking = workspaceMgr.getConfiguration(ArrayType.BP_WORKING_MEM);
    WorkspaceConfiguration confOutput = workspaceMgr.getConfiguration(ArrayType.ACTIVATION_GRAD);

    boolean actGradScopedOut = workspaceMgr.isScopedOut(ArrayType.ACTIVATION_GRAD);
    Preconditions.checkState(actGradScopedOut || wsNameActGrad != null, "Activation gradients must have a workspace or be scoped out");
    SessionMemMgr mmgr = new DL4JSameDiffMemoryMgr(wsNameWorking, wsNameActGrad, confWorking, confOutput);
    sessionMap.get(Thread.currentThread().getId()).setMmgr(mmgr);

    if(!sameDiff.hasGradientFunction()) {
        //Create when scoped out, to ensure any arrays are not in WS
        sameDiff.createGradFunction(INPUT_KEY);
    }

    List<String> gradVarNames = new ArrayList<>();
    gradVarNames.addAll(paramTable.keySet());
    gradVarNames.add(INPUT_KEY);

    Map<String,INDArray> phMap = new HashMap<>();
    phMap.put(INPUT_KEY, input);
    phMap.put(LABELS_KEY, labels);

    Map<String,INDArray> grads = sameDiff.calculateGradients(phMap, gradVarNames);

    for(String s : paramTable.keySet() ){
        INDArray sdGrad = grads.get(s);
        INDArray dl4jGrad = gradTable.get(s);
        dl4jGrad.assign(sdGrad);                            //TODO OPTIMIZE THIS
        g.gradientForVariable().put(s, dl4jGrad);
        if(sdGrad.closeable()){
            sdGrad.close();
        }
    }

    dLdIn = grads.get(INPUT_KEY);

    //Clear placeholders and op inputs to ensure no out-of-scope arrays are still referenced anywhere
    sameDiff.clearPlaceholders(true);
    sameDiff.clearOpInputs();

    //TODO there may be a cleaner way to do this...
    if(!actGradScopedOut && !dLdIn.data().getParentWorkspace().getId().equals(wsNameActGrad)){
        dLdIn = workspaceMgr.dup(ArrayType.ACTIVATION_GRAD, dLdIn);
    } else if(actGradScopedOut && dLdIn.isAttached()){
        dLdIn = dLdIn.detach();
    }

    return new Pair<>(g, dLdIn);
}