Java Code Examples for org.nd4j.linalg.primitives.Pair#getSecond()
The following examples show how to use org.nd4j.linalg.primitives.Pair#getSecond().
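For orientation, here is a minimal, self-contained sketch of the Pair API that the examples below rely on. It is not taken from any of the listed projects, and the two-argument constructor used here is an assumption based on Pair being a plain two-element holder; the examples themselves only call getFirst()/getSecond() on pairs returned by other APIs.

import java.util.Arrays;

import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;
import org.nd4j.linalg.primitives.Pair;

public class PairGetSecondSketch {
    public static void main(String[] args) {
        // Pair is a plain two-element container: getFirst() returns the left
        // element, getSecond() the right one.
        INDArray array = Nd4j.ones(2, 3);                                    // hypothetical sample data
        Pair<INDArray, String> labelled = new Pair<>(array, "2x3 of ones");  // assumed two-argument constructor

        INDArray data = labelled.getFirst();    // the array itself
        String label = labelled.getSecond();    // the descriptive label

        System.out.println(label + " -> shape " + Arrays.toString(data.shape()));
    }
}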
Example 1
Source File: TSNEVisualizationExample.java From Java-Deep-Learning-Cookbook with MIT License | 6 votes |
public static void main(String[] args) throws IOException {
    Nd4j.setDataType(DataBuffer.Type.DOUBLE);
    List<String> cacheList = new ArrayList<>();
    File file = new File("words.txt");
    String outputFile = "tsne-standard-coords.csv";

    Pair<InMemoryLookupTable, VocabCache> vectors = WordVectorSerializer.loadTxt(file);
    VocabCache cache = vectors.getSecond();
    INDArray weights = vectors.getFirst().getSyn0();

    for (int i = 0; i < cache.numWords(); i++) {
        cacheList.add(cache.wordAtIndex(i));
    }

    BarnesHutTsne tsne = new BarnesHutTsne.Builder()
            .setMaxIter(100)
            .theta(0.5)
            .normalize(false)
            .learningRate(500)
            .useAdaGrad(false)
            .build();

    tsne.fit(weights);
    tsne.saveAsFile(cacheList, outputFile);
}
Example 2
Source File: SameDiffTests.java From nd4j with Apache License 2.0 | 6 votes |
@Test
public void testExpandSqueezeChain() {
    val origShape = new long[]{3, 4};

    for (int i = 0; i < 3; i++) {
        for (Pair<INDArray, String> p : NDArrayCreationUtil.getAllTestMatricesWithShape(origShape[0], origShape[1], 12345)) {
            INDArray inArr = p.getFirst().muli(100);

            SameDiff sd = SameDiff.create();
            SDVariable in = sd.var("in", inArr);
            SDVariable expand = sd.expandDims(in, i);
            SDVariable squeeze = sd.squeeze(expand, i);

            INDArray out = sd.execAndEndResult();

            String msg = "expand/Squeeze=" + i + ", source=" + p.getSecond();
            assertEquals(msg, out, inArr);  //expand -> squeeze: should be opposite ops
        }
    }
}
Example 3
Source File: NDArrayTestsFortran.java From nd4j with Apache License 2.0 | 6 votes |
@Test
public void testDupAndDupWithOrder() {
    List<Pair<INDArray, String>> testInputs = NDArrayCreationUtil.getAllTestMatricesWithShape(4, 5, 123);
    int count = 0;
    for (Pair<INDArray, String> pair : testInputs) {
        String msg = pair.getSecond();
        INDArray in = pair.getFirst();
        System.out.println("Count " + count);
        INDArray dup = in.dup();
        INDArray dupc = in.dup('c');
        INDArray dupf = in.dup('f');

        assertEquals(msg, in, dup);
        assertEquals(msg, dup.ordering(), (char) Nd4j.order());
        assertEquals(msg, dupc.ordering(), 'c');
        assertEquals(msg, dupf.ordering(), 'f');
        assertEquals(msg, in, dupc);
        assertEquals(msg, in, dupf);

        count++;
    }
}
Example 4
Source File: DelayedMemoryTest.java From nd4j with Apache License 2.0 | 6 votes |
@Test
public void testDelayedTAD1() throws Exception {
    TADManager tadManager = new DeviceTADManager();

    INDArray array = Nd4j.create(128, 256);

    Pair<DataBuffer, DataBuffer> tadBuffers = tadManager.getTADOnlyShapeInfo(array, new int[]{0});

    DataBuffer tadBuffer = tadBuffers.getFirst();
    DataBuffer offBuffer = tadBuffers.getSecond();

    AllocationPoint pointTad = AtomicAllocator.getInstance().getAllocationPoint(tadBuffer);
    AllocationPoint pointOff = AtomicAllocator.getInstance().getAllocationPoint(offBuffer);

    assertEquals(AllocationStatus.CONSTANT, pointTad.getAllocationStatus());
    assertEquals(AllocationStatus.DEVICE, pointOff.getAllocationStatus());
}
Example 5
Source File: GradCheckMisc.java From nd4j with Apache License 2.0 | 6 votes |
@Test
public void testPermuteGradient() {
    int[] origShape = new int[]{3, 4, 5};

    for (int[] perm : new int[][]{{0, 1, 2}, {0, 2, 1}, {1, 0, 2}, {1, 2, 0}, {2, 0, 1}, {2, 1, 0}}) {
        for (Pair<INDArray, String> p : NDArrayCreationUtil.getAll3dTestArraysWithShape(12345, origShape)) {
            INDArray inArr = p.getFirst().muli(100);

            SameDiff sd = SameDiff.create();
            SDVariable in = sd.var("in", inArr);
            SDVariable permute = sd.f().permute(in, perm);
            //Using stdev here: mean/sum would backprop the same gradient for each input...
            SDVariable stdev = sd.standardDeviation("out", permute, true);

            INDArray out = sd.execAndEndResult();
            INDArray expOut = in.getArr().std(true, Integer.MAX_VALUE);
            assertEquals(expOut, out);

            String msg = "permute=" + Arrays.toString(perm) + ", source=" + p.getSecond();
            boolean ok = GradCheckUtil.checkGradients(sd);
            assertTrue(msg, ok);
        }
    }
}
Example 6
Source File: SameDiffTests.java From nd4j with Apache License 2.0 | 6 votes |
@Test
public void testSqueezeExpandChain() {
    val origShape = new long[]{3, 4, 5};

    for (int i = 0; i < 3; i++) {
        val shape = origShape.clone();
        shape[i] = 1;

        for (Pair<INDArray, String> p : NDArrayCreationUtil.getAll3dTestArraysWithShape(12345, shape)) {
            INDArray inArr = p.getFirst().muli(100);

            SameDiff sd = SameDiff.create();
            SDVariable in = sd.var("in", inArr);
            SDVariable squeeze = sd.squeeze(in, i);
            SDVariable expand = sd.expandDims(squeeze, i);

            INDArray out = sd.execAndEndResult();

            String msg = "expand/Squeeze=" + i + ", source=" + p.getSecond();
            assertEquals(msg, out, inArr);  //squeeze -> expand: should be opposite ops
        }
    }
}
Example 7
Source File: PLNetInputOptimizer.java From AILibs with GNU Affero General Public License v3.0 | 5 votes |
private static INDArray computeInputDerivative(PLNetDyadRanker plNet, INDArray input, InputOptimizerLoss loss) {
    MultiLayerNetwork net = plNet.getPlNet();

    INDArray output = net.output(input);
    INDArray lossGradient = Nd4j.create(new double[] { loss.lossGradient(output) });
    net.setInput(input);
    net.feedForward(false, false);
    Pair<Gradient, INDArray> p = net.backpropGradient(lossGradient, null);

    return p.getSecond();
}
Example 8
Source File: Nd4jTest.java From nd4j with Apache License 2.0 | 5 votes |
@Test
public void testExpandDims() {
    final List<Pair<INDArray, String>> testMatricesC = NDArrayCreationUtil.getAllTestMatricesWithShape('c', 3, 5, 0xDEAD);
    final List<Pair<INDArray, String>> testMatricesF = NDArrayCreationUtil.getAllTestMatricesWithShape('f', 7, 11, 0xBEEF);

    final ArrayList<Pair<INDArray, String>> testMatrices = new ArrayList<>(testMatricesC);
    testMatrices.addAll(testMatricesF);

    for (Pair<INDArray, String> testMatrixPair : testMatrices) {
        final String recreation = testMatrixPair.getSecond();
        final INDArray testMatrix = testMatrixPair.getFirst();
        final char ordering = testMatrix.ordering();
        val shape = testMatrix.shape();
        final int rank = testMatrix.rank();

        for (int i = -rank; i <= rank; i++) {
            final INDArray expanded = Nd4j.expandDims(testMatrix, i);
            final String message = "Expanding in Dimension " + i + "; Shape before expanding: " + Arrays.toString(shape)
                    + " " + ordering + " Order; Shape after expanding: " + Arrays.toString(expanded.shape())
                    + " " + expanded.ordering() + "; Input Created via: " + recreation;

            assertEquals(message, 1, expanded.shape()[i < 0 ? i + rank : i]);
            assertEquals(message, testMatrix.ravel(), expanded.ravel());
            assertEquals(message, ordering, expanded.ordering());

            testMatrix.assign(Nd4j.rand(shape));
            assertEquals(message, testMatrix.ravel(), expanded.ravel());
        }
    }
}
Example 9
Source File: Predict.java From dl4j-tutorials with MIT License | 5 votes |
public static void main(String[] args) throws Exception {
    String testPath = "data/test";
    File testDir = new File(testPath);
    File[] files = testDir.listFiles();

    Pair<MultiLayerNetwork, Normalizer> modelAndNormalizer = ModelSerializer
            .restoreMultiLayerNetworkAndNormalizer(new File("model/AlexNet.zip"), false);

    NativeImageLoader imageLoader = new NativeImageLoader(256, 256, 3);

    MultiLayerNetwork network = modelAndNormalizer.getFirst();
    DataNormalization normalizer = (DataNormalization) modelAndNormalizer.getSecond();

    Map<Integer, String> map = new HashMap<>();
    map.put(0, "CITY");
    map.put(1, "DESERT");
    map.put(2, "FARMLAND");
    map.put(3, "LAKE");
    map.put(4, "MOUNTAIN");
    map.put(5, "OCEAN");

    for (File file : files) {
        INDArray indArray = imageLoader.asMatrix(file);
        normalizer.transform(indArray);

        int[] values = network.predict(indArray);
        String label = map.get(values[0]);

        System.out.println(file.getName() + "," + label);
    }
}
Example 10
Source File: NDArrayTestsFortran.java From nd4j with Apache License 2.0 | 5 votes |
@Test
public void testToOffsetZeroCopy() {
    List<Pair<INDArray, String>> testInputs = NDArrayCreationUtil.getAllTestMatricesWithShape(4, 5, 123);

    for (Pair<INDArray, String> pair : testInputs) {
        String msg = pair.getSecond();
        INDArray in = pair.getFirst();
        INDArray dup = Shape.toOffsetZeroCopy(in);
        INDArray dupc = Shape.toOffsetZeroCopy(in, 'c');
        INDArray dupf = Shape.toOffsetZeroCopy(in, 'f');
        INDArray dupany = Shape.toOffsetZeroCopyAnyOrder(in);

        assertEquals(msg, in, dup);
        assertEquals(msg, in, dupc);
        assertEquals(msg, in, dupf);
        assertEquals(msg, dupc.ordering(), 'c');
        assertEquals(msg, dupf.ordering(), 'f');
        assertEquals(msg, in, dupany);

        assertEquals(dup.offset(), 0);
        assertEquals(dupc.offset(), 0);
        assertEquals(dupf.offset(), 0);
        assertEquals(dupany.offset(), 0);
        assertEquals(dup.length(), dup.data().length());
        assertEquals(dupc.length(), dupc.data().length());
        assertEquals(dupf.length(), dupf.data().length());
        assertEquals(dupany.length(), dupany.data().length());
    }
}
Example 11
Source File: SameDiffTests.java From nd4j with Apache License 2.0 | 5 votes |
@Test
public void testSqueezeDims() {
    val origShape = new long[]{3, 4, 5};

    for (int i = 0; i < 3; i++) {
        val shape = origShape.clone();
        shape[i] = 1;

        for (Pair<INDArray, String> p : NDArrayCreationUtil.getAll3dTestArraysWithShape(12345, shape)) {
            INDArray inArr = p.getFirst().muli(100);

            SameDiff sd = SameDiff.create();
            SDVariable in = sd.var("in", inArr);
            SDVariable squeeze = sd.f().squeeze(in, i);

            INDArray out = sd.execAndEndResult();

            INDArray expOut;
            switch (i) {
                case 0:
                    expOut = inArr.dup('c').reshape('c', origShape[1], origShape[2]);
                    break;
                case 1:
                    expOut = inArr.dup('c').reshape('c', origShape[0], origShape[2]);
                    break;
                case 2:
                    expOut = inArr.dup('c').reshape('c', origShape[0], origShape[1]);
                    break;
                default:
                    throw new RuntimeException();
            }

            String msg = "squeezeDim=" + i + ", source=" + p.getSecond();
            assertEquals(msg, out, expOut);
        }
    }
}
Example 12
Source File: Nd4jTestsComparisonC.java From nd4j with Apache License 2.0 | 4 votes |
private static String getGemmErrorMsg(int i, int j, boolean transposeA, boolean transposeB, double alpha,
        double beta, Pair<INDArray, String> first, Pair<INDArray, String> second) {
    return i + "," + j + " - gemm(tA=" + transposeA + ",tB=" + transposeB + ",alpha=" + alpha + ",beta=" + beta
            + "). A=" + first.getSecond() + ", B=" + second.getSecond();
}
Example 13
Source File: Nd4jTestsComparisonC.java From nd4j with Apache License 2.0 | 4 votes |
private static String getTestWithOpsErrorMsg(int i, int j, String op, Pair<INDArray, String> first,
        Pair<INDArray, String> second) {
    return i + "," + j + " - " + first.getSecond() + "." + op + "(" + second.getSecond() + ")";
}
Example 14
Source File: Nd4jTestsComparisonFortran.java From nd4j with Apache License 2.0 | 4 votes |
private static String getGemmErrorMsg(int i, int j, boolean transposeA, boolean transposeB, double alpha,
        double beta, Pair<INDArray, String> first, Pair<INDArray, String> second) {
    return i + "," + j + " - gemm(tA=" + transposeA + ",tB= " + transposeB + ",alpha=" + alpha + ",beta= " + beta
            + "). A=" + first.getSecond() + ", B=" + second.getSecond();
}
Example 15
Source File: CudaGridExecutioner.java From nd4j with Apache License 2.0 | 4 votes |
/**
 * This method returns Op as set of required pointers for it
 *
 * @param op
 * @param dimensions
 * @return
 */
protected GridPointers pointerizeOp(Op op, int... dimensions) {
    GridPointers pointers = new GridPointers(op, dimensions);

    AtomicAllocator allocator = AtomicAllocator.getInstance();

    // CudaContext context = AtomicAllocator.getInstance().getFlowController().prepareAction(op.z(), op.x(), op.y());

    // FIXME: do not leave it as is
    CudaContext context = (CudaContext) allocator.getDeviceContext().getContext();

    pointers.setX(allocator.getPointer(op.x(), context));
    pointers.setXShapeInfo(allocator.getPointer(op.x().shapeInfoDataBuffer(), context));
    pointers.setZ(allocator.getPointer(op.z(), context));
    pointers.setZShapeInfo(allocator.getPointer(op.z().shapeInfoDataBuffer(), context));
    pointers.setZLength(op.z().length());

    if (op.y() != null) {
        pointers.setY(allocator.getPointer(op.y(), context));
        pointers.setYShapeInfo(allocator.getPointer(op.y().shapeInfoDataBuffer(), context));
    }

    if (dimensions != null && dimensions.length > 0) {
        DataBuffer dimensionBuffer = Nd4j.getConstantHandler().getConstantBuffer(dimensions);
        pointers.setDimensions(allocator.getPointer(dimensionBuffer, context));
        pointers.setDimensionsLength(dimensions.length);
    }

    // we build TADs
    if (dimensions != null && dimensions.length > 0) {
        Pair<DataBuffer, DataBuffer> tadBuffers = tadManager.getTADOnlyShapeInfo(op.x(), dimensions);

        Pointer devTadShapeInfo = AtomicAllocator.getInstance().getPointer(tadBuffers.getFirst(), context);
        Pointer devTadOffsets = tadBuffers.getSecond() == null ? null
                : AtomicAllocator.getInstance().getPointer(tadBuffers.getSecond(), context);

        // we don't really care, if tadOffsets will be nulls
        pointers.setTadShape(devTadShapeInfo);
        pointers.setTadOffsets(devTadOffsets);
    }

    return pointers;
}
Example 16
Source File: Nd4jTestsComparisonFortran.java From nd4j with Apache License 2.0 | 4 votes |
private static String getTestWithOpsErrorMsg(int i, int j, String op, Pair<INDArray, String> first,
        Pair<INDArray, String> second) {
    return i + "," + j + " - " + first.getSecond() + "." + op + "(" + second.getSecond() + ")";
}
Example 17
Source File: DeviceTADManager.java From nd4j with Apache License 2.0 | 4 votes |
@Override
public Pair<DataBuffer, DataBuffer> getTADOnlyShapeInfo(INDArray array, int[] dimension) {
    /*
        so, we check, if we have things cached.
        If we don't - we just create new TAD shape, and push it to constant memory
    */
    if (dimension != null && dimension.length > 1)
        Arrays.sort(dimension);

    Integer deviceId = AtomicAllocator.getInstance().getDeviceId();

    //log.info("Requested TAD for device [{}], dimensions: [{}]", deviceId, Arrays.toString(dimension));

    //extract the dimensions and shape buffer for comparison
    TadDescriptor descriptor = new TadDescriptor(array, dimension);

    if (!tadCache.get(deviceId).containsKey(descriptor)) {
        log.trace("Creating new TAD...");
        //create the TAD with the shape information and corresponding offsets
        //note that we use native code to get access to the shape information.
        Pair<DataBuffer, DataBuffer> buffers = super.getTADOnlyShapeInfo(array, dimension);

        /**
         * Store the buffers in constant memory.
         * The main implementation of this is cuda right now.
         *
         * Explanation from: http://cuda-programming.blogspot.jp/2013/01/what-is-constant-memory-in-cuda.html
         * The CUDA language makes available another kind of memory known as constant memory. As the name may
         * indicate, we use constant memory for data that will not change over the course of a kernel execution.
         *
         * Why Constant Memory? NVIDIA hardware provides 64KB of constant memory that it treats differently than
         * it treats standard global memory. In some situations, using constant memory rather than global memory
         * will reduce the required memory bandwidth.
         *
         * NOTE HERE FOR US: We use 48kb of it using these methods.
         *
         * Note also that we use the {@link AtomicAllocator} which is the cuda memory manager
         * for moving the current host space data buffer to constant memory.
         *
         * We do this for device access to shape information.
         */
        if (buffers.getFirst() != array.shapeInfoDataBuffer())
            AtomicAllocator.getInstance().moveToConstant(buffers.getFirst());
        /**
         * @see {@link org.nd4j.jita.constant.ProtectedCudaConstantHandler}
         */
        if (buffers.getSecond() != null)
            AtomicAllocator.getInstance().moveToConstant(buffers.getSecond());

        // so, at this point we have buffer valid on host side.
        // And we just need to replace DevicePointer with constant pointer
        tadCache.get(deviceId).put(descriptor, buffers);

        bytes.addAndGet((buffers.getFirst().length() * 4));

        if (buffers.getSecond() != null)
            bytes.addAndGet(buffers.getSecond().length() * 8);

        log.trace("Using TAD from cache...");
    }

    return tadCache.get(deviceId).get(descriptor);
}
Example 18
Source File: BaseSparseNDArray.java From nd4j with Apache License 2.0 | 4 votes |
protected void setShapeInformation(Pair<DataBuffer, long[]> shapeInfo) {
    this.shapeInformation = shapeInfo.getFirst();
    this.javaShapeInformation = shapeInfo.getSecond();
}
Example 19
Source File: GradCheckMisc.java From nd4j with Apache License 2.0 | 4 votes |
@Test
public void testExpandDimsGradient() {
    val origShape = new long[]{3, 4};

    boolean first = true;
    for (int i = 0; i < 3; i++) {

        long[] expExpandShape;
        switch (i) {
            case 0:
                expExpandShape = new long[]{1, 3, 4};
                break;
            case 1:
                expExpandShape = new long[]{3, 1, 4};
                break;
            case 2:
                expExpandShape = new long[]{3, 4, 1};
                break;
            default:
                throw new RuntimeException();
        }

        for (Pair<INDArray, String> p : NDArrayCreationUtil.getAllTestMatricesWithShape(origShape[0], origShape[1], 12345)) {
            INDArray inArr = p.getFirst().muli(100);

            SameDiff sd = SameDiff.create();
            SDVariable in = sd.var("in", inArr);
            SDVariable expand = sd.f().expandDims(in, i);
            //Using stdev here: mean/sum would backprop the same gradient for each input...
            SDVariable stdev = sd.standardDeviation("out", expand, true);

            INDArray out = sd.execAndEndResult();
            INDArray expOut = in.getArr().std(true, Integer.MAX_VALUE);

            assertEquals(expOut, out);
            assertArrayEquals(expExpandShape, expand.getArr().shape());
            INDArray expExpand = inArr.dup('c').reshape(expExpandShape);
            assertEquals(expExpand, expand.getArr());

            String msg = "expandDim=" + i + ", source=" + p.getSecond();
            log.info("Starting: " + msg);

            boolean ok = GradCheckUtil.checkGradients(sd);
            assertTrue(msg, ok);
        }
    }
}