Java Code Examples for org.nd4j.linalg.factory.Nd4j#alloc()
The following examples show how to use org.nd4j.linalg.factory.Nd4j#alloc, the static field that holds the global DataBuffer.AllocationMode. All examples are taken from the nd4j and deeplearning4j test suites.
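Before the examples, here is a minimal sketch of the pattern they share: the allocation mode is switched by assigning to Nd4j.alloc (some of the tests also call AllocUtil.setAllocationModeForContext) before new buffers or arrays are created. The classes and calls are the ones used in the examples below; the import paths follow the usual nd4j layout of that era and should be treated as assumptions, and the snippet is an illustrative sketch rather than the library's documented API.

import org.nd4j.linalg.api.buffer.DataBuffer;        // assumed import path
import org.nd4j.linalg.api.buffer.util.AllocUtil;    // assumed import path
import org.nd4j.linalg.factory.Nd4j;

public class AllocModeSketch {
    public static void main(String[] args) {
        // Buffers created now use whatever allocation mode is currently configured
        DataBuffer first = Nd4j.createBuffer(5);

        // Switch to DIRECT (off-heap) allocation before creating further buffers
        Nd4j.alloc = DataBuffer.AllocationMode.DIRECT;
        AllocUtil.setAllocationModeForContext(DataBuffer.AllocationMode.DIRECT);
        DataBuffer second = Nd4j.createBuffer(5);

        // allocationMode() is the accessor the convolution tests below assert against
        System.out.println(first.allocationMode() + " -> " + second.allocationMode());
    }
}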
Example 1
Source File: DoubleDataBufferTest.java From nd4j with Apache License 2.0

@Test
public void testSerialization() {
    DataBuffer buf = Nd4j.createBuffer(5);
    String fileName = "buf.ser";
    File file = new File(fileName);
    file.deleteOnExit();

    SerializationUtils.saveObject(buf, file);
    DataBuffer buf2 = SerializationUtils.readObject(file);
    //assertEquals(buf, buf2);
    assertArrayEquals(buf.asDouble(), buf2.asDouble(), 0.001);

    Nd4j.alloc = DataBuffer.AllocationMode.DIRECT;
    buf = Nd4j.createBuffer(5);
    file.deleteOnExit();
    SerializationUtils.saveObject(buf, file);
    buf2 = SerializationUtils.readObject(file);
    //assertEquals(buf, buf2);
    assertArrayEquals(buf.asDouble(), buf2.asDouble(), 0.001);
}
Example 2
Source File: FloatDataBufferTest.java From nd4j with Apache License 2.0

@Test
public void testSerialization() {
    DataBuffer buf = Nd4j.createBuffer(5);
    String fileName = "buf.ser";
    File file = new File(fileName);
    file.deleteOnExit();

    SerializationUtils.saveObject(buf, file);
    DataBuffer buf2 = SerializationUtils.readObject(file);
    //assertEquals(buf, buf2);
    assertArrayEquals(buf.asFloat(), buf2.asFloat(), 0.0001f);

    Nd4j.alloc = DataBuffer.AllocationMode.DIRECT;
    buf = Nd4j.createBuffer(5);
    file.deleteOnExit();
    SerializationUtils.saveObject(buf, file);
    buf2 = SerializationUtils.readObject(file);
    //assertEquals(buf, buf2);
    assertArrayEquals(buf.asFloat(), buf2.asFloat(), 0.0001f);
}
Example 3
Source File: DoubleDataBufferTest.java From deeplearning4j with Apache License 2.0

@Test
public void testSerialization() throws Exception {
    File dir = testDir.newFolder();
    DataBuffer buf = Nd4j.createBuffer(5);
    String fileName = "buf.ser";
    File file = new File(dir, fileName);
    file.deleteOnExit();

    SerializationUtils.saveObject(buf, file);
    DataBuffer buf2 = SerializationUtils.readObject(file);
    //assertEquals(buf, buf2);
    assertArrayEquals(buf.asDouble(), buf2.asDouble(), 0.001);

    Nd4j.alloc = DataBuffer.AllocationMode.DIRECT;
    buf = Nd4j.createBuffer(5);
    file.deleteOnExit();
    SerializationUtils.saveObject(buf, file);
    buf2 = SerializationUtils.readObject(file);
    //assertEquals(buf, buf2);
    assertArrayEquals(buf.asDouble(), buf2.asDouble(), 0.001);
}
Example 4
Source File: FloatDataBufferTest.java From deeplearning4j with Apache License 2.0

@Test
public void testSerialization() throws Exception {
    File dir = testDir.newFolder();
    DataBuffer buf = Nd4j.createBuffer(5);
    String fileName = "buf.ser";
    File file = new File(dir, fileName);
    file.deleteOnExit();

    SerializationUtils.saveObject(buf, file);
    DataBuffer buf2 = SerializationUtils.readObject(file);
    //assertEquals(buf, buf2);
    assertArrayEquals(buf.asFloat(), buf2.asFloat(), 0.0001f);

    Nd4j.alloc = DataBuffer.AllocationMode.DIRECT;
    buf = Nd4j.createBuffer(5);
    file.deleteOnExit();
    SerializationUtils.saveObject(buf, file);
    buf2 = SerializationUtils.readObject(file);
    //assertEquals(buf, buf2);
    assertArrayEquals(buf.asFloat(), buf2.asFloat(), 0.0001f);
}
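All four serialization examples follow the same round trip: write the buffer to disk with SerializationUtils.saveObject, read it back with SerializationUtils.readObject, and compare the contents element by element (the direct assertEquals on the two buffers is commented out in every example). The sketch below shows that round trip outside a test class; the SerializationUtils import path has moved between nd4j versions, so treat it and the file name as assumptions.

import java.io.File;
import java.util.Arrays;

import org.nd4j.linalg.api.buffer.DataBuffer;
import org.nd4j.linalg.factory.Nd4j;
import org.nd4j.linalg.util.SerializationUtils;   // assumed location; varies across nd4j versions

public class BufferRoundTrip {
    public static void main(String[] args) {
        DataBuffer original = Nd4j.createBuffer(5);

        File file = new File("buf.ser");
        file.deleteOnExit();

        SerializationUtils.saveObject(original, file);              // serialize to disk
        DataBuffer restored = SerializationUtils.readObject(file);  // deserialize

        // Compare element-wise, as the tests above do, rather than relying on DataBuffer equality
        System.out.println(Arrays.equals(original.asDouble(), restored.asDouble()));
    }
}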
Example 5
Source File: ConvolutionTests.java From nd4j with Apache License 2.0

@Test
@Ignore
public void testCompareIm2ColImpl() {
    int[] miniBatches = {1, 3, 5};
    int[] depths = {1, 3, 5};
    int[] inHeights = {5, 21};
    int[] inWidths = {5, 21};
    int[] strideH = {1, 2};
    int[] strideW = {1, 2};
    int[] sizeW = {1, 2, 3};
    int[] sizeH = {1, 2, 3};
    int[] padH = {0, 1, 2};
    int[] padW = {0, 1, 2};
    boolean[] coverall = {false, true};

    DataBuffer.Type[] types = new DataBuffer.Type[] {DataBuffer.Type.FLOAT, DataBuffer.Type.DOUBLE,
                    DataBuffer.Type.FLOAT, DataBuffer.Type.DOUBLE};
    DataBuffer.AllocationMode[] modes = new DataBuffer.AllocationMode[] {DataBuffer.AllocationMode.HEAP,
                    DataBuffer.AllocationMode.HEAP, DataBuffer.AllocationMode.DIRECT,
                    DataBuffer.AllocationMode.DIRECT};

    String factoryClassName = Nd4j.factory().getClass().toString().toLowerCase();
    if (factoryClassName.contains("jcublas") || factoryClassName.contains("cuda")) {
        //Only test direct for CUDA; test all for CPU
        types = new DataBuffer.Type[] {DataBuffer.Type.FLOAT, DataBuffer.Type.DOUBLE};
        modes = new DataBuffer.AllocationMode[] {DataBuffer.AllocationMode.DIRECT, DataBuffer.AllocationMode.DIRECT};
    }

    DataBuffer.Type initialType = Nd4j.dataType();
    for (int i = 0; i < types.length; i++) {
        DataBuffer.Type type = types[i];
        DataBuffer.AllocationMode mode = modes[i];

        DataTypeUtil.setDTypeForContext(type);
        Nd4j.alloc = mode;
        AllocUtil.setAllocationModeForContext(mode);

        for (int m : miniBatches) {
            for (int d : depths) {
                for (int h : inHeights) {
                    for (int w : inWidths) {
                        for (int sh : strideH) {
                            for (int sw : strideW) {
                                for (int kh : sizeH) {
                                    for (int kw : sizeW) {
                                        for (int ph : padH) {
                                            for (int pw : padW) {
                                                if ((w - kw + 2 * pw) % sw != 0 || (h - kh + 2 * ph) % sh != 0)
                                                    continue; //(w-kw+2*pw)/sw + 1 is not an integer, i.e., number of outputs doesn't fit

                                                System.out.println("Running " + m + " " + d + " " + h + " " + w);
                                                for (boolean cAll : coverall) {
                                                    INDArray in = Nd4j.rand(new int[] {m, d, h, w});
                                                    //assertEquals(in.data().allocationMode(), mode);
                                                    //assertEquals(in.data().dataType(), opType);

                                                    INDArray outOrig = OldConvolution.im2col(in, kh, kw, sh, sw, ph, pw, -1, cAll); //Old implementation
                                                    INDArray outNew = Convolution.im2col(in, kh, kw, sh, sw, ph, pw, cAll); //Current implementation

                                                    assertArrayEquals(outOrig.data().asFloat(), outNew.data().asFloat(), 0.01f);
                                                    assertEquals(outOrig, outNew);
                                                }
                                            }
                                        }
                                    }
                                }
                            }
                        }
                    }
                }
            }
        }
    }
    DataTypeUtil.setDTypeForContext(initialType);
}
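The guard at the top of the innermost loop skips any parameter combination whose output size would not be a whole number: for input width w, kernel width kw, stride sw and padding pw, the number of output positions is (w - kw + 2*pw) / sw + 1, and the division only works out when (w - kw + 2*pw) is a multiple of sw (likewise for the height dimension). A small self-contained illustration of that check follows; the class and method names here are made up for the example.

class OutputSizeCheck {
    // outW = (w - kw + 2*pw) / sw + 1 is only a whole number when (w - kw + 2*pw) % sw == 0
    static boolean fitsExactly(int w, int kw, int sw, int pw) {
        return (w - kw + 2 * pw) % sw == 0;
    }

    public static void main(String[] args) {
        System.out.println(fitsExactly(21, 2, 2, 0)); // false: (21 - 2) % 2 == 1, so the tests skip this combination
        System.out.println(fitsExactly(21, 3, 2, 0)); // true:  (21 - 3) / 2 + 1 == 10 output positions
    }
}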
Example 6
Source File: ConvolutionTests.java From nd4j with Apache License 2.0

@Test
@Ignore
public void testCompareIm2Col() throws Exception {
    int[] miniBatches = {1, 3, 5};
    int[] depths = {1, 3, 5};
    int[] inHeights = {5, 21};
    int[] inWidths = {5, 21};
    int[] strideH = {1, 2};
    int[] strideW = {1, 2};
    int[] sizeW = {1, 2, 3};
    int[] sizeH = {1, 2, 3};
    int[] padH = {0, 1, 2};
    int[] padW = {0, 1, 2};

    DataBuffer.Type[] types = new DataBuffer.Type[] {DataBuffer.Type.FLOAT, DataBuffer.Type.DOUBLE,
                    DataBuffer.Type.FLOAT, DataBuffer.Type.DOUBLE};
    DataBuffer.AllocationMode[] modes = new DataBuffer.AllocationMode[] {DataBuffer.AllocationMode.HEAP,
                    DataBuffer.AllocationMode.HEAP, DataBuffer.AllocationMode.DIRECT,
                    DataBuffer.AllocationMode.DIRECT};

    String factoryClassName = Nd4j.factory().getClass().toString().toLowerCase();
    if (factoryClassName.contains("jcublas") || factoryClassName.contains("cuda")) {
        //Only test direct for CUDA; test all for CPU
        types = new DataBuffer.Type[] {DataBuffer.Type.FLOAT, DataBuffer.Type.DOUBLE};
        modes = new DataBuffer.AllocationMode[] {DataBuffer.AllocationMode.DIRECT, DataBuffer.AllocationMode.DIRECT};
    }

    DataBuffer.Type initialType = Nd4j.dataType();
    for (int i = 0; i < types.length; i++) {
        DataBuffer.Type type = types[i];
        DataBuffer.AllocationMode mode = modes[i];

        DataTypeUtil.setDTypeForContext(type);
        Nd4j.alloc = mode;

        for (int m : miniBatches) {
            for (int d : depths) {
                for (int h : inHeights) {
                    for (int w : inWidths) {
                        for (int sh : strideH) {
                            for (int sw : strideW) {
                                for (int kh : sizeH) {
                                    for (int kw : sizeW) {
                                        for (int ph : padH) {
                                            for (int pw : padW) {
                                                System.out.println("Before assertion");
                                                if ((w - kw + 2 * pw) % sw != 0 || (h - kh + 2 * ph) % sh != 0)
                                                    continue; //(w-kw+2*pw)/sw + 1 is not an integer, i.e., number of outputs doesn't fit

                                                INDArray in = Nd4j.rand(new int[] {m, d, h, w});
                                                assertEquals(in.data().allocationMode(), mode);
                                                assertEquals(in.data().dataType(), type);

                                                INDArray im2col = Convolution.im2col(in, kh, kw, sh, sw, ph, pw, false); //Cheating, to get correct shape for input

                                                INDArray imgOutOld = OldConvolution.col2im(im2col, sh, sw, ph, pw, h, w);
                                                INDArray imgOutNew = Convolution.col2im(im2col, sh, sw, ph, pw, h, w);
                                                System.out.println("F order test");
                                                System.out.println(imgOutOld);
                                                System.out.println(imgOutNew);
                                                assertEquals(imgOutOld, imgOutNew);
                                            }
                                        }
                                    }
                                }
                            }
                        }
                    }
                }
            }
        }
    }
    DataTypeUtil.setDTypeForContext(initialType);
}
Example 7
Source File: ConvolutionTestsC.java From nd4j with Apache License 2.0

@Test
@Ignore
public void testCompareIm2ColImpl() {
    int[] miniBatches = {1, 3, 5};
    int[] depths = {1, 3, 5};
    int[] inHeights = {5, 21};
    int[] inWidths = {5, 21};
    int[] strideH = {1, 2};
    int[] strideW = {1, 2};
    int[] sizeW = {1, 2, 3};
    int[] sizeH = {1, 2, 3};
    int[] padH = {0, 1, 2};
    int[] padW = {0, 1, 2};
    boolean[] coverall = {false, true};

    DataBuffer.Type[] types = new DataBuffer.Type[] {DataBuffer.Type.FLOAT, DataBuffer.Type.DOUBLE,
                    DataBuffer.Type.FLOAT, DataBuffer.Type.DOUBLE};
    DataBuffer.AllocationMode[] modes = new DataBuffer.AllocationMode[] {DataBuffer.AllocationMode.HEAP,
                    DataBuffer.AllocationMode.HEAP, DataBuffer.AllocationMode.DIRECT,
                    DataBuffer.AllocationMode.DIRECT};

    String factoryClassName = Nd4j.factory().getClass().toString().toLowerCase();
    if (factoryClassName.contains("jcublas") || factoryClassName.contains("cuda")) {
        //Only test direct for CUDA; test all for CPU
        types = new DataBuffer.Type[] {DataBuffer.Type.FLOAT, DataBuffer.Type.DOUBLE};
        modes = new DataBuffer.AllocationMode[] {DataBuffer.AllocationMode.DIRECT, DataBuffer.AllocationMode.DIRECT};
    }

    DataBuffer.Type initialType = Nd4j.dataType();
    for (int i = 0; i < types.length; i++) {
        DataBuffer.Type type = types[i];
        DataBuffer.AllocationMode mode = modes[i];

        DataTypeUtil.setDTypeForContext(type);
        Nd4j.alloc = mode;
        AllocUtil.setAllocationModeForContext(mode);

        for (int m : miniBatches) {
            for (int d : depths) {
                for (int h : inHeights) {
                    for (int w : inWidths) {
                        for (int sh : strideH) {
                            for (int sw : strideW) {
                                for (int kh : sizeH) {
                                    for (int kw : sizeW) {
                                        for (int ph : padH) {
                                            for (int pw : padW) {
                                                if ((w - kw + 2 * pw) % sw != 0 || (h - kh + 2 * ph) % sh != 0)
                                                    continue; //(w-kw+2*pw)/sw + 1 is not an integer, i.e., number of outputs doesn't fit

                                                System.out.println("Running " + m + " " + d + " " + h + " " + w);
                                                for (boolean cAll : coverall) {
                                                    INDArray in = Nd4j.rand(new int[] {m, d, h, w});
                                                    //assertEquals(in.data().allocationMode(), mode);
                                                    //assertEquals(in.data().dataType(), opType);

                                                    INDArray outOrig = OldConvolution.im2col(in, kh, kw, sh, sw, ph, pw, -1, cAll); //Old implementation
                                                    INDArray outNew = Convolution.im2col(in, kh, kw, sh, sw, ph, pw, cAll); //Current implementation

                                                    assertEquals(outOrig, outNew);
                                                }
                                            }
                                        }
                                    }
                                }
                            }
                        }
                    }
                }
            }
        }
    }
    DataTypeUtil.setDTypeForContext(initialType);
}
Example 8
Source File: ConvolutionTests.java From deeplearning4j with Apache License 2.0

@Test
@Ignore
public void testCompareIm2ColImpl() {
    int[] miniBatches = {1, 3, 5};
    int[] depths = {1, 3, 5};
    int[] inHeights = {5, 21};
    int[] inWidths = {5, 21};
    int[] strideH = {1, 2};
    int[] strideW = {1, 2};
    int[] sizeW = {1, 2, 3};
    int[] sizeH = {1, 2, 3};
    int[] padH = {0, 1, 2};
    int[] padW = {0, 1, 2};
    boolean[] coverall = {false, true};

    DataType[] types = new DataType[] {DataType.FLOAT, DataType.FLOAT, DataType.FLOAT, DataType.FLOAT};
    DataBuffer.AllocationMode[] modes = new DataBuffer.AllocationMode[] {DataBuffer.AllocationMode.HEAP,
                    DataBuffer.AllocationMode.HEAP, DataBuffer.AllocationMode.DIRECT,
                    DataBuffer.AllocationMode.DIRECT};

    String factoryClassName = Nd4j.factory().getClass().toString().toLowerCase();
    if (factoryClassName.contains("jcublas") || factoryClassName.contains("cuda")) {
        //Only test direct for CUDA; test all for CPU
        types = new DataType[] {DataType.FLOAT, DataType.FLOAT};
        modes = new DataBuffer.AllocationMode[] {DataBuffer.AllocationMode.DIRECT, DataBuffer.AllocationMode.DIRECT};
    }

    DataType initialType = Nd4j.dataType();
    for (int i = 0; i < types.length; i++) {
        DataType type = types[i];
        DataBuffer.AllocationMode mode = modes[i];

        DataTypeUtil.setDTypeForContext(type);
        Nd4j.alloc = mode;
        AllocUtil.setAllocationModeForContext(mode);

        for (int m : miniBatches) {
            for (int d : depths) {
                for (int h : inHeights) {
                    for (int w : inWidths) {
                        for (int sh : strideH) {
                            for (int sw : strideW) {
                                for (int kh : sizeH) {
                                    for (int kw : sizeW) {
                                        for (int ph : padH) {
                                            for (int pw : padW) {
                                                if ((w - kw + 2 * pw) % sw != 0 || (h - kh + 2 * ph) % sh != 0)
                                                    continue; //(w-kw+2*pw)/sw + 1 is not an integer, i.e., number of outputs doesn't fit

                                                System.out.println("Running " + m + " " + d + " " + h + " " + w);
                                                for (boolean cAll : coverall) {
                                                    INDArray in = Nd4j.rand(new int[] {m, d, h, w});
                                                    //assertEquals(in.data().allocationMode(), mode);
                                                    //assertEquals(in.data().dataType(), opType);

                                                    INDArray outOrig = OldConvolution.im2col(in, kh, kw, sh, sw, ph, pw, -1, cAll); //Old implementation
                                                    INDArray outNew = Convolution.im2col(in, kh, kw, sh, sw, ph, pw, cAll); //Current implementation

                                                    assertArrayEquals(outOrig.data().asFloat(), outNew.data().asFloat(), 0.01f);
                                                    assertEquals(outOrig, outNew);
                                                }
                                            }
                                        }
                                    }
                                }
                            }
                        }
                    }
                }
            }
        }
    }
    DataTypeUtil.setDTypeForContext(initialType);
}
Example 9
Source File: ConvolutionTests.java From deeplearning4j with Apache License 2.0

@Test
@Ignore
public void testCompareIm2Col() {
    int[] miniBatches = {1, 3, 5};
    int[] depths = {1, 3, 5};
    int[] inHeights = {5, 21};
    int[] inWidths = {5, 21};
    int[] strideH = {1, 2};
    int[] strideW = {1, 2};
    int[] sizeW = {1, 2, 3};
    int[] sizeH = {1, 2, 3};
    int[] padH = {0, 1, 2};
    int[] padW = {0, 1, 2};

    DataType[] types = new DataType[] {DataType.FLOAT, DataType.FLOAT, DataType.FLOAT, DataType.FLOAT};
    DataBuffer.AllocationMode[] modes = new DataBuffer.AllocationMode[] {DataBuffer.AllocationMode.HEAP,
                    DataBuffer.AllocationMode.HEAP, DataBuffer.AllocationMode.DIRECT,
                    DataBuffer.AllocationMode.DIRECT};

    String factoryClassName = Nd4j.factory().getClass().toString().toLowerCase();
    if (factoryClassName.contains("jcublas") || factoryClassName.contains("cuda")) {
        //Only test direct for CUDA; test all for CPU
        types = new DataType[] {DataType.FLOAT, DataType.FLOAT};
        modes = new DataBuffer.AllocationMode[] {DataBuffer.AllocationMode.DIRECT, DataBuffer.AllocationMode.DIRECT};
    }

    DataType initialType = Nd4j.dataType();
    for (int i = 0; i < types.length; i++) {
        DataType type = types[i];
        DataBuffer.AllocationMode mode = modes[i];

        DataTypeUtil.setDTypeForContext(type);
        Nd4j.alloc = mode;

        for (int m : miniBatches) {
            for (int d : depths) {
                for (int h : inHeights) {
                    for (int w : inWidths) {
                        for (int sh : strideH) {
                            for (int sw : strideW) {
                                for (int kh : sizeH) {
                                    for (int kw : sizeW) {
                                        for (int ph : padH) {
                                            for (int pw : padW) {
                                                System.out.println("Before assertion");
                                                if ((w - kw + 2 * pw) % sw != 0 || (h - kh + 2 * ph) % sh != 0)
                                                    continue; //(w-kw+2*pw)/sw + 1 is not an integer, i.e., number of outputs doesn't fit

                                                INDArray in = Nd4j.rand(new int[] {m, d, h, w});
                                                assertEquals(in.data().allocationMode(), mode);
                                                assertEquals(in.data().dataType(), type);

                                                INDArray im2col = Convolution.im2col(in, kh, kw, sh, sw, ph, pw, false); //Cheating, to get correct shape for input

                                                INDArray imgOutOld = OldConvolution.col2im(im2col, sh, sw, ph, pw, h, w);
                                                INDArray imgOutNew = Convolution.col2im(im2col, sh, sw, ph, pw, h, w);
                                                System.out.println("F order test");
                                                System.out.println(imgOutOld);
                                                System.out.println(imgOutNew);
                                                assertEquals(imgOutOld, imgOutNew);
                                            }
                                        }
                                    }
                                }
                            }
                        }
                    }
                }
            }
        }
    }
    DataTypeUtil.setDTypeForContext(initialType);
}
Example 10
Source File: ConvolutionTestsC.java From deeplearning4j with Apache License 2.0

@Test
@Ignore
public void testCompareIm2ColImpl() {
    int[] miniBatches = {1, 3, 5};
    int[] depths = {1, 3, 5};
    int[] inHeights = {5, 21};
    int[] inWidths = {5, 21};
    int[] strideH = {1, 2};
    int[] strideW = {1, 2};
    int[] sizeW = {1, 2, 3};
    int[] sizeH = {1, 2, 3};
    int[] padH = {0, 1, 2};
    int[] padW = {0, 1, 2};
    boolean[] coverall = {false, true};

    DataType[] types = new DataType[] {DataType.FLOAT, DataType.DOUBLE, DataType.FLOAT, DataType.DOUBLE};
    DataBuffer.AllocationMode[] modes = new DataBuffer.AllocationMode[] {DataBuffer.AllocationMode.HEAP,
                    DataBuffer.AllocationMode.HEAP, DataBuffer.AllocationMode.DIRECT,
                    DataBuffer.AllocationMode.DIRECT};

    String factoryClassName = Nd4j.factory().getClass().toString().toLowerCase();
    if (factoryClassName.contains("jcublas") || factoryClassName.contains("cuda")) {
        //Only test direct for CUDA; test all for CPU
        types = new DataType[] {DataType.FLOAT, DataType.DOUBLE};
        modes = new DataBuffer.AllocationMode[] {DataBuffer.AllocationMode.DIRECT, DataBuffer.AllocationMode.DIRECT};
    }

    DataType initialType = Nd4j.dataType();
    for (int i = 0; i < types.length; i++) {
        DataType type = types[i];
        DataBuffer.AllocationMode mode = modes[i];

        DataTypeUtil.setDTypeForContext(type);
        Nd4j.alloc = mode;
        AllocUtil.setAllocationModeForContext(mode);

        for (int m : miniBatches) {
            for (int d : depths) {
                for (int h : inHeights) {
                    for (int w : inWidths) {
                        for (int sh : strideH) {
                            for (int sw : strideW) {
                                for (int kh : sizeH) {
                                    for (int kw : sizeW) {
                                        for (int ph : padH) {
                                            for (int pw : padW) {
                                                if ((w - kw + 2 * pw) % sw != 0 || (h - kh + 2 * ph) % sh != 0)
                                                    continue; //(w-kw+2*pw)/sw + 1 is not an integer, i.e., number of outputs doesn't fit

                                                System.out.println("Running " + m + " " + d + " " + h + " " + w);
                                                for (boolean cAll : coverall) {
                                                    INDArray in = Nd4j.rand(new int[] {m, d, h, w});
                                                    //assertEquals(in.data().allocationMode(), mode);
                                                    //assertEquals(in.data().dataType(), opType);

                                                    INDArray outOrig = OldConvolution.im2col(in, kh, kw, sh, sw, ph, pw, -1, cAll); //Old implementation
                                                    INDArray outNew = Convolution.im2col(in, kh, kw, sh, sw, ph, pw, cAll); //Current implementation

                                                    assertEquals(outOrig, outNew);
                                                }
                                            }
                                        }
                                    }
                                }
                            }
                        }
                    }
                }
            }
        }
    }
    DataTypeUtil.setDTypeForContext(initialType);
}