org.nd4j.linalg.api.memory.conf.WorkspaceConfiguration Java Examples

The following examples show how to use org.nd4j.linalg.api.memory.conf.WorkspaceConfiguration. Each snippet is taken from an open-source project; the source file, originating project, and license are noted above each example.
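All of the snippets below follow the same basic pattern: build a WorkspaceConfiguration with its builder, then obtain and activate a workspace from Nd4j's workspace manager, usually inside a try-with-resources scope so the workspace is closed automatically. The minimal sketch below shows that pattern in isolation; the class name, the workspace id "EXAMPLE_WS", and the particular sizes and policies are illustrative assumptions rather than values required by the API.

import org.nd4j.linalg.api.memory.MemoryWorkspace;
import org.nd4j.linalg.api.memory.conf.WorkspaceConfiguration;
import org.nd4j.linalg.api.memory.enums.AllocationPolicy;
import org.nd4j.linalg.api.memory.enums.LearningPolicy;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;

public class WorkspaceConfigurationSketch {
    public static void main(String[] args) {
        // Illustrative configuration: 10 MB allocated up front, strict allocation, no learning of the required size.
        WorkspaceConfiguration conf = WorkspaceConfiguration.builder()
                .initialSize(10 * 1024L * 1024L)
                .policyAllocation(AllocationPolicy.STRICT)
                .policyLearning(LearningPolicy.NONE)
                .build();

        // Arrays created while the workspace scope is open are attached to it and released when the scope closes.
        try (MemoryWorkspace ws = Nd4j.getWorkspaceManager().getAndActivateWorkspace(conf, "EXAMPLE_WS")) {
            INDArray array = Nd4j.create(10);
            array.addi(1.0f);
        }
    }
}

Arrays created while such a scope is open report isAttached() == true, which several of the tests below assert directly.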
Example #1
Source File: FloatDataBufferTest.java    From deeplearning4j with Apache License 2.0
@Test
public void testReallocationWorkspace() {
    WorkspaceConfiguration initialConfig = WorkspaceConfiguration.builder().initialSize(10 * 1024L * 1024L)
                    .policyAllocation(AllocationPolicy.STRICT).policyLearning(LearningPolicy.NONE).build();
    MemoryWorkspace workspace = Nd4j.getWorkspaceManager().getAndActivateWorkspace(initialConfig, "SOME_ID");

    DataBuffer buffer = Nd4j.createBuffer(new float[] {1, 2, 3, 4});
    assertTrue(buffer.isAttached());
    float[] old = buffer.asFloat();
    assertEquals(4, buffer.capacity());
    buffer.reallocate(6);
    assertEquals(6, buffer.capacity());
    float[] newBuf = buffer.asFloat();
    assertArrayEquals(old, newBuf, 1e-4F);
    workspace.close();
}
 
Example #2
Source File: SpecialWorkspaceTests.java    From deeplearning4j with Apache License 2.0
@Test
public void testAlignment_1() {
    WorkspaceConfiguration initialConfig = WorkspaceConfiguration.builder().initialSize(10 * 1024L * 1024L)
            .policyAllocation(AllocationPolicy.STRICT).policyLearning(LearningPolicy.NONE).build();
    MemoryWorkspace workspace = Nd4j.getWorkspaceManager().getAndActivateWorkspace(initialConfig, "WS132143452343");

    for( int j=0; j<100; j++ ){

        try(MemoryWorkspace ws = workspace.notifyScopeEntered()) {

            for (int x = 0; x < 10; x++) {
                //System.out.println("Start iteration (" + j + "," + x + ")");
                INDArray arr = Nd4j.linspace(1,10,10, DataType.DOUBLE).reshape(1,10);
                INDArray sum = arr.sum(true, 1);
                Nd4j.create(DataType.BOOL, x+1);        //NOTE: no crash if set to FLOAT/HALF, No crash if removed entirely; same crash for BOOL/UBYTE
                //System.out.println("End iteration (" + j + "," + x + ")");
            }
        }
    }
}
 
Example #3
Source File: BasicWorkspaceTests.java    From deeplearning4j with Apache License 2.0
@Test
@Ignore
public void testMmap2() throws Exception {
    // we don't support MMAP on cuda yet
    if (Nd4j.getExecutioner().getClass().getName().toLowerCase().contains("cuda"))
        return;

    File tmp = File.createTempFile("tmp", "fdsfdf");
    tmp.deleteOnExit();
    Nd4jWorkspace.fillFile(tmp, 100000);

    WorkspaceConfiguration mmap = WorkspaceConfiguration.builder()
            .policyLocation(LocationPolicy.MMAP)
            .tempFilePath(tmp.getAbsolutePath())
            .build();

    MemoryWorkspace ws = Nd4j.getWorkspaceManager().getAndActivateWorkspace(mmap, "M3");

    INDArray mArray = Nd4j.create(DOUBLE, 100);
    mArray.assign(10f);

    assertEquals(1000f, mArray.sumNumber().floatValue(), 1e-5);

    ws.notifyScopeLeft();
}
 
Example #4
Source File: SpecialTests.java    From deeplearning4j with Apache License 2.0
@Test
public void reproduceWorkspaceCrash_3() {
    val conf = WorkspaceConfiguration.builder().build();

    val ws = Nd4j.getWorkspaceManager().getWorkspaceForCurrentThread(conf, "WS");
    val dtypes = new DataType[]{DataType.DOUBLE, DataType.FLOAT, DataType.HALF, DataType.LONG, DataType.INT, DataType.SHORT, DataType.BYTE, DataType.UBYTE, DataType.BOOL};
    for (val dX : dtypes) {
        for (val dZ : dtypes) {
            try (val ws2 = ws.notifyScopeEntered()) {
                val array = Nd4j.create(dX, 2, 5).assign(1);
                //log.info("Trying to cast {} to {}", dX, dZ);
                val casted = array.castTo(dZ);
                val exp = Nd4j.create(dZ, 2, 5).assign(1);
                assertEquals(exp, casted);

                Nd4j.getExecutioner().commit();
            }
        }
    }
}
 
Example #5
Source File: InterleavedDataSetCallback.java    From deeplearning4j with Apache License 2.0
protected void initializeWorkspaces(long size) {
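    // Create one workspace per device using the same configuration, temporarily binding the thread to each device; the original device affinity is restored afterwards.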
    WorkspaceConfiguration configuration = WorkspaceConfiguration.builder().initialSize(size)
                    .overallocationLimit(bufferSize).policyReset(ResetPolicy.ENDOFBUFFER_REACHED)
                    .policyAllocation(AllocationPolicy.OVERALLOCATE).policySpill(SpillPolicy.EXTERNAL)
                    .policyLearning(LearningPolicy.NONE).build();

    int numDevices = Nd4j.getAffinityManager().getNumberOfDevices();
    int cDevice = Nd4j.getAffinityManager().getDeviceForCurrentThread();
    for (int i = 0; i < numDevices; i++) {
        Nd4j.getAffinityManager().unsafeSetDevice(i);
        workspaces.add(Nd4j.getWorkspaceManager().createNewWorkspace(configuration, "IDSC-" + i, i));
    }

    Nd4j.getAffinityManager().unsafeSetDevice(cDevice);
    numWorkspaces = numDevices;
    isInitialized = true;
}
 
Example #6
Source File: SpecialWorkspaceTests.java    From deeplearning4j with Apache License 2.0
@Test
public void testMmapedWorkspace_Path_Limits_1() throws Exception {
    if (!Nd4j.getEnvironment().isCPU())
        return;

    // getting very long file name
    val builder = new StringBuilder("long_file_name_");
    for (int e = 0; e < 100; e++)
        builder.append("9");


    val tmpFile = Files.createTempFile("some", builder.toString());
    val mmap = WorkspaceConfiguration.builder()
            .initialSize(200 * 1024L * 1024L) // 200 MB
            .tempFilePath(tmpFile.toAbsolutePath().toString())
            .policyLocation(LocationPolicy.MMAP)
            .policyLearning(LearningPolicy.NONE)
            .build();

    try (val ws = Nd4j.getWorkspaceManager().getAndActivateWorkspace(mmap, "M2")) {
        val x = Nd4j.rand(DataType.FLOAT, 1024);
    }
}
 
Example #7
Source File: EndlessWorkspaceTests.java    From deeplearning4j with Apache License 2.0
@Test
public void endlessTest5() throws Exception {
    while (true) {
        Thread thread = new Thread(new Runnable() {
            @Override
            public void run() {
                WorkspaceConfiguration wsConf = WorkspaceConfiguration.builder().initialSize(10 * 1024L * 1024L)
                                .policyLearning(LearningPolicy.NONE).build();

                try (MemoryWorkspace ws = Nd4j.getWorkspaceManager().getAndActivateWorkspace(wsConf, "PEW-PEW")) {
                    INDArray array = Nd4j.create(10);
                }
            }
        });

        thread.start();
        thread.join();

        System.gc();
    }
}
 
Example #8
Source File: EndlessWorkspaceTests.java    From nd4j with Apache License 2.0
/**
 * This test checks for allocations within single workspace, without any spills
 *
 * @throws Exception
 */
@Test
public void endlessTest1() throws Exception {

    Nd4j.getWorkspaceManager().setDefaultWorkspaceConfiguration(
                    WorkspaceConfiguration.builder().initialSize(100 * 1024L * 1024L).build());

    Nd4j.getMemoryManager().togglePeriodicGc(false);

    AtomicLong counter = new AtomicLong(0);
    while (true) {
        try (MemoryWorkspace workspace = Nd4j.getWorkspaceManager().getAndActivateWorkspace()) {
            long time1 = System.nanoTime();
            INDArray array = Nd4j.create(1024 * 1024);
            long time2 = System.nanoTime();
            array.addi(1.0f);
            assertEquals(1.0f, array.meanNumber().floatValue(), 0.1f);

            if (counter.incrementAndGet() % 1000 == 0)
                log.info("{} iterations passed... Allocation time: {} ns", counter.get(), time2 - time1);
        }
    }
}
 
Example #9
Source File: EndlessWorkspaceTests.java    From nd4j with Apache License 2.0
@Test
public void endlessTest5() throws Exception {
    while (true) {
        Thread thread = new Thread(new Runnable() {
            @Override
            public void run() {
                WorkspaceConfiguration wsConf = WorkspaceConfiguration.builder().initialSize(10 * 1024L * 1024L)
                                .policyLearning(LearningPolicy.NONE).build();

                try (MemoryWorkspace ws = Nd4j.getWorkspaceManager().getAndActivateWorkspace(wsConf, "PEW-PEW")) {
                    INDArray array = Nd4j.create(10);
                }
            }
        });

        thread.start();
        thread.join();

        System.gc();
    }
}
 
Example #10
Source File: SpecialTests.java    From deeplearning4j with Apache License 2.0
@Test
public void reproduceWorkspaceCrash_5(){
    val conf = WorkspaceConfiguration.builder().build();

    val ws = Nd4j.getWorkspaceManager().getWorkspaceForCurrentThread(conf, "WS");

    INDArray arr = Nd4j.create(new double[]{1, 0, 0, 0, 1, 0, 0, 0, 0, 0}, new long[]{1, 10});

    Nd4j.setDefaultDataTypes(DataType.DOUBLE, DataType.DOUBLE);
    assertEquals(DataType.DOUBLE, arr.dataType());

    for( int i=0; i<100; i++ ) {
        try(val ws2 = ws.notifyScopeEntered()) {
            INDArray crash = arr.castTo(DataType.BOOL).castTo(DataType.DOUBLE);
            crash.dup();
        }
    }
}
 
Example #11
Source File: IntDataBufferTests.java    From deeplearning4j with Apache License 2.0
@Test
public void testReallocationWorkspace() {
    WorkspaceConfiguration initialConfig = WorkspaceConfiguration.builder().initialSize(10 * 1024L * 1024L)
                    .policyAllocation(AllocationPolicy.STRICT).policyLearning(LearningPolicy.NONE).build();
    MemoryWorkspace workspace = Nd4j.getWorkspaceManager().getAndActivateWorkspace(initialConfig, "SOME_ID");

    DataBuffer buffer = Nd4j.createBuffer(new int[] {1, 2, 3, 4});
    val old = buffer.asInt();
    assertTrue(buffer.isAttached());
    assertEquals(4, buffer.capacity());
    buffer.reallocate(6);
    assertEquals(6, buffer.capacity());
    val newContent = buffer.asInt();
    assertEquals(6, newContent.length);
    assertArrayEquals(old, Arrays.copyOf(newContent, old.length));
    workspace.close();
}
 
Example #12
Source File: BasicWorkspaceTests.java    From deeplearning4j with Apache License 2.0
@Test
public void testOverallocation2() {
    WorkspaceConfiguration overallocationConfig = WorkspaceConfiguration.builder().initialSize(0)
                    .maxSize(10 * 1024 * 1024).overallocationLimit(1.0)
                    .policyAllocation(AllocationPolicy.OVERALLOCATE).policyLearning(LearningPolicy.FIRST_LOOP)
                    .policyMirroring(MirroringPolicy.FULL).policySpill(SpillPolicy.EXTERNAL).build();

    Nd4jWorkspace workspace = (Nd4jWorkspace) Nd4j.getWorkspaceManager().createNewWorkspace(overallocationConfig);

    //Nd4j.getMemoryManager().setCurrentWorkspace(workspace);

    assertEquals(0, workspace.getCurrentSize());

    try (MemoryWorkspace cW = workspace.notifyScopeEntered()) {
        INDArray array = Nd4j.create(DOUBLE, 100);
    }

    // should be 1600 = 100 elements * 8 bytes per element (double) * 2 as overallocation coefficient
    assertEquals(200 * Nd4j.sizeOfDataType(DOUBLE), workspace.getCurrentSize());
}
 
Example #13
Source File: DebugModeTests.java    From deeplearning4j with Apache License 2.0
@Test
public void testSpillMode_1() {
    Nd4j.getWorkspaceManager().setDebugMode(DebugMode.SPILL_EVERYTHING);

    val basicConfig = WorkspaceConfiguration.builder()
            .initialSize(10 * 1024 * 1024).maxSize(10 * 1024 * 1024).overallocationLimit(0.1)
            .policyAllocation(AllocationPolicy.STRICT).policyLearning(LearningPolicy.FIRST_LOOP)
            .policyMirroring(MirroringPolicy.FULL).policySpill(SpillPolicy.EXTERNAL).build();

    try (val ws = (Nd4jWorkspace) Nd4j.getWorkspaceManager().getAndActivateWorkspace(basicConfig, "R_119_1993")) {
        assertEquals(10 * 1024 * 1024L, ws.getCurrentSize());
        assertEquals(0, ws.getDeviceOffset());
        assertEquals(0, ws.getPrimaryOffset());

        val array = Nd4j.create(DataType.DOUBLE, 10, 10).assign(1.0f);
        assertTrue(array.isAttached());

        // nothing should get into workspace
        assertEquals(0, ws.getPrimaryOffset());
        assertEquals(0, ws.getDeviceOffset());

        // array buffer should be spilled now
        assertEquals(10 * 10 * Nd4j.sizeOfDataType(DataType.DOUBLE), ws.getSpilledSize());
    }
}
 
Example #14
Source File: BasicWorkspaceTests.java    From nd4j with Apache License 2.0
@Test
public void testMmap2() throws Exception {
    // we don't support MMAP on cuda yet
    if (Nd4j.getExecutioner().getClass().getName().toLowerCase().contains("cuda"))
        return;

    File tmp = File.createTempFile("tmp", "fdsfdf");
    tmp.deleteOnExit();
    Nd4jWorkspace.fillFile(tmp, 100000);

    WorkspaceConfiguration mmap = WorkspaceConfiguration.builder()
            .policyLocation(LocationPolicy.MMAP)
            .tempFilePath(tmp.getAbsolutePath())
            .build();

    MemoryWorkspace ws = Nd4j.getWorkspaceManager().getAndActivateWorkspace(mmap, "M3");

    INDArray mArray = Nd4j.create(100);
    mArray.assign(10f);

    assertEquals(1000f, mArray.sumNumber().floatValue(), 1e-5);

    ws.notifyScopeLeft();
}
 
Example #15
Source File: DoubleDataBufferTest.java    From deeplearning4j with Apache License 2.0
@Test
public void testReallocationWorkspace() {
    WorkspaceConfiguration initialConfig = WorkspaceConfiguration.builder().initialSize(10 * 1024L * 1024L)
                    .policyAllocation(AllocationPolicy.STRICT).policyLearning(LearningPolicy.NONE).build();
    MemoryWorkspace workspace = Nd4j.getWorkspaceManager().getAndActivateWorkspace(initialConfig, "SOME_ID");

    DataBuffer buffer = Nd4j.createBuffer(new double[] {1, 2, 3, 4});
    double[] old = buffer.asDouble();
    assertTrue(buffer.isAttached());
    assertEquals(4, buffer.capacity());
    buffer.reallocate(6);
    assertEquals(6, buffer.capacity());
    assertArrayEquals(old, Arrays.copyOf(buffer.asDouble(), 4), 1e-1);
    workspace.close();

}
 
Example #16
Source File: CudaWorkspaceTest.java    From deeplearning4j with Apache License 2.0
@Test
public void testCircularWorkspaceAsymmetry_3() {
    // circular workspace mode
    val configuration = WorkspaceConfiguration.builder().initialSize(10 * 1024 * 1024)
            .policyReset(ResetPolicy.ENDOFBUFFER_REACHED).policyAllocation(AllocationPolicy.STRICT)
            .policySpill(SpillPolicy.FAIL).policyLearning(LearningPolicy.NONE).build();

    val root = Nd4j.create(DataType.FLOAT, 1000000).assign(119);

    for (int e = 0; e < 100; e++) {
        try (val ws = (CudaWorkspace) Nd4j.getWorkspaceManager().getAndActivateWorkspace(configuration, "circular_ws")) {
            val array = Nd4j.create(DataType.FLOAT, root.shape());
            array.assign(root);

            val second = Nd4j.create(DataType.FLOAT, root.shape());

            array.data().getInt(3);
        }
    }
}
 
Example #17
Source File: SpecialWorkspaceTests.java    From deeplearning4j with Apache License 2.0
@Test
public void testDeleteMappedFile_1() throws Exception {
    if (!Nd4j.getEnvironment().isCPU())
        return;

    val tmpFile = Files.createTempFile("some", "file");
    val mmap = WorkspaceConfiguration.builder()
            .initialSize(200 * 1024L * 1024L) // 200 MB
            .tempFilePath(tmpFile.toAbsolutePath().toString())
            .policyLocation(LocationPolicy.MMAP)
            .policyLearning(LearningPolicy.NONE)
            .build();

    try (val ws = Nd4j.getWorkspaceManager().getAndActivateWorkspace(mmap, "M2")) {
        val x = Nd4j.rand(DataType.FLOAT, 1024);
    }

    Nd4j.getWorkspaceManager().destroyAllWorkspacesForCurrentThread();

    Files.delete(tmpFile);
}
 
Example #18
Source File: CudaWorkspaceTest.java    From deeplearning4j with Apache License 2.0
@Test
public void testCircularWorkspaceAsymmetry_2() {
    // circular workspace mode
    val configuration = WorkspaceConfiguration.builder().initialSize(10 * 1024 * 1024)
            .policyReset(ResetPolicy.ENDOFBUFFER_REACHED).policyAllocation(AllocationPolicy.STRICT)
            .policySpill(SpillPolicy.FAIL).policyLearning(LearningPolicy.NONE).build();

    val root = Nd4j.create(DataType.FLOAT, 1000000).assign(119);

    for (int e = 0; e < 100; e++) {
        try (val ws = (CudaWorkspace) Nd4j.getWorkspaceManager().getAndActivateWorkspace(configuration, "circular_ws")) {
            val array = Nd4j.create(DataType.FLOAT, root.shape());
            array.assign(root);

            array.data().getInt(3);

            assertEquals(ws.getHostOffset(), ws.getDeviceOffset());
        }
    }
}
 
Example #19
Source File: DL4JSameDiffMemoryMgr.java    From deeplearning4j with Apache License 2.0
@Override
public INDArray allocate(boolean detached, DataType dataType, long... shape) {
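    // Detached (output) arrays and working-memory arrays may target different workspaces; if no workspace name is set, allocation falls through to a fully detached array.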
    String wsName = detached ? outputWs : workingMemoryWs;
    WorkspaceConfiguration wsConf = detached ? confOutput : confWorking;

    if(wsName == null){
        //Scoped out
        INDArray ret = Nd4j.createUninitializedDetached(dataType, shape);
        Preconditions.checkState(!ret.isAttached(), "Returned array should be detached");
        return ret;
    } else {
        MemoryWorkspace ws = Nd4j.getWorkspaceManager().getWorkspaceForCurrentThread(wsConf, wsName);
        try (MemoryWorkspace mw = ws.notifyScopeBorrowed()) {
            return Nd4j.createUninitialized(dataType, shape);
        }
    }
}
 
Example #20
Source File: MixedDataTypesTests.java    From deeplearning4j with Apache License 2.0
@Test
public void testWorkspaceBool() {
    val conf = WorkspaceConfiguration.builder().minSize(10 * 1024 * 1024)
            .overallocationLimit(1.0).policyAllocation(AllocationPolicy.OVERALLOCATE)
            .policyLearning(LearningPolicy.FIRST_LOOP).policyMirroring(MirroringPolicy.FULL)
            .policySpill(SpillPolicy.EXTERNAL).build();

    val ws = Nd4j.getWorkspaceManager().getWorkspaceForCurrentThread(conf, "WS");

    for (int i = 0; i < 10; i++) {
        try (val workspace = (Nd4jWorkspace) ws.notifyScopeEntered()) {
            val bool = Nd4j.create(DataType.BOOL, 1, 10);
            val dbl = Nd4j.create(DataType.DOUBLE, 1, 10);

            val boolAttached = bool.isAttached();
            val doubleAttached = dbl.isAttached();

            //System.out.println(i + "\tboolAttached=" + boolAttached + ", doubleAttached=" + doubleAttached);
            //System.out.println("bool: " + bool);      // java.lang.IllegalStateException: Indexer must never be null
            //System.out.println("double: " + dbl);
        }
    }
}
 
Example #21
Source File: AccountingTests.java    From deeplearning4j with Apache License 2.0
@Test
public void testWorkspaceAccounting_1() {
    val deviceId = Nd4j.getAffinityManager().getDeviceForCurrentThread();
    val wsConf = WorkspaceConfiguration.builder()
            .initialSize(10 * 1024 * 1024)
            .policyAllocation(AllocationPolicy.STRICT)
            .policyLearning(LearningPolicy.FIRST_LOOP)
            .build();

    val before = Nd4j.getMemoryManager().allocatedMemory(deviceId);

    val workspace = Nd4j.getWorkspaceManager().createNewWorkspace(wsConf, "random_name_here");

    val middle = Nd4j.getMemoryManager().allocatedMemory(deviceId);

    workspace.destroyWorkspace(true);

    val after = Nd4j.getMemoryManager().allocatedMemory(deviceId);

    log.info("Before: {}; Middle: {}; After: {}", before, middle, after);
    assertTrue(middle > before);
    assertTrue(after < middle);
}
 
Example #22
Source File: CudaWorkspaceTest.java    From deeplearning4j with Apache License 2.0
@Test
public void testCircularWorkspaceAsymmetry_1() {
    // circular workspace mode
    val configuration = WorkspaceConfiguration.builder().initialSize(10 * 1024 * 1024)
            .policyReset(ResetPolicy.ENDOFBUFFER_REACHED).policyAllocation(AllocationPolicy.STRICT)
            .policySpill(SpillPolicy.FAIL).policyLearning(LearningPolicy.NONE).build();


    try (val ws = (CudaWorkspace) Nd4j.getWorkspaceManager().getAndActivateWorkspace(configuration, "circular_ws")) {
        val array = Nd4j.create(DataType.FLOAT, 10, 10);

        assertEquals(0, ws.getHostOffset());
        assertNotEquals(0, ws.getDeviceOffset());

        // we expect that this array has no data/buffer on HOST side
        assertEquals(AffinityManager.Location.DEVICE, Nd4j.getAffinityManager().getActiveLocation(array));

        // since this array doesn't have HOST buffer - it will allocate one now
        array.getDouble(3L);

        assertEquals(ws.getHostOffset(), ws.getDeviceOffset());
    }

    try (val ws = (CudaWorkspace) Nd4j.getWorkspaceManager().getAndActivateWorkspace(configuration, "circular_ws")) {
        assertEquals(ws.getHostOffset(), ws.getDeviceOffset());
    }

    Nd4j.getWorkspaceManager().destroyAllWorkspacesForCurrentThread();
}
 
Example #23
Source File: DebugModeTests.java    From deeplearning4j with Apache License 2.0
@Test
public void testSpillMode_2() {
    Nd4j.getWorkspaceManager().setDebugMode(DebugMode.SPILL_EVERYTHING);

    val basicConfig = WorkspaceConfiguration.builder()
            .initialSize(0).maxSize(10 * 1024 * 1024).overallocationLimit(0.1)
            .policyAllocation(AllocationPolicy.STRICT).policyLearning(LearningPolicy.FIRST_LOOP)
            .policyMirroring(MirroringPolicy.FULL).policySpill(SpillPolicy.EXTERNAL).build();

    try (val ws = (Nd4jWorkspace) Nd4j.getWorkspaceManager().getAndActivateWorkspace(basicConfig, "R_119_1992")) {
        assertEquals(0L, ws.getCurrentSize());
        assertEquals(0, ws.getDeviceOffset());
        assertEquals(0, ws.getPrimaryOffset());

        val array = Nd4j.create(DataType.DOUBLE, 10, 10).assign(1.0f);

        assertTrue(array.isAttached());

        // nothing should get into workspace
        assertEquals(0, ws.getPrimaryOffset());
        assertEquals(0, ws.getDeviceOffset());

        // array buffer should be spilled now
        assertEquals(10 * 10 * Nd4j.sizeOfDataType(DataType.DOUBLE), ws.getSpilledSize());
    }

    try (val ws = (Nd4jWorkspace) Nd4j.getWorkspaceManager().getAndActivateWorkspace(basicConfig, "R_119_1992")) {
        assertEquals(0L, ws.getCurrentSize());
        assertEquals(0, ws.getDeviceOffset());
        assertEquals(0, ws.getPrimaryOffset());
        assertEquals(0, ws.getSpilledSize());
    }
}
 
Example #24
Source File: BaseWorkspaceMgr.java    From nd4j with Apache License 2.0
@Override
public void setWorkspace(@NonNull T forEnum, @NonNull String wsName, @NonNull WorkspaceConfiguration configuration) {
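    // Registering a workspace name and configuration for this array type also clears any earlier "scoped out of workspace" setting for it.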
    if(scopeOutOfWs.contains(forEnum)){
        scopeOutOfWs.remove(forEnum);
    }
    setWorkspaceName(forEnum, wsName);
    setConfiguration(forEnum, configuration);
}
 
Example #25
Source File: SpecialTests.java    From deeplearning4j with Apache License 2.0
@Test
public void testYoloStyle() {
    WorkspaceConfiguration WS_ALL_LAYERS_ACT_CONFIG = WorkspaceConfiguration.builder()
            .initialSize(0)
            .overallocationLimit(0.05)
            .policyLearning(LearningPolicy.FIRST_LOOP)
            .policyReset(ResetPolicy.BLOCK_LEFT)
            .policySpill(SpillPolicy.REALLOCATE)
            .policyAllocation(AllocationPolicy.OVERALLOCATE)
            .build();

    for (int i = 0; i < 10; i++) {
        try (val ws = Nd4j.getWorkspaceManager().getAndActivateWorkspace(WS_ALL_LAYERS_ACT_CONFIG, "ws")) {
            //System.out.println("STARTING: " + i);

            INDArray objectPresentMask = Nd4j.create(DataType.BOOL, 1, 4, 4);

            long[] shape = {1, 3, 2, 4, 4};
            INDArray noIntMask1 = Nd4j.createUninitialized(DataType.BOOL, shape, 'c');
            INDArray noIntMask2 = Nd4j.createUninitialized(DataType.BOOL, shape, 'c');

            noIntMask1 = Transforms.or(noIntMask1.get(all(), all(), point(0), all(), all()), noIntMask1.get(all(), all(), point(1), all(), all()));    //Shape: [mb, b, H, W]. Values 1 if no intersection
            noIntMask2 = Transforms.or(noIntMask2.get(all(), all(), point(0), all(), all()), noIntMask2.get(all(), all(), point(1), all(), all()));
            INDArray noIntMask = Transforms.or(noIntMask1, noIntMask2);

            Nd4j.getExecutioner().commit();

            INDArray intMask = Transforms.not(noIntMask);   //Values 0 if no intersection
            Nd4j.getExecutioner().commit();

            Broadcast.mul(intMask, objectPresentMask, intMask, 0, 2, 3);
            Nd4j.getExecutioner().commit();
            //System.out.println("DONE: " + i);
        }
    }
}
 
Example #26
Source File: EndlessWorkspaceTests.java    From deeplearning4j with Apache License 2.0
/**
 * This endless test checks for nested workspaces and cross-workspace memory use
 *
 * @throws Exception
 */
@Test
public void endlessTest3() {
    Nd4j.getWorkspaceManager().setDefaultWorkspaceConfiguration(
                    WorkspaceConfiguration.builder().initialSize(10 * 1024L * 1024L).build());

    Nd4j.getMemoryManager().togglePeriodicGc(false);
    AtomicLong counter = new AtomicLong(0);
    while (true) {
        try (MemoryWorkspace workspace1 = Nd4j.getWorkspaceManager().getAndActivateWorkspace("WS_1")) {
            INDArray array1 = Nd4j.create(2 * 1024 * 1024);
            array1.assign(1.0);

            try (MemoryWorkspace workspace2 = Nd4j.getWorkspaceManager().getAndActivateWorkspace("WS_2")) {
                INDArray array2 = Nd4j.create(2 * 1024 * 1024);
                array2.assign(1.0);

                array1.addi(array2);

                assertEquals(2.0f, array1.meanNumber().floatValue(), 0.01);

                if (counter.incrementAndGet() % 1000 == 0) {
                    log.info("{} iterations passed...", counter.get());
                    System.gc();
                }
            }
        }
    }
}
 
Example #27
Source File: BasicWorkspaceTests.java    From deeplearning4j with Apache License 2.0
@Test
public void testMmap1() {
    // we don't support MMAP on cuda yet
    if (Nd4j.getExecutioner().getClass().getName().toLowerCase().contains("cuda"))
        return;

    WorkspaceConfiguration mmap = WorkspaceConfiguration.builder()
            .initialSize(1000000)
            .policyLocation(LocationPolicy.MMAP)
            .policyLearning(LearningPolicy.NONE)
            .build();

    MemoryWorkspace ws = Nd4j.getWorkspaceManager().getAndActivateWorkspace(mmap, "M2");

    INDArray mArray = Nd4j.create(DOUBLE, 100);
    mArray.assign(10f);

    assertEquals(1000f, mArray.sumNumber().floatValue(), 1e-5);

    ws.close();


    ws.notifyScopeEntered();

    INDArray mArrayR = Nd4j.createUninitialized(DOUBLE, 100);
    assertEquals(1000f, mArrayR.sumNumber().floatValue(), 1e-5);

    ws.close();
}
 
Example #28
Source File: CudaWorkspaceManager.java    From nd4j with Apache License 2.0
@Override
public MemoryWorkspace getWorkspaceForCurrentThread(@NonNull WorkspaceConfiguration configuration, @NonNull String id) {
    ensureThreadExistense();

    MemoryWorkspace workspace = backingMap.get().get(id);
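    // If no workspace exists yet for this id, create one, store it in the thread-local map, and register the reference.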
    if (workspace == null) {
        workspace = new CudaWorkspace(configuration, id);
        backingMap.get().put(id, workspace);
        pickReference(workspace);
    }

    return workspace;
}
 
Example #29
Source File: DebugModeTests.java    From deeplearning4j with Apache License 2.0
@Test
public void testBypassMode_1() {
    Nd4j.getWorkspaceManager().setDebugMode(DebugMode.BYPASS_EVERYTHING);

    val basicConfig = WorkspaceConfiguration.builder()
            .initialSize(0).maxSize(10 * 1024 * 1024).overallocationLimit(0.1)
            .policyAllocation(AllocationPolicy.STRICT).policyLearning(LearningPolicy.FIRST_LOOP)
            .policyMirroring(MirroringPolicy.FULL).policySpill(SpillPolicy.EXTERNAL).build();

    try (val ws = Nd4j.getWorkspaceManager().getAndActivateWorkspace(basicConfig, "R_119_1994")) {

        val array = Nd4j.create(10, 10).assign(1.0f);
        assertFalse(array.isAttached());
    }
}
 
Example #30
Source File: CpuWorkspaceManager.java    From nd4j with Apache License 2.0
@Override
public MemoryWorkspace createNewWorkspace(@NonNull WorkspaceConfiguration configuration) {
    ensureThreadExistense();

    MemoryWorkspace workspace = new CpuWorkspace(configuration);

    backingMap.get().put(workspace.getId(), workspace);
    pickReference(workspace);

    return workspace;
}