Available Methods
- putScalar()
- length()
- dup()
- get()
- size()
- shape()
- assign()
- rows()
- getDouble()
- reshape()
- addi()
- muli()
- rank()
- mmul()
- columns()
- put()
- lengthLong()
- ordering()
- sum()
- mul()
- putRow()
- getRow()
- divi()
- castTo()
- permute()
- mean()
- add()
- slice()
- sub()
- tensorAlongDimension()
- muliColumnVector()
- dataType()
- addiRowVector()
- transpose()
- isVector()
- subi()
- getInt()
- isView()
- isScalar()
- subiRowVector()
- std()
- getFloat()
- diviColumnVector()
- stride()
- slices()
- max()
- muliRowVector()
- data()
- getColumn()
- equalShapes()
- div()
- rsubi()
- equals()
- rsub()
- isEmpty()
- var()
- elementWiseStride()
- markAsCompressed()
- norm2()
- mmuli()
- getColumns()
- linearView()
- diviRowVector()
- isRowVector()
- isSparse()
- like()
- ulike()
- offset()
- unsafeDuplication()
- putSlice()
- isRowVectorOrScalar()
- equalsWithEps()
- rdivi()
- addiColumnVector()
- setData()
- gt()
- rdiv()
- tensorsAlongDimension()
- toString()
- vectorsAlongDimension()
- mulColumnVector()
- detach()
- close()
- tensorssAlongDimension()
- putColumn()
- javaTensorAlongDimension()
- isCompressed()
- norm1()
- getRows()
- ravel()
- broadcast()
- isMatrix()
- mulRowVector()
- addColumnVector()
- toFloatVector()
- dimShuffle()
- isR()
- min()
- negi()
- isS()
- addRowVector()
- vectorAlongDimension()
- isColumnVectorOrScalar()
- isAttached()
- distance2()
- shapeInfoDataBuffer()
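The snippet below is a minimal sketch, not taken from any project listed on this page, showing a handful of the methods above on small matrices. It assumes a standard ND4J setup with Nd4j on the classpath; the class name INDArrayBasics is illustrative only.

import java.util.Arrays;

import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;

public class INDArrayBasics {
    public static void main(String[] args) {
        INDArray a = Nd4j.ones(2, 3);                     // 2x3 matrix of ones
        INDArray b = Nd4j.ones(3, 2);                     // 3x2 matrix of ones

        System.out.println(a.rank());                     // 2
        System.out.println(Arrays.toString(a.shape()));   // [2, 3]
        System.out.println(a.dataType());                 // default floating-point type
        System.out.println(a.isS());                      // false: numeric, not a string array

        INDArray c = a.add(1.0);                          // copy op: c is all 2.0, a unchanged
        a.muli(3.0);                                      // in-place op: a is now all 3.0
        INDArray d = a.mmul(b);                           // 2x2 matrix product, every entry 9.0
        INDArray e = d.transpose();                       // transposed view of d

        System.out.println(d.getDouble(0, 0));            // 9.0
        System.out.println(e.equalShapes(d));             // true: both are 2x2
    }
}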
Related Classes
- java.util.Arrays
- java.io.File
- java.util.Collections
- java.util.Random
- java.nio.ByteBuffer
- org.junit.Ignore
- org.apache.spark.api.java.JavaRDD
- org.nd4j.linalg.factory.Nd4j
- org.deeplearning4j.nn.conf.NeuralNetConfiguration
- org.deeplearning4j.nn.weights.WeightInit
- org.deeplearning4j.nn.multilayer.MultiLayerNetwork
- org.deeplearning4j.nn.conf.layers.DenseLayer
- org.deeplearning4j.nn.conf.layers.OutputLayer
- org.nd4j.linalg.activations.Activation
- org.deeplearning4j.nn.conf.MultiLayerConfiguration
- org.nd4j.linalg.lossfunctions.LossFunctions
- org.nd4j.linalg.dataset.DataSet
- org.deeplearning4j.nn.api.OptimizationAlgorithm
- org.nd4j.linalg.dataset.api.iterator.DataSetIterator
- org.deeplearning4j.nn.graph.ComputationGraph
- org.nd4j.linalg.ops.transforms.Transforms
- org.deeplearning4j.nn.conf.inputs.InputType
- org.nd4j.linalg.indexing.NDArrayIndex
- org.nd4j.linalg.learning.config.Adam
- org.deeplearning4j.nn.conf.ComputationGraphConfiguration
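As a rough illustration, and not code from any of the listed projects, the sketch below combines INDArray with two of the related classes above: NDArrayIndex for slicing and Transforms for element-wise functions. The class name RelatedClassesDemo and the array sizes are illustrative assumptions.

import java.util.Arrays;

import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;
import org.nd4j.linalg.indexing.NDArrayIndex;
import org.nd4j.linalg.ops.transforms.Transforms;

public class RelatedClassesDemo {
    public static void main(String[] args) {
        INDArray m = Nd4j.rand(4, 6);    // random 4x6 matrix

        // Take all rows but only the first two columns, as a view of m.
        INDArray firstTwoCols = m.get(NDArrayIndex.all(), NDArrayIndex.interval(0, 2));

        // Apply an element-wise sigmoid (returns a new array by default).
        INDArray squashed = Transforms.sigmoid(firstTwoCols);

        System.out.println(Arrays.toString(squashed.shape()));   // [4, 2]
    }
}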
Java Code Examples for org.nd4j.linalg.api.ndarray.INDArray#isS()
The following examples show how to use org.nd4j.linalg.api.ndarray.INDArray#isS(). All of them come from the deeplearning4j project, where isS() is used to skip or reject string-typed arrays before doing numeric or device-side work.
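Before the project examples, here is a minimal standalone sketch of the guard pattern they all share: return early (or throw) when the array is empty or its data type is a string type. This is an illustration written for this page, not deeplearning4j code; the class IsSGuardSketch and the helper processNumeric are hypothetical.

import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;

public class IsSGuardSketch {
    // Hypothetical helper: only processes non-empty, non-string arrays.
    static void processNumeric(INDArray array) {
        // Same guard as in the CUDA allocator examples below: empty arrays
        // and arrays whose data type is a string type (isS()) are skipped.
        if (array.isEmpty() || array.isS()) {
            return;
        }
        System.out.println("processing " + array.length() + " elements");
    }

    public static void main(String[] args) {
        processNumeric(Nd4j.ones(2, 2));   // prints: processing 4 elements
        processNumeric(Nd4j.empty());      // silently skipped: isEmpty() is true
    }
}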
Example 1
Source File: CudaAffinityManager.java, from deeplearning4j (Apache License 2.0)
@Override
public void ensureLocation(INDArray array, Location location) {
    // to location to ensure for empty array
    if (array.isEmpty() || array.isS())
        return;

    // let's make sure host pointer actually exists
    ((BaseCudaDataBuffer) array.data()).lazyAllocateHostPointer();

    val point = AtomicAllocator.getInstance().getAllocationPoint(array);
    switch (location) {
        case HOST: {
            AtomicAllocator.getInstance().synchronizeHostData(array);
        }
        break;
        case DEVICE: {
            AtomicAllocator.getInstance().getFlowController().synchronizeToDevice(point);
        }
        break;
        case EVERYWHERE:
        default: {
            AtomicAllocator.getInstance().synchronizeHostData(array);
            AtomicAllocator.getInstance().getFlowController().synchronizeToDevice(point);
        }
    }
}
Example 2
Source File: AtomicAllocator.java, from deeplearning4j (Apache License 2.0)
/**
 * This method returns actual device pointer valid for specified INDArray
 *
 * @param array
 */
@Override
public Pointer getPointer(INDArray array, CudaContext context) {
    // DataBuffer buffer = array.data().originalDataBuffer() == null ? array.data() : array.data().originalDataBuffer();
    if (array.isEmpty() || array.isS())
        throw new UnsupportedOperationException("Pew-pew");

    return memoryHandler.getDevicePointer(array.data(), context);
}
Example 3
Source File: AtomicAllocator.java, from deeplearning4j (Apache License 2.0)
/**
 * This method should be called to make sure that data on host side is actualized
 *
 * @param array
 */
@Override
public void synchronizeHostData(INDArray array) {
    if (array.isEmpty() || array.isS())
        return;

    val buffer = array.data().originalDataBuffer() == null ? array.data() : array.data().originalDataBuffer();

    synchronizeHostData(buffer);
}