org.nd4j.linalg.api.ndarray.INDArray Java Examples
The following examples show how to use
org.nd4j.linalg.api.ndarray.INDArray.
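Before the examples, here is a minimal, self-contained sketch of the core INDArray workflow (creation, elementwise math, matrix multiply). The class name and values are illustrative only and are not taken from any of the projects below.

import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;

public class INDArrayBasics {
    public static void main(String[] args) {
        // Build a 2x3 matrix from a flat array (row-major 'c' order by default)
        INDArray a = Nd4j.create(new float[] {1, 2, 3, 4, 5, 6}, new int[] {2, 3});
        // add() returns a new array; the i-suffixed variants (addi, muli, ...) operate in place
        INDArray b = a.add(1.0);
        // Matrix multiply: (2x3) x (3x2) -> (2x2)
        INDArray c = a.mmul(b.transpose());
        System.out.println(c);
    }
}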
Example #1
Source File: UpdaterTest.java From nd4j with Apache License 2.0
@Test
public void testAdaMax() {
    int rows = 10;
    int cols = 2;

    AdaMaxUpdater grad = new AdaMaxUpdater(new AdaMax());
    grad.setStateViewArray(Nd4j.zeros(1, 2 * rows * cols), new long[] {rows, cols}, 'c', true);
    INDArray W = Nd4j.zeros(rows, cols);
    Distribution dist = Nd4j.getDistributions().createNormal(1e-3, 1e-3);
    for (int i = 0; i < W.rows(); i++)
        W.putRow(i, Nd4j.create(dist.sample(W.columns())));

    for (int i = 0; i < 5; i++) {
        //String learningRates = String.valueOf("\nAdaMax\n " + grad.getGradient(W, i)).replaceAll(";", "\n");
        //System.out.println(learningRates);
        W.addi(Nd4j.randn(rows, cols));
    }
}
Example #2
Source File: OCNNParamInitializer.java From deeplearning4j with Apache License 2.0
@Override
public Map<String, INDArray> getGradientsFromFlattened(NeuralNetConfiguration conf, INDArray gradientView) {
    org.deeplearning4j.nn.conf.ocnn.OCNNOutputLayer ocnnOutputLayer =
            (org.deeplearning4j.nn.conf.ocnn.OCNNOutputLayer) conf.getLayer();
    Map<String, INDArray> params = Collections.synchronizedMap(new LinkedHashMap<String, INDArray>());
    val nIn = ocnnOutputLayer.getNIn();
    val hiddenLayer = ocnnOutputLayer.getHiddenSize();
    val firstLayerWeightLength = hiddenLayer;
    val secondLayerLength = nIn * hiddenLayer;
    INDArray weightView = gradientView.get(point(0), interval(0, firstLayerWeightLength))
            .reshape('f', 1, hiddenLayer);
    INDArray vView = gradientView.get(point(0),
            NDArrayIndex.interval(firstLayerWeightLength, firstLayerWeightLength + secondLayerLength))
            .reshape('f', nIn, hiddenLayer);
    params.put(W_KEY, weightView);
    params.put(V_KEY, vView);
    params.put(R_KEY, gradientView.get(point(0), point(gradientView.length() - 1)));
    return params;
}
Example #3
Source File: TwoPointApproximationTest.java From nd4j with Apache License 2.0
@Test
public void testLinspaceDerivative() throws Exception {
    String basePath = "/two_points_approx_deriv_numpy/";
    INDArray linspace = Nd4j.createNpyFromInputStream(new ClassPathResource(basePath + "x.npy").getInputStream());
    INDArray yLinspace = Nd4j.createNpyFromInputStream(new ClassPathResource(basePath + "y.npy").getInputStream());
    Function<INDArray, INDArray> f = new Function<INDArray, INDArray>() {
        @Override
        public INDArray apply(INDArray indArray) {
            return indArray.add(1);
        }
    };

    INDArray test = TwoPointApproximation.approximateDerivative(f, linspace, null, yLinspace,
            Nd4j.create(new double[] {Float.MIN_VALUE, Float.MAX_VALUE}));
    INDArray npLoad = Nd4j.createNpyFromInputStream(new ClassPathResource(basePath + "approx_deriv_small.npy").getInputStream());
    assertEquals(npLoad, test);
    System.out.println(test);
}
Example #4
Source File: EvaluationBinaryTest.java From deeplearning4j with Apache License 2.0
@Test
public void testTimeSeriesEval() {
    int[] shape = {2, 4, 3};
    Nd4j.getRandom().setSeed(12345);
    INDArray labels = Nd4j.getExecutioner().exec(new BernoulliDistribution(Nd4j.createUninitialized(shape), 0.5));
    INDArray predicted = Nd4j.rand(shape);
    INDArray mask = Nd4j.getExecutioner().exec(new BernoulliDistribution(Nd4j.createUninitialized(shape), 0.5));

    EvaluationBinary eb1 = new EvaluationBinary();
    eb1.eval(labels, predicted, mask);

    EvaluationBinary eb2 = new EvaluationBinary();
    for (int i = 0; i < shape[2]; i++) {
        INDArray l = labels.get(NDArrayIndex.all(), NDArrayIndex.all(), NDArrayIndex.point(i));
        INDArray p = predicted.get(NDArrayIndex.all(), NDArrayIndex.all(), NDArrayIndex.point(i));
        INDArray m = mask.get(NDArrayIndex.all(), NDArrayIndex.all(), NDArrayIndex.point(i));
        eb2.eval(l, p, m);
    }

    assertEquals(eb2.stats(), eb1.stats());
}
Example #5
Source File: LossL1.java From nd4j with Apache License 2.0
public INDArray scoreArray(INDArray labels, INDArray preOutput, IActivation activationFn, INDArray mask) {
    if (labels.size(1) != preOutput.size(1)) {
        throw new IllegalArgumentException(
                "Labels array numColumns (size(1) = " + labels.size(1) + ") does not match output layer"
                        + " number of outputs (nOut = " + preOutput.size(1) + ") ");
    }
    INDArray scoreArr;
    //INDArray output = Nd4j.getExecutioner().execAndReturn(Nd4j.getOpFactory().createTransform(activationFn, preOutput.dup()));
    INDArray output = activationFn.getActivation(preOutput.dup(), true);
    scoreArr = output.subi(labels);
    Nd4j.getExecutioner().execAndReturn(Nd4j.getOpFactory().createTransform("abs", scoreArr));

    //Weighted loss function
    if (weights != null) {
        if (weights.length() != output.size(1)) {
            throw new IllegalStateException("Weights vector (length " + weights.length()
                    + ") does not match output.size(1)=" + output.size(1));
        }
        scoreArr.muliRowVector(weights);
    }

    if (mask != null) {
        LossUtil.applyMask(scoreArr, mask);
    }
    return scoreArr;
}
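For reference, the per-column quantity this method assembles (before any masking) is the weighted L1 score

    s_j = w_j \, | \hat{y}_j - y_j |, \qquad \hat{y} = \text{activationFn}(\text{preOutput}),

where the weight vector w is optional; the loss for one example is the sum of s_j over the output columns j.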
Example #6
Source File: BaseLapack.java From nd4j with Apache License 2.0
@Override
public void potrf(INDArray A, boolean lower) {
    // FIXME: int cast
    if (A.columns() > Integer.MAX_VALUE)
        throw new ND4JArraySizeException();

    byte uplo = (byte) (lower ? 'L' : 'U'); // upper or lower part of the factor desired?
    int n = (int) A.columns();

    INDArray INFO = Nd4j.createArrayFromShapeBuffer(Nd4j.getDataBufferFactory().createInt(1),
            Nd4j.getShapeInfoProvider().createShapeInformation(new int[] {1, 1}).getFirst());

    if (A.data().dataType() == DataBuffer.Type.DOUBLE)
        dpotrf(uplo, n, A, INFO);
    else if (A.data().dataType() == DataBuffer.Type.FLOAT)
        spotrf(uplo, n, A, INFO);
    else
        throw new UnsupportedOperationException();

    if (INFO.getInt(0) < 0) {
        throw new Error("Parameter #" + INFO.getInt(0) + " to potrf() was not valid");
    } else if (INFO.getInt(0) > 0) {
        throw new Error("The matrix is not positive definite! (potrf fails @ order " + INFO.getInt(0) + ")");
    }
}
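For context, LAPACK's potrf routines compute the Cholesky factorization of a symmetric positive-definite matrix A:

    A = L L^T \ (\text{lower} = \text{true}) \qquad \text{or} \qquad A = U^T U \ (\text{lower} = \text{false}),

with L lower-triangular and U upper-triangular. A positive INFO value from LAPACK means the leading minor of that order is not positive definite, which is why the second branch above reports a non-positive-definite matrix.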
Example #7
Source File: LossMixtureDensity.java From nd4j with Apache License 2.0
/**
 * This method calculates 'phi', which is the probability
 * density function (see Bishop 23).
 *
 * @param diffSquared This is the squared 'x-mu' term of the Gaussian distribution (distance between 'x'
 *                    and the mean value of the distribution).
 * @param sigma       This is the standard deviation of the Gaussian distribution.
 * @return an array of shape [nsamples, nlabels, ndistributions] which contains the probability density (phi)
 *         for each of the samples * labels * distributions for the given x, sigma, mu.
 */
private INDArray phi(INDArray diffSquared, INDArray sigma) {
    // 1/(sqrt(2*pi)*s)^L * e^(-(x-u)^2 / (2*s^2)), with L = mLabelWidth
    INDArray minustwovariance = sigma.mul(sigma).muli(2).negi();

    // This is phi_i(x, mu, sigma)
    INDArray likelihoods = Transforms.exp(diffSquared.divi(minustwovariance))
            .divi(Transforms.pow(sigma.mul(SQRT_TWO_PI), (double) mLabelWidth));

    return likelihoods;
}
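In LaTeX notation, the density this method evaluates (with L = mLabelWidth output dimensions sharing one sigma per mixture component) is

    \phi(x; \mu, \sigma) = \frac{1}{(\sqrt{2\pi}\,\sigma)^{L}} \exp\!\left(-\frac{\|x - \mu\|^2}{2\sigma^2}\right),

where diffSquared supplies \|x - \mu\|^2 and minustwovariance holds -2\sigma^2.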
Example #8
Source File: BinomialDistribution.java From deeplearning4j with Apache License 2.0
/**
 * This op fills Z with a binomial distribution over the given number of trials,
 * with a single given probability for all trials.
 *
 * @param z           the array to fill
 * @param trials      the number of trials
 * @param probability the success probability shared by all trials
 */
public BinomialDistribution(@NonNull INDArray z, int trials, double probability) {
    super(z, z, z);
    this.trials = trials;
    this.probability = probability;
    this.extraArgs = new Object[] {(double) this.trials, this.probability};
}
Example #9
Source File: BasicBroadcastTests.java From deeplearning4j with Apache License 2.0
@Test
public void basicBroadcastTest_1() {
    val x = Nd4j.create(DataType.FLOAT, 3, 5);
    val y = Nd4j.createFromArray(new float[] {1.f, 1.f, 1.f, 1.f, 1.f});
    val e = Nd4j.create(DataType.FLOAT, 3, 5).assign(1.f);

    // inplace setup
    val op = new AddOp(new INDArray[] {x, y}, new INDArray[] {x});

    Nd4j.exec(op);

    assertEquals(e, x);
}
Example #10
Source File: Tsne.java From deeplearning4j with Apache License 2.0
/**
 * Computes a Gaussian kernel given a vector of squared distances.
 *
 * @param d    the data (squared distances)
 * @param beta the precision of the Gaussian (beta = 1/(2*sigma^2))
 * @return a pair of the entropy H and the normalized probabilities P
 */
public Pair<Double, INDArray> hBeta(INDArray d, double beta) {
    INDArray P = exp(d.neg().muli(beta));
    double sumP = P.sumNumber().doubleValue();
    double logSumP = FastMath.log(sumP);
    Double H = logSumP + ((beta * (d.mul(P).sumNumber().doubleValue())) / sumP);
    P.divi(sumP);
    return new Pair<>(H, P);
}
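The pair returned corresponds to the standard t-SNE perplexity calculation: with precision \beta,

    P_j = \frac{e^{-\beta d_j}}{\sum_k e^{-\beta d_k}}, \qquad
    H = \log\!\Big(\sum_k e^{-\beta d_k}\Big) + \beta \sum_j d_j P_j,

where H is the Shannon entropy of P. This matches logSumP + beta * sum(d * P) / sumP in the code, since P is only normalized afterwards.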
Example #11
Source File: OpExecutionerTestsC.java From nd4j with Apache License 2.0
@Test
public void testSum6d() {
    INDArray arr6 = Nd4j.ones(1, 1, 4, 4, 4, 4);
    INDArray arr6s = arr6.sum(2, 3);

    for (int i = 0; i < arr6s.length(); i++)
        assertEquals(16, arr6s.getDouble(i), 1e-1);
}
Example #12
Source File: SameDiffTests.java From nd4j with Apache License 2.0
@Test
public void testDup() {
    SameDiff sameDiff = SameDiff.create();
    INDArray arr = Transforms.sigmoid(Nd4j.linspace(1, 8, 8)).reshape(2, 2, 2);
    SDVariable x = sameDiff.var("x", arr);
    SDVariable y = sameDiff.var("y", arr);
    SameDiff tg2 = sameDiff.dup();
}
Example #13
Source File: ClusterUtils.java From deeplearning4j with Apache License 2.0
public static ReduceOp createDistanceFunctionOp(Distance distanceFunction, INDArray x, INDArray y) {
    switch (distanceFunction) {
        case COSINE_DISTANCE:
            return new CosineDistance(x, y);
        case COSINE_SIMILARITY:
            return new CosineSimilarity(x, y);
        case DOT:
            return new Dot(x, y);
        case EUCLIDEAN:
            return new EuclideanDistance(x, y);
        case JACCARD:
            return new JaccardDistance(x, y);
        case MANHATTAN:
            return new ManhattanDistance(x, y);
        default:
            throw new IllegalStateException("Unknown distance function: " + distanceFunction);
    }
}
Example #14
Source File: ImageTransformProcessStepRunner.java From konduit-serving with Apache License 2.0
private INDArray permuteImageOrder(INDArray input) {
    if (!imageLoadingStepConfig.initialImageLayoutMatchesFinal()) {
        return ImagePermuter.permuteOrder(input,
                imageLoadingStepConfig.getImageProcessingInitialLayout(),
                imageLoadingStepConfig.getImageProcessingRequiredLayout());
    } else {
        return input;
    }
}
Example #15
Source File: ElementWiseVertexTest.java From deeplearning4j with Apache License 2.0
@Test
public void testElementWiseVertexForwardAdd() {
    int batchsz = 24;
    int featuresz = 17;
    ComputationGraphConfiguration cgc = new NeuralNetConfiguration.Builder().graphBuilder()
            .addInputs("input1", "input2", "input3")
            .addLayer("denselayer",
                    new DenseLayer.Builder().nIn(featuresz).nOut(1).activation(Activation.IDENTITY).build(),
                    "input1")
            /* denselayer is not actually used, but it seems that you _need_ to have trainable parameters, otherwise you get
             * Invalid shape: Requested INDArray shape [1, 0] contains dimension size values < 1 (all dimensions must be 1 or more)
             * at org.nd4j.linalg.factory.Nd4j.checkShapeValues(Nd4j.java:4877)
             * at org.nd4j.linalg.factory.Nd4j.create(Nd4j.java:4867)
             * at org.nd4j.linalg.factory.Nd4j.create(Nd4j.java:4820)
             * at org.nd4j.linalg.factory.Nd4j.create(Nd4j.java:3948)
             * at org.deeplearning4j.nn.graph.ComputationGraph.init(ComputationGraph.java:409)
             * at org.deeplearning4j.nn.graph.ComputationGraph.init(ComputationGraph.java:341)
             */
            .addVertex("elementwiseAdd", new ElementWiseVertex(ElementWiseVertex.Op.Add), "input1", "input2", "input3")
            .addLayer("Add", new ActivationLayer.Builder().activation(Activation.IDENTITY).build(), "elementwiseAdd")
            .setOutputs("Add", "denselayer").build();
    ComputationGraph cg = new ComputationGraph(cgc);
    cg.init();

    INDArray input1 = Nd4j.rand(batchsz, featuresz);
    INDArray input2 = Nd4j.rand(batchsz, featuresz);
    INDArray input3 = Nd4j.rand(batchsz, featuresz);
    INDArray target = input1.dup().addi(input2).addi(input3);

    INDArray output = cg.output(input1, input2, input3)[0];
    INDArray squared = output.sub(target.castTo(output.dataType()));
    double rms = squared.mul(squared).sumNumber().doubleValue();
    Assert.assertEquals(0.0, rms, this.epsilon);
}
Example #16
Source File: LossFunctionTestCase.java From jstarcraft-rns with Apache License 2.0
@Test
public void testScore() throws Exception {
    EnvironmentContext context = EnvironmentFactory.getContext();
    Future<?> task = context.doTask(() -> {
        LinkedList<KeyValue<IActivation, ActivationFunction>> activetionList = new LinkedList<>();
        activetionList.add(new KeyValue<>(new ActivationSigmoid(), new SigmoidActivationFunction()));
        // activetionList.add(new KeyValue<>(new ActivationSoftmax(), new SoftMaxActivationFunction()));
        for (KeyValue<IActivation, ActivationFunction> keyValue : activetionList) {
            INDArray array = Nd4j.linspace(-2.5D, 2.0D, 10).reshape(5, 2);
            INDArray marks = Nd4j.create(new double[] {0D, 1D, 0D, 1D, 0D, 1D, 0D, 1D, 0D, 1D}).reshape(5, 2);
            ILossFunction oldFunction = getOldFunction(marks);
            double value = oldFunction.computeScore(marks, array.dup(), keyValue.getKey(), null, false);

            Nd4jMatrix input = getMatrix(array.dup());
            Nd4jMatrix output = new Nd4jMatrix(Nd4j.zeros(input.getRowSize(), input.getColumnSize()));
            ActivationFunction function = keyValue.getValue();
            function.forward(input, output);
            LossFunction newFunction = getNewFunction(marks, function);
            newFunction.doCache(getMatrix(marks), output);
            double score = newFunction.computeScore(getMatrix(marks), output, null);
            System.out.println(value);
            System.out.println(score);
            if (Math.abs(value - score) > MathUtility.EPSILON) {
                Assert.fail();
            }
        }
    });
    task.get();
}
Example #17
Source File: TestInvertMatrices.java From nd4j with Apache License 2.0
/**
 * Check the Moore-Penrose conditions for a pseudo-inverse.
 *
 * @param A         Initial matrix
 * @param B         Pseudo-inverse of {@code A}
 * @param precision Precision when comparing matrix elements
 */
private void checkMoorePenroseConditions(INDArray A, INDArray B, double precision) {
    // ABA=A (AB need not be the general identity matrix, but it maps all column vectors of A to themselves)
    assertTrue(A.equalsWithEps(A.mmul(B).mmul(A), precision));
    // BAB=B (B is a weak inverse for the multiplicative semigroup)
    assertTrue(B.equalsWithEps(B.mmul(A).mmul(B), precision));
    // (AB)^T=AB (AB is Hermitian)
    assertTrue((A.mmul(B)).transpose().equalsWithEps(A.mmul(B), precision));
    // (BA)^T=BA (BA is also Hermitian)
    assertTrue((B.mmul(A)).transpose().equalsWithEps(B.mmul(A), precision));
}
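The four assertions above are exactly the defining conditions of the Moore-Penrose pseudo-inverse B = A^+:

    A B A = A, \qquad B A B = B, \qquad (A B)^T = A B, \qquad (B A)^T = B A.

For real matrices, "Hermitian" reduces to symmetric, hence the plain transpose checks in the code.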
Example #18
Source File: NormalizerTests.java From deeplearning4j with Apache License 2.0
@Before
public void randomData() {
    Nd4j.getRandom().setSeed(12345);
    batchSize = 13;
    batchCount = 20;
    lastBatch = batchSize / 2;
    INDArray origFeatures = Nd4j.rand(batchCount * batchSize + lastBatch, 10);
    INDArray origLabels = Nd4j.rand(batchCount * batchSize + lastBatch, 3);
    data = new DataSet(origFeatures, origLabels);
    stdScaler = new NormalizerStandardize();
    minMaxScaler = new NormalizerMinMaxScaler();
}
Example #19
Source File: GemvParameters.java From nd4j with Apache License 2.0
private INDArray copyIfNecessary(INDArray arr) {
    //See also: Shape.toMmulCompatible - want same conditions here and there
    //Check if matrix values are contiguous in memory. If not: dup
    //Contiguous for c if: stride[0] == shape[1] and stride[1] == 1
    //Contiguous for f if: stride[0] == 1 and stride[1] == shape[0]
    if (arr.ordering() == 'c' && (arr.stride(0) != arr.size(1) || arr.stride(1) != 1))
        return arr.dup();
    else if (arr.ordering() == 'f' && (arr.stride(0) != 1 || arr.stride(1) != arr.size(0)))
        return arr.dup();
    else if (arr.elementWiseStride() < 1)
        return arr.dup();
    return arr;
}
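To make the contiguity conditions concrete, here is a small hypothetical sketch (assuming a recent ND4J version with the DataType-based factory methods; the class name is illustrative). A column-slice view keeps the parent's strides, so it fails the 'c'-order check above and would be dup()-ed before being handed to BLAS gemv.

import org.nd4j.linalg.api.buffer.DataType;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;
import org.nd4j.linalg.indexing.NDArrayIndex;

public class ContiguityDemo {
    public static void main(String[] args) {
        // A freshly allocated 'c'-order 3x4 matrix: strides [4, 1], so stride(0) == size(1) -> contiguous
        INDArray full = Nd4j.create(DataType.FLOAT, 3, 4);
        // A view of the first two columns: shape [3, 2], but strides are still [4, 1]
        INDArray view = full.get(NDArrayIndex.all(), NDArrayIndex.interval(0, 2));
        // For the view, stride(0) == 4 != size(1) == 2, so copyIfNecessary would dup() it
        System.out.println(java.util.Arrays.toString(full.stride()) + " vs "
                + java.util.Arrays.toString(view.stride()));
    }
}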
Example #20
Source File: CudaIndexReduceTests.java From nd4j with Apache License 2.0
@Test
public void testIMax4() {
    INDArray array1 = Nd4j.linspace(1, 1000, 128000).reshape(128, 1000);

    long time1 = System.currentTimeMillis();
    INDArray argMax = Nd4j.argMax(array1, 0, 1);
    long time2 = System.currentTimeMillis();

    System.out.println("Execution time: " + (time2 - time1));

    assertEquals(127999f, argMax.getFloat(0), 0.001f);
}
Example #21
Source File: SameDiffTests.java From deeplearning4j with Apache License 2.0
@Test
public void testVariableRenaming2() {
    SameDiff sd = SameDiff.create();
    SDVariable v1 = sd.placeHolder("x", DataType.FLOAT, 3, 4);
    SDVariable v2 = sd.var("y", Nd4j.rand(DataType.FLOAT, 4, 5));
    SDVariable v3 = v1.mmul("oldName", v2);
    SDVariable v4 = v3.std("out", false);

    INDArray out = sd.outputSingle(Collections.singletonMap("x", Nd4j.rand(DataType.FLOAT, 3, 4)), "out");

    sd.setTrainingConfig(TrainingConfig.builder()
            .updater(new Adam(1e-3))
            .dataSetFeatureMapping("x")
            .markLabelsUnused()
            .build());

    sd.fit(new DataSet(Nd4j.rand(DataType.FLOAT, 3, 4), null));

    v3.rename("newName");

    sd.fit(new DataSet(Nd4j.rand(DataType.FLOAT, 3, 4), null));
}
Example #22
Source File: Dl4jMlpClassifier.java From wekaDeeplearning4j with GNU General Public License v3.0
/**
 * Get a peek at the features of the {@code iterator}'s first batch using the given instances.
 *
 * @return Features of the first batch
 */
protected INDArray getFirstBatchFeatures(Instances data) throws Exception {
    final DataSetIterator it = getDataSetIterator(data, CacheMode.NONE);
    if (!it.hasNext()) {
        throw new RuntimeException("Iterator was unexpectedly empty.");
    }
    final INDArray features = Utils.getNext(it).getFeatures();
    it.reset();
    return features;
}
Example #23
Source File: KDTree.java From deeplearning4j with Apache License 2.0
private void knn(KDNode node, HyperRect rect, List<Pair<Float, INDArray>> best, int _disc) {
    if (node == null || rect == null || rect.minDistance(currentPoint, minDistance) > currentDistance)
        return;
    int _discNext = (_disc + 1) % dims;
    float distance = Nd4j.getExecutioner()
            .execAndReturn(new EuclideanDistance(currentPoint, node.point, minDistance))
            .getFinalResult().floatValue();

    if (distance <= currentDistance) {
        best.add(Pair.of(distance, node.getPoint()));
    }

    HyperRect lower = rect.getLower(node.point, _disc);
    HyperRect upper = rect.getUpper(node.point, _disc);
    knn(node.getLeft(), lower, best, _discNext);
    knn(node.getRight(), upper, best, _discNext);
}
Example #24
Source File: TestNDArrayToWritablesFunction.java From DataVec with Apache License 2.0
@Test
public void testNDArrayToWritablesArray() throws Exception {
    INDArray arr = Nd4j.arange(5);
    List<Writable> expected = Arrays.asList((Writable) new NDArrayWritable(arr));
    List<Writable> actual = new NDArrayToWritablesFunction(true).call(arr);
    assertEquals(expected, actual);
}
Example #25
Source File: OpExecutionerTests.java From nd4j with Apache License 2.0
@Test
public void testDescriptiveStats() {
    OpExecutioner opExecutioner = Nd4j.getExecutioner();
    INDArray x = Nd4j.linspace(1, 5, 5);

    Mean mean = new Mean(x);
    opExecutioner.exec(mean);
    assertEquals(getFailureMessage(), 3.0, mean.getFinalResult().doubleValue(), 1e-1);

    Variance variance = new Variance(x.dup(), true);
    opExecutioner.exec(variance);
    assertEquals(getFailureMessage(), 2.5, variance.getFinalResult().doubleValue(), 1e-1);
}
Example #26
Source File: RepeatVectorTest.java From deeplearning4j with Apache License 2.0
@Test
public void testRepeatVector() {
    double[] arr = new double[] {1., 2., 3., 1., 2., 3., 1., 2., 3., 1., 2., 3.};
    INDArray expectedOut = Nd4j.create(arr, new long[] {1, 3, REPEAT}, 'f');
    INDArray input = Nd4j.create(new double[] {1., 2., 3.}, new long[] {1, 3});
    Layer layer = getRepeatVectorLayer();

    INDArray output = layer.activate(input, false, LayerWorkspaceMgr.noWorkspaces());
    assertTrue(Arrays.equals(expectedOut.shape(), output.shape()));
    assertEquals(expectedOut, output);

    INDArray epsilon = Nd4j.ones(1, 3, 4);
    Pair<Gradient, INDArray> out = layer.backpropGradient(epsilon, LayerWorkspaceMgr.noWorkspaces());
    INDArray outEpsilon = out.getSecond();
    INDArray expectedEpsilon = Nd4j.create(new double[] {4., 4., 4.}, new long[] {1, 3});
    assertEquals(expectedEpsilon, outEpsilon);
}
Example #27
Source File: CpuSparseNDArrayFactory.java From nd4j with Apache License 2.0
@Override
public INDArray sort(INDArray x, boolean descending) {
    if (x.isScalar())
        return x;

    if (x.data().dataType() == DataBuffer.Type.FLOAT) {
        NativeOpsHolder.getInstance().getDeviceNativeOps().sortFloat(null,
                (FloatPointer) x.data().addressPointer(),
                (LongPointer) x.shapeInfoDataBuffer().addressPointer(),
                descending);
    } else if (x.data().dataType() == DataBuffer.Type.DOUBLE) {
        NativeOpsHolder.getInstance().getDeviceNativeOps().sortDouble(null,
                (DoublePointer) x.data().addressPointer(),
                (LongPointer) x.shapeInfoDataBuffer().addressPointer(),
                descending);
    } else {
        throw new UnsupportedOperationException("Unknown datatype " + x.data().dataType());
    }

    return x;
}
Example #28
Source File: AbstractCompressor.java From nd4j with Apache License 2.0
/**
 * Inplace compression of an INDArray.
 *
 * @param array the array to compress in place
 */
@Override
public void compressi(INDArray array) {
    // TODO: lift this restriction
    if (array.isView())
        throw new UnsupportedOperationException("Impossible to apply inplace compression on View");

    array.setData(compress(array.data()));
    array.markAsCompressed(true);
}
Example #29
Source File: LossSquaredHinge.java From deeplearning4j with Apache License 2.0
@Override
public INDArray computeScoreArray(INDArray labels, INDArray preOutput, IActivation activationFn, INDArray mask) {
    INDArray scoreArr = scoreArray(labels, preOutput, activationFn, mask);
    BooleanIndexing.replaceWhere(scoreArr, 0.0, Conditions.lessThan(0.0)); // max(0, 1-y*yhat)
    scoreArr.muli(scoreArr);
    return scoreArr.sum(true, 1);
}
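This matches the squared hinge loss. Per output unit, with labels y in {-1, +1},

    \ell(y, \hat{y}) = \max(0,\; 1 - y\,\hat{y})^2,

where scoreArray supplies 1 - y*yhat, replaceWhere clamps negatives to zero, muli squares in place, and the final sum reduces over the output dimension.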
Example #30
Source File: NearestNeighborTest.java From deeplearning4j with Apache License 2.0
@Test
public void vpTreeTest() throws Exception {
    INDArray matrix = Nd4j.rand(new int[] {400, 10});
    INDArray rowVector = matrix.getRow(70);
    INDArray resultArr = Nd4j.zeros(400, 1);
    Executor executor = Executors.newSingleThreadExecutor();
    VPTree vpTree = new VPTree(matrix);
    System.out.println("Ran!");
}