org.nd4j.linalg.ops.transforms.Transforms Java Examples
The following examples show how to use org.nd4j.linalg.ops.transforms.Transforms. Each example is drawn from an open-source project; its source file and license are noted above the code.
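Most Transforms methods follow the same pattern: a static call that either returns a new array or, controlled by a boolean copy flag, overwrites its input in place, while the pairwise helpers (cosineSim, euclideanDistance, manhattanDistance) return a scalar double. A minimal sketch of that pattern, with illustrative values not taken from any example below:

    import org.nd4j.linalg.api.ndarray.INDArray;
    import org.nd4j.linalg.factory.Nd4j;
    import org.nd4j.linalg.ops.transforms.Transforms;

    public class TransformsBasics {
        public static void main(String[] args) {
            INDArray x = Nd4j.create(new double[] {-1.0, 0.0, 2.0});

            // true = operate on a duplicate; x is left untouched
            INDArray absCopy = Transforms.abs(x, true);

            // false = in place; x itself now holds |x|
            Transforms.abs(x, false);

            // pairwise reductions return a plain double
            double sim = Transforms.cosineSim(absCopy, x);
            System.out.println(sim); // 1.0: both arrays now hold the same values
        }
    }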
Example #1
Source File: LongTests.java From nd4j with Apache License 2.0

    @Test
    public void testLongTadOp1() {
        double exp = Transforms.manhattanDistance(Nd4j.create(1000).assign(1.0), Nd4j.create(1000).assign(2.0));

        INDArray hugeX = Nd4j.create(2200000, 1000).assign(1.0);
        INDArray hugeY = Nd4j.create(1, 1000).assign(2.0);

        for (int x = 0; x < hugeX.rows(); x++) {
            assertEquals("Failed at row " + x, 1000, hugeX.getRow(x).sumNumber().intValue());
        }

        INDArray result = Nd4j.getExecutioner().exec(new ManhattanDistance(hugeX, hugeY, hugeX.lengthLong()), 1);
        for (int x = 0; x < hugeX.rows(); x++) {
            assertEquals(exp, result.getDouble(x), 1e-5);
        }
    }
Example #2
Source File: PCA.java From deeplearning4j with Apache License 2.0

    /**
     * Return a reduced basis set that covers a certain fraction of the variance of the data.
     *
     * @param variance The desired fractional variance (0 to 1); the variance covered by the
     *                 returned basis will always be greater than this value.
     * @return The basis vectors as columns, size <i>N</i> rows by <i>ndims</i> columns,
     *         where <i>ndims</i> is less than or equal to <i>N</i>
     */
    public INDArray reducedBasis(double variance) {
        INDArray vars = Transforms.pow(eigenvalues, -0.5, true);
        double res = vars.sumNumber().doubleValue();
        double total = 0.0;
        int ndims = 0;
        for (int i = 0; i < vars.columns(); i++) {
            ndims++;
            total += vars.getDouble(i);
            if (total / res > variance)
                break;
        }
        INDArray result = Nd4j.create(eigenvectors.rows(), ndims);
        for (int i = 0; i < ndims; i++)
            result.putColumn(i, eigenvectors.getColumn(i));
        return result;
    }
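A minimal usage sketch for reducedBasis, assuming a PCA instance has already been constructed from a data matrix; the constructor and the projection step shown here are assumptions, not part of the excerpt above:

    INDArray data = Nd4j.rand(500, 10);        // 500 samples, 10 features
    PCA pca = new PCA(data);                   // assumed constructor: fits the eigen-decomposition
    INDArray basis = pca.reducedBasis(0.95);   // columns covering more than 95% of the variance
    INDArray reduced = data.mmul(basis);       // project the data onto the reduced basis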
Example #3
Source File: PCA.java From deeplearning4j with Apache License 2.0

    /**
     * This method performs a dimensionality reduction, including principal components
     * that cover a fraction of the total variance of the system. It does all calculations
     * about the mean.
     *
     * @param in A matrix of datapoints as rows, where columns are features with fixed number N
     * @param variance The desired fraction of the total variance required
     * @return The reduced basis set
     */
    public static INDArray pca2(INDArray in, double variance) {
        // let's calculate the covariance and the mean
        INDArray[] covmean = covarianceMatrix(in);
        // use the covariance matrix (inverse) to find "force constants" and then break into orthonormal
        // unit vector components
        INDArray[] pce = principalComponents(covmean[0]);
        // calculate the variance of each component
        INDArray vars = Transforms.pow(pce[1], -0.5, true);
        double res = vars.sumNumber().doubleValue();
        double total = 0.0;
        int ndims = 0;
        for (int i = 0; i < vars.columns(); i++) {
            ndims++;
            total += vars.getDouble(i);
            if (total / res > variance)
                break;
        }
        INDArray result = Nd4j.create(in.columns(), ndims);
        for (int i = 0; i < ndims; i++)
            result.putColumn(i, pce[0].getColumn(i));
        return result;
    }
Example #4
Source File: GlobalPoolingLayer.java From deeplearning4j with Apache License 2.0

    private INDArray activateHelperFullArray(INDArray inputArray, int[] poolDim) {
        switch (poolingType) {
            case MAX:
                return inputArray.max(poolDim);
            case AVG:
                return inputArray.mean(poolDim);
            case SUM:
                return inputArray.sum(poolDim);
            case PNORM:
                //P norm: https://arxiv.org/pdf/1311.1780.pdf
                //out = (1/N * sum( |in| ^ p) ) ^ (1/p)
                int pnorm = layerConf().getPnorm();
                INDArray abs = Transforms.abs(inputArray, true);
                Transforms.pow(abs, pnorm, false);
                INDArray pNorm = abs.sum(poolDim);
                return Transforms.pow(pNorm, 1.0 / pnorm, false);
            default:
                throw new RuntimeException("Unknown or not supported pooling type: " + poolingType + " " + layerId());
        }
    }
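Note that the PNORM branch computes (sum |in|^p)^(1/p) and omits the 1/N factor that appears in the comment's formula. A quick hand check of the quantity actually computed, in plain Java:

    double p = 2.0;
    double[] in = {3.0, -4.0};
    double sum = 0.0;
    for (double v : in) {
        sum += Math.pow(Math.abs(v), p);   // |in|^p, summed over the pooled values
    }
    double pNorm = Math.pow(sum, 1.0 / p); // (9 + 16)^(1/2) = 5.0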
Example #5
Source File: LossMAPE.java From deeplearning4j with Apache License 2.0

    public INDArray scoreArray(INDArray labels, INDArray preOutput, IActivation activationFn, INDArray mask) {
        if (!labels.equalShapes(preOutput)) {
            Preconditions.throwEx("Labels and preOutput must have equal shapes: got shapes %s vs %s", labels.shape(), preOutput.shape());
        }
        labels = labels.castTo(preOutput.dataType());   //No-op if already correct dtype
        INDArray scoreArr;
        INDArray output = activationFn.getActivation(preOutput.dup(), true);
        scoreArr = output.rsubi(labels).divi(labels);
        Transforms.abs(scoreArr, false);
        scoreArr.muli(100.0 / labels.size(1));

        //Weighted loss function
        if (weights != null) {
            if (weights.length() != output.size(1)) {
                throw new IllegalStateException("Weights vector (length " + weights.length()
                        + ") does not match output.size(1)=" + output.size(1));
            }
            scoreArr.muliRowVector(weights.castTo(scoreArr.dataType()));
        }

        if (mask != null) {
            LossUtil.applyMask(scoreArr, mask);
        }
        return scoreArr;
    }
Example #6
Source File: ParagraphVectors.java From deeplearning4j with Apache License 2.0

    /**
     * This method returns similarity of the document to a specific label, based on mean value
     *
     * @param document the document, as a list of vocabulary words
     * @param label the label to compare against
     * @return cosine similarity between the inferred document vector and the label vector
     */
    public double similarityToLabel(List<VocabWord> document, String label) {
        if (document.isEmpty())
            throw new IllegalStateException("Document has no words inside");

        /*
        INDArray arr = Nd4j.create(document.size(), this.layerSize);
        for (int i = 0; i < document.size(); i++) {
            arr.putRow(i, getWordVectorMatrix(document.get(i).getWord()));
        }
        */

        INDArray docMean = inferVector(document); //arr.mean(0);
        INDArray otherVec = getWordVectorMatrix(label);
        double sim = Transforms.cosineSim(docMean, otherVec);
        return sim;
    }
Example #7
Source File: PCA.java From nd4j with Apache License 2.0

    /**
     * This method performs a dimensionality reduction, including principal components
     * that cover a fraction of the total variance of the system. It does all calculations
     * about the mean.
     *
     * @param in A matrix of datapoints as rows, where columns are features with fixed number N
     * @param variance The desired fraction of the total variance required
     * @return The reduced basis set
     */
    public static INDArray pca2(INDArray in, double variance) {
        // let's calculate the covariance and the mean
        INDArray[] covmean = covarianceMatrix(in);
        // use the covariance matrix (inverse) to find "force constants" and then break into orthonormal
        // unit vector components
        INDArray[] pce = principalComponents(covmean[0]);
        // calculate the variance of each component
        INDArray vars = Transforms.pow(pce[1], -0.5, true);
        double res = vars.sumNumber().doubleValue();
        double total = 0.0;
        int ndims = 0;
        for (int i = 0; i < vars.columns(); i++) {
            ndims++;
            total += vars.getDouble(i);
            if (total / res > variance)
                break;
        }
        INDArray result = Nd4j.create(in.columns(), ndims);
        for (int i = 0; i < ndims; i++)
            result.putColumn(i, pce[0].getColumn(i));
        return result;
    }
Example #8
Source File: ParagraphVectors.java From deeplearning4j with Apache License 2.0

    /**
     * Predict several labels based on the document.
     * Computes a similarity wrt the mean of the
     * representation of words in the document
     *
     * @param document the document
     * @return possible labels in descending order
     */
    public Collection<String> predictSeveral(List<VocabWord> document, int limit) {
        /*
            This code was transferred from the original ParagraphVectors DL4j implementation, and is yet to be tested
         */
        if (document.isEmpty())
            throw new IllegalStateException("Document has no words inside");

        /*
        INDArray arr = Nd4j.create(document.size(), this.layerSize);
        for (int i = 0; i < document.size(); i++) {
            arr.putRow(i, getWordVectorMatrix(document.get(i).getWord()));
        }
        */

        INDArray docMean = inferVector(document); //arr.mean(0);
        Counter<String> distances = new Counter<>();

        for (String s : labelsSource.getLabels()) {
            INDArray otherVec = getWordVectorMatrix(s);
            double sim = Transforms.cosineSim(docMean, otherVec);
            log.debug("Similarity inside: [" + s + "] -> " + sim);
            distances.incrementCount(s, (float) sim);
        }

        val keys = distances.keySetSorted();
        return keys.subList(0, Math.min(limit, keys.size()));
    }
Example #9
Source File: TestRandomProjection.java From nd4j with Apache License 2.0

    @Test
    public void testEmbedding() {
        INDArray z1 = Nd4j.randn(2000, 400);
        INDArray z2 = z1.dup();
        INDArray result = Transforms.allEuclideanDistances(z1, z2, 1);

        RandomProjection rp = new RandomProjection(0.5);
        INDArray zp = rp.project(z1);
        INDArray zp2 = zp.dup();
        INDArray projRes = Transforms.allEuclideanDistances(zp, zp2, 1);

        // check that the automatically tuned values for the density respect the
        // contract for eps: pairwise distances are preserved according to the
        // Johnson-Lindenstrauss lemma
        INDArray ratios = projRes.div(result);

        for (int i = 0; i < ratios.length(); i++) {
            double val = ratios.getDouble(i);
            // this avoids the NaNs we get along the diagonal
            if (val == val) {
                assertTrue(ratios.getDouble(i) < 1.5);
            }
        }
    }
Example #10
Source File: LossPoisson.java From nd4j with Apache License 2.0

    public INDArray scoreArray(INDArray labels, INDArray preOutput, IActivation activationFn, INDArray mask) {
        if (labels.size(1) != preOutput.size(1)) {
            throw new IllegalArgumentException(
                    "Labels array numColumns (size(1) = " + labels.size(1) + ") does not match output layer"
                            + " number of outputs (nOut = " + preOutput.size(1) + ") ");
        }
        /*
         mean of (yhat - y * log(yhat))
         */
        //INDArray postOutput = Nd4j.getExecutioner().execAndReturn(Nd4j.getOpFactory().createTransform(activationFn, preOutput.dup()));
        INDArray postOutput = activationFn.getActivation(preOutput.dup(), true);

        INDArray scoreArr = Transforms.log(postOutput);
        scoreArr.muli(labels);
        scoreArr = postOutput.sub(scoreArr);

        if (mask != null) {
            LossUtil.applyMask(scoreArr, mask);
        }
        return scoreArr;
    }
Example #11
Source File: NormalizerMinMaxScalerTest.java From deeplearning4j with Apache License 2.0

    @Test
    public void testGivenMaxMin() {
        double tolerancePerc = 1; // 1% of correct value
        int nSamples = 500;
        int nFeatures = 3;

        Nd4j.getRandom().setSeed(12345);
        INDArray featureSet = Nd4j.rand(nSamples, nFeatures);
        INDArray labelSet = Nd4j.zeros(nSamples, 1);
        DataSet sampleDataSet = new DataSet(featureSet, labelSet);

        double givenMin = -1;
        double givenMax = 1;
        NormalizerMinMaxScaler myNormalizer = new NormalizerMinMaxScaler(givenMin, givenMax);
        DataSet transformed = sampleDataSet.copy();

        myNormalizer.fit(sampleDataSet);
        myNormalizer.transform(transformed);
        myNormalizer.revert(transformed);

        INDArray delta = Transforms.abs(transformed.getFeatures().sub(sampleDataSet.getFeatures()))
                .div(sampleDataSet.getFeatures());
        double maxdeltaPerc = delta.max(0, 1).mul(100).getDouble(0);
        System.out.println("Delta: " + maxdeltaPerc);
        assertTrue(maxdeltaPerc < tolerancePerc);
    }
Example #12
Source File: IndexingTestsC.java From nd4j with Apache License 2.0

    @Test
    public void broadcastBug() throws Exception {
        INDArray a = Nd4j.create(new double[] {1.0, 2.0, 3.0, 4.0}, new int[] {2, 2});
        final INDArray col = a.get(NDArrayIndex.all(), NDArrayIndex.point(0));

        final INDArray aBad = col.broadcast(2, 2);
        final INDArray aGood = col.dup().broadcast(2, 2);
        System.out.println(aBad);
        System.out.println(aGood);
        assertTrue(Transforms.abs(aGood.sub(aBad).div(aGood)).maxNumber().doubleValue() < 0.01);
    }
Example #13
Source File: TransformsTest.java From nd4j with Apache License 2.0

    @Test
    public void testXor1() {
        INDArray x = Nd4j.create(new double[] {0, 0, 1, 0, 0});
        INDArray y = Nd4j.create(new double[] {0, 0, 1, 1, 0});
        INDArray exp = Nd4j.create(new double[] {0, 0, 0, 1, 0});

        INDArray z = Transforms.xor(x, y);
        assertEquals(exp, z);
    }
Example #14
Source File: TransformsTest.java From nd4j with Apache License 2.0

    @Test
    public void testArrayMinMax() {
        INDArray x = Nd4j.create(new double[] {1, 3, 5, 7});
        INDArray y = Nd4j.create(new double[] {2, 2, 6, 6});
        INDArray xCopy = x.dup();
        INDArray yCopy = y.dup();
        INDArray expMax = Nd4j.create(new double[] {2, 3, 6, 7});
        INDArray expMin = Nd4j.create(new double[] {1, 2, 5, 6});

        INDArray z1 = Transforms.max(x, y, true);
        INDArray z2 = Transforms.min(x, y, true);
        assertEquals(expMax, z1);
        assertEquals(expMin, z2);
        // Assert that x was not modified
        assertEquals(xCopy, x);

        Transforms.max(x, y, false);
        // Assert that x was modified
        assertEquals(expMax, x);
        // Assert that y was not modified
        assertEquals(yCopy, y);

        // Reset the modified x
        x = xCopy.dup();

        Transforms.min(x, y, false);
        // Assert that x was modified
        assertEquals(expMin, x);
        // Assert that y was not modified
        assertEquals(yCopy, y);
    }
Example #15
Source File: LogSoftMaxDerivative.java From nd4j with Apache License 2.0

    @Override
    public void exec() {
        //TODO add dimension arg. For now: hardcoded along dimension 1...
        INDArray softmax = Transforms.softmax(x, true);
        INDArray mul = softmax.mul(y);
        INDArray summed = mul.sum(1);

        Nd4j.getExecutioner().exec(new BroadcastSubOp(y, summed, z, 0));
    }
Example #16
Source File: TestSerializationFloatToDouble.java From nd4j with Apache License 2.0

    @Test
    public void testSerializationOnViewsJava() throws Exception {
        int length = 100;
        Nd4j.create(1);

        DataTypeUtil.setDTypeForContext(DataBuffer.Type.FLOAT);
        INDArray arr = Nd4j.linspace(1, length, length).reshape('c', 10, 10);
        INDArray sub = arr.get(NDArrayIndex.interval(5, 10), NDArrayIndex.interval(5, 10));

        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        try (ObjectOutputStream oos = new ObjectOutputStream(baos)) {
            oos.writeObject(sub);
        }
        byte[] bytes = baos.toByteArray();

        DataTypeUtil.setDTypeForContext(DataBuffer.Type.DOUBLE);
        System.out.println("The data opType is " + Nd4j.dataType());
        INDArray arr1 = Nd4j.linspace(1, length, length).reshape('c', 10, 10);
        INDArray sub1 = arr1.get(NDArrayIndex.interval(5, 10), NDArrayIndex.interval(5, 10));

        INDArray arr2;
        try (ObjectInputStream ois = new ObjectInputStream(new ByteArrayInputStream(bytes))) {
            arr2 = (INDArray) ois.readObject();
        }

        //assertEquals(sub, arr2);
        assertTrue(Transforms.abs(sub1.sub(arr2).div(sub1)).maxNumber().doubleValue() < 0.01);
    }
Example #17
Source File: TestSerializationDoubleToFloat.java From nd4j with Apache License 2.0

    @Test
    public void testSerializationOnViewsJava() throws Exception {
        int length = 100;
        Nd4j.create(1);

        DataTypeUtil.setDTypeForContext(DataBuffer.Type.DOUBLE);
        INDArray arr = Nd4j.linspace(1, length, length).reshape('c', 10, 10);
        INDArray sub = arr.get(NDArrayIndex.interval(5, 10), NDArrayIndex.interval(5, 10));

        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        try (ObjectOutputStream oos = new ObjectOutputStream(baos)) {
            oos.writeObject(sub);
        }
        byte[] bytes = baos.toByteArray();

        DataTypeUtil.setDTypeForContext(DataBuffer.Type.FLOAT);
        System.out.println("The data opType is " + Nd4j.dataType());
        INDArray arr1 = Nd4j.linspace(1, length, length).reshape('c', 10, 10);
        INDArray sub1 = arr1.get(NDArrayIndex.interval(5, 10), NDArrayIndex.interval(5, 10));

        INDArray arr2;
        try (ObjectInputStream ois = new ObjectInputStream(new ByteArrayInputStream(bytes))) {
            arr2 = (INDArray) ois.readObject();
        }

        //assertEquals(sub, arr2);
        assertTrue(Transforms.abs(sub1.sub(arr2).div(sub1)).maxNumber().doubleValue() < 0.01);
    }
Example #18
Source File: TestSerializationDoubleToFloat.java From nd4j with Apache License 2.0

    @Test
    public void testSerializationOnViewsNd4jWriteRead() throws Exception {
        int length = 100;
        Nd4j.create(1);

        DataTypeUtil.setDTypeForContext(DataBuffer.Type.DOUBLE);
        INDArray arr = Nd4j.linspace(1, length, length).reshape('c', 10, 10);
        INDArray sub = arr.get(NDArrayIndex.interval(5, 10), NDArrayIndex.interval(5, 10));

        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        try (DataOutputStream dos = new DataOutputStream(baos)) {
            Nd4j.write(sub, dos);
        }
        byte[] bytes = baos.toByteArray();

        //SET DATA TYPE TO FLOAT and initialize another array with the same contents
        //Nd4j.create(1);
        DataTypeUtil.setDTypeForContext(DataBuffer.Type.FLOAT);
        System.out.println("The data opType is " + Nd4j.dataType());
        INDArray arr1 = Nd4j.linspace(1, length, length).reshape('c', 10, 10);
        INDArray sub1 = arr1.get(NDArrayIndex.interval(5, 10), NDArrayIndex.interval(5, 10));

        INDArray arr2;
        try (DataInputStream dis = new DataInputStream(new ByteArrayInputStream(bytes))) {
            arr2 = Nd4j.read(dis);
        }

        //assertEquals(sub, arr2);
        assertTrue(Transforms.abs(sub1.sub(arr2).div(sub1)).maxNumber().doubleValue() < 0.01);
    }
Example #19
Source File: ActorCriticLoss.java From deeplearning4j with Apache License 2.0

    private INDArray scoreArray(INDArray labels, INDArray preOutput, IActivation activationFn, INDArray mask) {
        INDArray output = activationFn.getActivation(preOutput.dup(), true).addi(1e-5);
        INDArray logOutput = Transforms.log(output, true);
        INDArray entropy = output.muli(logOutput);
        INDArray scoreArr = logOutput.muli(labels).subi(entropy.muli(BETA));

        if (mask != null) {
            LossUtil.applyMask(scoreArr, mask);
        }
        return scoreArr;
    }
Example #20
Source File: SameDiffTests.java From nd4j with Apache License 2.0

    @Test
    public void testEval() {
        SameDiff sameDiff = SameDiff.create();
        INDArray arr = Nd4j.linspace(1, 4, 4);
        SDVariable x = sameDiff.var("x", arr);
        SDVariable sigmoid = sameDiff.sigmoid(x);
        INDArray assertion = Transforms.sigmoid(arr);
        INDArray[] eval = sameDiff.eval(Collections.singletonMap("x", arr));
        assertEquals(assertion, eval[0]);
    }
Example #21
Source File: ComplexNDArrayUtil.java From nd4j with Apache License 2.0

    /**
     * Center an array
     *
     * @param arr the arr to center
     * @param shape the shape of the array
     * @return the center portion of the array based on the
     * specified shape
     */
    public static IComplexNDArray center(IComplexNDArray arr, long[] shape) {
        if (arr.length() < ArrayUtil.prod(shape))
            return arr;
        for (int i = 0; i < shape.length; i++)
            if (shape[i] < 1)
                shape[i] = 1;

        INDArray shapeMatrix = NDArrayUtil.toNDArray(shape);
        INDArray currShape = NDArrayUtil.toNDArray(arr.shape());

        INDArray startIndex = Transforms.floor(currShape.sub(shapeMatrix).divi(Nd4j.scalar(2)));
        INDArray endIndex = startIndex.add(shapeMatrix);
        INDArrayIndex[] indexes = Indices.createFromStartAndEnd(startIndex, endIndex);

        if (shapeMatrix.length() > 1)
            return arr.get(indexes);
        else {
            IComplexNDArray ret = Nd4j.createComplex(new int[] {(int) shapeMatrix.getDouble(0)});
            int start = (int) startIndex.getDouble(0);
            int end = (int) endIndex.getDouble(0);
            int count = 0;
            for (int i = start; i < end; i++) {
                ret.putScalar(count++, arr.getComplex(i));
            }
            return ret;
        }
    }
Example #22
Source File: PreProcessor3D4DTest.java From nd4j with Apache License 2.0

    @Test
    public void testBruteForce3d() {
        NormalizerStandardize myNormalizer = new NormalizerStandardize();
        NormalizerMinMaxScaler myMinMaxScaler = new NormalizerMinMaxScaler();

        int timeSteps = 15;
        int samples = 100;
        //multiplier for the features
        INDArray featureScaleA = Nd4j.create(new double[] {1, -2, 3}).reshape(3, 1);
        INDArray featureScaleB = Nd4j.create(new double[] {2, 2, 3}).reshape(3, 1);

        Construct3dDataSet caseA = new Construct3dDataSet(featureScaleA, timeSteps, samples, 1);
        Construct3dDataSet caseB = new Construct3dDataSet(featureScaleB, timeSteps, samples, 1);

        myNormalizer.fit(caseA.sampleDataSet);
        assertEquals(caseA.expectedMean, myNormalizer.getMean());
        assertTrue(Transforms.abs(myNormalizer.getStd().div(caseA.expectedStd).sub(1)).maxNumber().floatValue() < 0.01);

        myMinMaxScaler.fit(caseB.sampleDataSet);
        assertEquals(caseB.expectedMin, myMinMaxScaler.getMin());
        assertEquals(caseB.expectedMax, myMinMaxScaler.getMax());

        //Same Test with an Iterator, values should be close for std, exact for everything else
        DataSetIterator sampleIterA = new TestDataSetIterator(caseA.sampleDataSet, 5);
        DataSetIterator sampleIterB = new TestDataSetIterator(caseB.sampleDataSet, 5);

        myNormalizer.fit(sampleIterA);
        assertEquals(myNormalizer.getMean(), caseA.expectedMean);
        assertTrue(Transforms.abs(myNormalizer.getStd().div(caseA.expectedStd).sub(1)).maxNumber().floatValue() < 0.01);

        myMinMaxScaler.fit(sampleIterB);
        assertEquals(myMinMaxScaler.getMin(), caseB.expectedMin);
        assertEquals(myMinMaxScaler.getMax(), caseB.expectedMax);
    }
Example #23
Source File: MultiNormalizerMinMaxScalerTest.java From nd4j with Apache License 2.0

    private double getMaxRelativeDifference(MultiDataSet a, MultiDataSet b) {
        double max = 0;
        for (int i = 0; i < a.getFeatures().length; i++) {
            INDArray inputA = a.getFeatures()[i];
            INDArray inputB = b.getFeatures()[i];
            INDArray delta = Transforms.abs(inputA.sub(inputB)).div(inputB);
            double maxdeltaPerc = delta.max(0, 1).mul(100).getDouble(0, 0);
            if (maxdeltaPerc > max) {
                max = maxdeltaPerc;
            }
        }
        return max;
    }
Example #24
Source File: NDArrayDistanceTransform.java From deeplearning4j with Apache License 2.0

    @Override
    public List<Writable> map(List<Writable> writables) {
        int idxFirst = inputSchema.getIndexOfColumn(firstCol);
        int idxSecond = inputSchema.getIndexOfColumn(secondCol);

        INDArray arr1 = ((NDArrayWritable) writables.get(idxFirst)).get();
        INDArray arr2 = ((NDArrayWritable) writables.get(idxSecond)).get();

        double d;
        switch (distance) {
            case COSINE:
                d = Transforms.cosineSim(arr1, arr2);
                break;
            case EUCLIDEAN:
                d = Transforms.euclideanDistance(arr1, arr2);
                break;
            case MANHATTAN:
                d = Transforms.manhattanDistance(arr1, arr2);
                break;
            default:
                throw new UnsupportedOperationException("Unknown or not supported distance metric: " + distance);
        }

        List<Writable> out = new ArrayList<>(writables.size() + 1);
        out.addAll(writables);
        out.add(new DoubleWritable(d));

        return out;
    }
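The three Transforms distance helpers used in the switch above can be checked directly on small vectors; a minimal sketch with illustrative values:

    INDArray a = Nd4j.create(new double[] {1, 2, 3});
    INDArray b = Nd4j.create(new double[] {4, 5, 6});
    double cosine    = Transforms.cosineSim(a, b);          // 32 / (sqrt(14) * sqrt(77)) ~= 0.9746
    double euclidean = Transforms.euclideanDistance(a, b);  // sqrt(27) ~= 5.196
    double manhattan = Transforms.manhattanDistance(a, b);  // 3 + 3 + 3 = 9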
Example #25
Source File: ParagraphVectorsTest.java From deeplearning4j with Apache License 2.0

    @Test
    @Ignore //AB 2020/02/06 - https://github.com/eclipse/deeplearning4j/issues/8677
    public void testDirectInference() throws Exception {
        boolean isIntegration = isIntegrationTests();
        File resource = Resources.asFile("/big/raw_sentences.txt");
        SentenceIterator sentencesIter = getIterator(isIntegration, resource);

        ClassPathResource resource_mixed = new ClassPathResource("paravec/");
        File local_resource_mixed = testDir.newFolder();
        resource_mixed.copyDirectory(local_resource_mixed);
        SentenceIterator iter = new AggregatingSentenceIterator.Builder()
                .addSentenceIterator(sentencesIter)
                .addSentenceIterator(new FileSentenceIterator(local_resource_mixed)).build();

        TokenizerFactory t = new DefaultTokenizerFactory();
        t.setTokenPreProcessor(new CommonPreprocessor());

        Word2Vec wordVectors = new Word2Vec.Builder().minWordFrequency(1).batchSize(250).iterations(1).epochs(1)
                .learningRate(0.025).layerSize(150).minLearningRate(0.001)
                .elementsLearningAlgorithm(new SkipGram<VocabWord>()).useHierarchicSoftmax(true).windowSize(5)
                .iterate(iter).tokenizerFactory(t).build();

        wordVectors.fit();

        ParagraphVectors pv = new ParagraphVectors.Builder().tokenizerFactory(t).iterations(10)
                .useHierarchicSoftmax(true).trainWordVectors(true).useExistingWordVectors(wordVectors)
                .negativeSample(0).sequenceLearningAlgorithm(new DM<VocabWord>()).build();

        INDArray vec1 = pv.inferVector("This text is pretty awesome");
        INDArray vec2 = pv.inferVector("Fantastic process of crazy things happening inside just for history purposes");

        log.info("vec1/vec2: {}", Transforms.cosineSim(vec1, vec2));
    }
Example #26
Source File: SameDiffTests.java From nd4j with Apache License 2.0

    @Test
    public void testDenseLayerForwardPass() {
        Nd4j.getRandom().setSeed(12345);

        SameDiff sd = SameDiff.create();

        INDArray iInput = Nd4j.rand(3, 4);
        INDArray iWeights = Nd4j.rand(4, 5);
        INDArray iBias = Nd4j.rand(1, 5);

        SDVariable input = sd.var("input", iInput);
        SDVariable weights = sd.var("weights", iWeights);
        SDVariable bias = sd.var("bias", iBias);

        SDVariable mmul = sd.mmul("mmul", input, weights);
        SDVariable z = mmul.add("z", bias);
        SDVariable out = sd.sigmoid("out", z);

        INDArray expMmul = iInput.mmul(iWeights);
        INDArray expZ = expMmul.addRowVector(iBias);
        INDArray expOut = Transforms.sigmoid(expZ, true);

        sd.exec();

        assertEquals(expMmul, mmul.getArr());
        assertEquals(expZ, z.getArr());
        assertEquals(expOut, out.getArr());
    }
Example #27
Source File: OpExecutionerTestsC.java From deeplearning4j with Apache License 2.0

    @Test
    public void testOneMinus() {
        INDArray in = Nd4j.linspace(1, 3, 3, DataType.DOUBLE);
        INDArray out = Transforms.timesOneMinus(in, true);

        //Expect: 0, -2, -6 -> from 1*(1-1), 2*(1-2), 3*(1-3). Getting: [0,0,0]
        INDArray exp = Nd4j.create(new double[] {0, -2.0, -6.0});
        assertEquals(out, exp);
    }
Example #28
Source File: AutoRecLearner.java From jstarcraft-rns with Apache License 2.0

    private INDArray scoreArray(INDArray labels, INDArray preOutput, IActivation activationFn, INDArray mask) {
        INDArray scoreArr;
        INDArray output = activationFn.getActivation(preOutput.dup(), true);
        INDArray yMinusyHat = Transforms.abs(labels.sub(output));
        scoreArr = yMinusyHat.mul(yMinusyHat);
        scoreArr = scoreArr.mul(maskData);

        if (mask != null) {
            scoreArr.muliColumnVector(mask);
        }
        return scoreArr;
    }
Example #29
Source File: OpExecutionerTests.java From deeplearning4j with Apache License 2.0

    @Test
    public void testCosineSimilarity() {
        INDArray vec1 = Nd4j.create(new float[] {1, 2, 3, 4, 5});
        INDArray vec2 = Nd4j.create(new float[] {1, 2, 3, 4, 5});
        double sim = Transforms.cosineSim(vec1, vec2);
        assertEquals(getFailureMessage(), 1, sim, 1e-1);
    }
Example #30
Source File: TransformsTest.java From nd4j with Apache License 2.0

    @Test
    public void testAnd1() {
        INDArray x = Nd4j.create(new double[] {0, 0, 1, 0, 0});
        INDArray y = Nd4j.create(new double[] {0, 0, 1, 1, 0});

        INDArray z = Transforms.and(x, y);
        assertEquals(x, z);
    }