Java Code Examples for org.nd4j.common.primitives.Pair#setFirst()
The following examples show how to use org.nd4j.common.primitives.Pair#setFirst().
You can vote up the ones you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example.
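Before the project-specific examples, here is a minimal, self-contained sketch of the pattern they all share: construct an empty Pair, populate it with setFirst()/setSecond(), and read the values back with the getters. Only the no-arg constructor and the accessors that actually appear in the examples below are assumed; the class and value names are illustrative.

import org.nd4j.common.primitives.Pair;

public class PairSetFirstSketch {
    public static void main(String[] args) {
        // Build an empty pair, then fill in both halves
        Pair<String, Double> result = new Pair<>();
        result.setFirst("label");
        result.setSecond(0.75);

        // setFirst() can also overwrite an existing value, as several examples below do
        result.setFirst("updated-label");

        System.out.println(result.getFirst() + " -> " + result.getSecond());
    }
}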
Example 1
Source File: CentersHolder.java From deeplearning4j with Apache License 2.0 | 6 votes |
public synchronized Pair<Double, Long> getCenterByMinDistance(Point point, Distance distanceFunction) {
    // Lazily initialize the distance buffer, arg-min scalar and distance op on first use
    if (distances == null)
        distances = Nd4j.create(centers.dataType(), centers.rows());
    if (argMin == null)
        argMin = Nd4j.createUninitialized(DataType.LONG, new long[0]);
    if (op == null) {
        op = ClusterUtils.createDistanceFunctionOp(distanceFunction, centers, point.getArray(), 1);
        imin = new ArgMin(distances, argMin);
        op.setZ(distances);
    }
    op.setY(point.getArray());
    Nd4j.getExecutioner().exec(op);
    Nd4j.getExecutioner().exec(imin);

    // First: distance to the closest center; second: index of that center
    Pair<Double, Long> result = new Pair<>();
    result.setFirst(distances.getDouble(argMin.getLong(0)));
    result.setSecond(argMin.getLong(0));
    return result;
}
Example 2
Source File: TimeDistributedLayer.java From deeplearning4j with Apache License 2.0 | 5 votes |
@Override
public Pair<INDArray, MaskState> feedForwardMaskArray(INDArray maskArray, MaskState currentMaskState, int minibatchSize) {
    if (maskArray == null) {
        return underlying.feedForwardMaskArray(null, currentMaskState, minibatchSize);
    } else {
        // Flatten the time series mask, delegate to the underlying layer, then reshape the result back
        INDArray reshaped = TimeSeriesUtils.reshapeTimeSeriesMaskToVector(maskArray, LayerWorkspaceMgr.noWorkspaces(), ArrayType.ACTIVATIONS);
        Pair<INDArray, MaskState> p = underlying.feedForwardMaskArray(reshaped, currentMaskState, minibatchSize);
        if (p == null || p.getFirst() == null) {
            return p;
        }
        INDArray reshaped2 = TimeSeriesUtils.reshapeVectorToTimeSeriesMask(p.getFirst(), (int) maskArray.size(0));
        p.setFirst(reshaped2);
        return p;
    }
}
Example 3
Source File: Convolution1DLayer.java From deeplearning4j with Apache License 2.0 | 5 votes |
@Override
protected Pair<INDArray, INDArray> preOutput4d(boolean training, boolean forBackprop, LayerWorkspaceMgr workspaceMgr) {
    Pair<INDArray, INDArray> preOutput = super.preOutput(true, forBackprop, workspaceMgr);
    // Expand the 3d pre-output to 4d by appending a trailing dimension of size 1
    INDArray p3d = preOutput.getFirst();
    INDArray p = preOutput.getFirst().reshape(p3d.size(0), p3d.size(1), p3d.size(2), 1);
    preOutput.setFirst(p);
    return preOutput;
}
Example 4
Source File: FastText.java From deeplearning4j with Apache License 2.0 | 5 votes |
public Pair<String, Float> predictProbability(String text) {
    assertModelLoaded();
    JFastText.ProbLabel predictedProbLabel = fastTextImpl.predictProba(text);
    // First: predicted label; second: its log-probability
    Pair<String, Float> retVal = new Pair<>();
    retVal.setFirst(predictedProbLabel.label);
    retVal.setSecond(predictedProbLabel.logProb);
    return retVal;
}
Example 5
Source File: HyperRect.java From deeplearning4j with Apache License 2.0 | 5 votes |
public static Pair<float[], float[]> point(INDArray vector) {
    Pair<float[], float[]> ret = new Pair<>();
    float[] curr = new float[(int) vector.length()];
    for (int i = 0; i < vector.length(); i++) {
        curr[i] = vector.getFloat(i);
    }
    // A point is represented as a degenerate hyper-rectangle: both bounds share the same coordinates
    ret.setFirst(curr);
    ret.setSecond(curr);
    return ret;
}
Example 6
Source File: SameDiffLoss.java From deeplearning4j with Apache License 2.0 | 3 votes |
/**
 * Compute both the score (loss function value) and gradient. This is equivalent to calling
 * {@link #computeScore(INDArray, INDArray, IActivation, INDArray, boolean)} and
 * {@link #computeGradient(INDArray, INDArray, IActivation, INDArray)} individually
 *
 * @param labels       Label/expected output
 * @param preOutput    Output of the model (neural network)
 * @param activationFn Activation function that should be applied to preOutput
 * @param mask         Mask array; may be null
 * @param average      Whether the score should be averaged (divided by number of rows in labels/output) or not
 * @return The score (loss function value) and gradient
 */
@Override
public Pair<Double, INDArray> computeGradientAndScore(INDArray labels, INDArray preOutput, IActivation activationFn, INDArray mask, boolean average) {
    Pair<Double, INDArray> GradientAndScore = new Pair<>();
    GradientAndScore.setFirst(this.computeScore(labels, preOutput, activationFn, mask, average));
    GradientAndScore.setSecond(this.computeGradient(labels, preOutput, activationFn, mask));
    return GradientAndScore;
}