Java Code Examples for org.opencv.core.CvType#CV_32F
The following examples show how to use org.opencv.core.CvType#CV_32F.
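CV_32F denotes the single-channel 32-bit floating-point element type (the same constant as CV_32FC1). As a quick orientation before the project examples, here is a minimal, self-contained sketch (matrix size and values are arbitrary) of how a CV_32F Mat is typically created, filled, and read back with the plain OpenCV Java API; it assumes the native OpenCV library has already been loaded (e.g. System.loadLibrary(Core.NATIVE_LIBRARY_NAME)):

import org.opencv.core.CvType;
import org.opencv.core.Mat;

public class Cv32fBasics {
    public static void main(String[] args) {
        Mat m = new Mat(2, 3, CvType.CV_32F);               // 2x3, single channel, 32-bit float
        m.put(0, 0, new float[]{1f, 2f, 3f, 4f, 5f, 6f});   // fill row-major
        float[] buf = new float[1];
        m.get(1, 2, buf);                                   // read the element at row 1, col 2
        System.out.println(buf[0]);                          // 6.0
        System.out.println(CvType.typeToString(m.type()));   // CV_32FC1
    }
}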
Example 1
Source File: TestUtils.java From go-bees with GNU General Public License v3.0 | 6 votes |
/**
 * Checks if two OpenCV Mats are equal.
 * The matrices must be equal size and type.
 * Floating-point mats are not supported.
 *
 * @param expected expected mat.
 * @param actual   actual mat.
 * @return true if they are equal.
 */
private static boolean equals(Mat expected, Mat actual) {
    if (expected.type() != actual.type() || expected.cols() != actual.cols()
            || expected.rows() != actual.rows()) {
        throw new UnsupportedOperationException(
                "Can not compare " + expected + " and " + actual);
    } else if (expected.depth() == CvType.CV_32F || expected.depth() == CvType.CV_64F) {
        throw new UnsupportedOperationException(
                "Floating-point mats must not be checked for exact match.");
    }
    // Subtract matrices
    Mat diff = new Mat();
    Core.absdiff(expected, actual, diff);
    // Count non zero pixels
    Mat reshaped = diff.reshape(1); // One channel
    int mistakes = Core.countNonZero(reshaped);
    // Free
    reshaped.release();
    diff.release();
    // Check mistakes
    return 0 == mistakes;
}
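Example 1 deliberately rejects CV_32F and CV_64F inputs because floating-point mats should not be compared for an exact match. A tolerance-based check is the usual alternative; a minimal sketch, where the helper name and epsilon handling are assumptions and not part of the go-bees project:

/** Returns true if no element of two same-shaped mats differs by more than eps. */
private static boolean almostEquals(Mat expected, Mat actual, double eps) {
    if (expected.type() != actual.type()
            || expected.cols() != actual.cols()
            || expected.rows() != actual.rows()) {
        return false;
    }
    Mat diff = new Mat();
    Core.absdiff(expected, actual, diff);
    Mat reshaped = diff.reshape(1);                  // one channel, as required by minMaxLoc
    double maxDiff = Core.minMaxLoc(reshaped).maxVal;
    reshaped.release();
    diff.release();
    return maxDiff <= eps;
}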
Example 2
Source File: LKTracker.java From OpenTLDAndroid with Apache License 2.0 | 6 votes |
/**
 * @return real similarities errors
 */
private float[] normCrossCorrelation(final Mat lastImg, final Mat currentImg,
        final Point[] lastPoints, final Point[] currentPoints, final byte[] status) {
    final float[] similarity = new float[lastPoints.length];

    final Mat lastPatch = new Mat(CROSS_CORR_PATCH_SIZE, CvType.CV_8U);
    final Mat currentPatch = new Mat(CROSS_CORR_PATCH_SIZE, CvType.CV_8U);
    final Mat res = new Mat(new Size(1, 1), CvType.CV_32F);

    for (int i = 0; i < lastPoints.length; i++) {
        if (status[i] == 1) {
            Imgproc.getRectSubPix(lastImg, CROSS_CORR_PATCH_SIZE, lastPoints[i], lastPatch);
            Imgproc.getRectSubPix(currentImg, CROSS_CORR_PATCH_SIZE, currentPoints[i], currentPatch);
            Imgproc.matchTemplate(lastPatch, currentPatch, res, Imgproc.TM_CCOEFF_NORMED);
            similarity[i] = Util.getFloat(0, 0, res);
        } else {
            similarity[i] = 0f;
        }
    }
    return similarity;
}
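The res mat in Example 2 is created as CV_32F because Imgproc.matchTemplate always produces a single-channel 32-bit float result map; for equally sized patches that map collapses to a single score. A minimal standalone sketch (image and template sizes are arbitrary, filled with random test data):

Mat image = new Mat(100, 100, CvType.CV_8U);
Mat templ = new Mat(20, 20, CvType.CV_8U);
Core.randu(image, 0, 255);                       // random test data
Core.randu(templ, 0, 255);
Mat result = new Mat();                          // matchTemplate allocates it as CV_32FC1
Imgproc.matchTemplate(image, templ, result, Imgproc.TM_CCOEFF_NORMED);
System.out.println(CvType.typeToString(result.type()));   // CV_32FC1
System.out.println(result.size());                        // 81x81, i.e. (W-w+1) x (H-h+1)
Core.MinMaxLocResult best = Core.minMaxLoc(result);
System.out.println("best match score: " + best.maxVal);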
Example 3
Source File: PerspectiveTransformRunner.java From konduit-serving with Apache License 2.0 | 5 votes |
private Mat pointsToMat(List<Point> points) {
    int rows = points.size();
    int cols = points.get(0).dimensions();   // use the first point to determine dimensionality
    Mat mat = new Mat(rows, cols, CvType.CV_32F);
    FloatIndexer idx = mat.createIndexer();
    for (int i = 0; i < rows; i++) {
        for (int j = 0; j < cols; j++) {
            idx.put(i, j, (float) points.get(i).get(j));
        }
    }
    return mat;
}
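Example 3 fills the CV_32F mat through a JavaCPP FloatIndexer. With the plain org.opencv.core API the same conversion is usually done with Mat.put; a minimal sketch for 2-D org.opencv.core.Point values (the method name is illustrative):

private static Mat pointsToMat32F(List<org.opencv.core.Point> points) {
    Mat mat = new Mat(points.size(), 2, CvType.CV_32F);
    for (int i = 0; i < points.size(); i++) {
        org.opencv.core.Point p = points.get(i);
        mat.put(i, 0, new float[]{(float) p.x, (float) p.y});   // one row per point: [x, y]
    }
    return mat;
}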
Example 4
Source File: ImageManipulationsActivity.java From OpenCV-AndroidSamples with MIT License | 5 votes |
public void onCameraViewStarted(int width, int height) {
    mIntermediateMat = new Mat();
    mSize0 = new Size();
    mChannels = new MatOfInt[] { new MatOfInt(0), new MatOfInt(1), new MatOfInt(2) };
    mBuff = new float[mHistSizeNum];
    mHistSize = new MatOfInt(mHistSizeNum);
    mRanges = new MatOfFloat(0f, 256f);
    mMat0 = new Mat();
    mColorsRGB = new Scalar[] { new Scalar(200, 0, 0, 255), new Scalar(0, 200, 0, 255), new Scalar(0, 0, 200, 255) };
    mColorsHue = new Scalar[] {
            new Scalar(255, 0, 0, 255),   new Scalar(255, 60, 0, 255),  new Scalar(255, 120, 0, 255), new Scalar(255, 180, 0, 255), new Scalar(255, 240, 0, 255),
            new Scalar(215, 213, 0, 255), new Scalar(150, 255, 0, 255), new Scalar(85, 255, 0, 255),  new Scalar(20, 255, 0, 255),  new Scalar(0, 255, 30, 255),
            new Scalar(0, 255, 85, 255),  new Scalar(0, 255, 150, 255), new Scalar(0, 255, 215, 255), new Scalar(0, 234, 255, 255), new Scalar(0, 170, 255, 255),
            new Scalar(0, 120, 255, 255), new Scalar(0, 60, 255, 255),  new Scalar(0, 0, 255, 255),   new Scalar(64, 0, 255, 255),  new Scalar(120, 0, 255, 255),
            new Scalar(180, 0, 255, 255), new Scalar(255, 0, 255, 255), new Scalar(255, 0, 215, 255), new Scalar(255, 0, 85, 255),  new Scalar(255, 0, 0, 255)
    };
    mWhilte = Scalar.all(255);
    mP1 = new Point();
    mP2 = new Point();

    // Fill sepia kernel
    mSepiaKernel = new Mat(4, 4, CvType.CV_32F);
    mSepiaKernel.put(0, 0, /* R */ 0.189f, 0.769f, 0.393f, 0f);
    mSepiaKernel.put(1, 0, /* G */ 0.168f, 0.686f, 0.349f, 0f);
    mSepiaKernel.put(2, 0, /* B */ 0.131f, 0.534f, 0.272f, 0f);
    mSepiaKernel.put(3, 0, /* A */ 0.000f, 0.000f, 0.000f, 1f);
}
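The 4x4 CV_32F kernel built above is a per-pixel color transform. In the OpenCV sample it is later applied to each RGBA camera frame, roughly like this (the frame variable name is illustrative):

// Multiplies every RGBA pixel, treated as a 4-vector, by the 4x4 CV_32F sepia kernel
Core.transform(rgbaFrame, rgbaFrame, mSepiaKernel);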
Example 5
Source File: MatXml.java From Android-Face-Recognition-with-Deep-Learning-Library with Apache License 2.0 | 4 votes |
public void writeMat(String tag, Mat mat) {
    try {
        if (!isWrite) {
            System.err.println("Try write to file with no write flags");
            return;
        }

        Element matrix = doc.createElement(tag);
        matrix.setAttribute("type_id", "opencv-matrix");
        rootElement.appendChild(matrix);

        Element rows = doc.createElement("rows");
        rows.appendChild(doc.createTextNode(String.valueOf(mat.rows())));

        Element cols = doc.createElement("cols");
        cols.appendChild(doc.createTextNode(String.valueOf(mat.cols())));

        Element dt = doc.createElement("dt");
        String dtStr;
        int type = mat.type();
        if (type == CvType.CV_32F) {        // type == CvType.CV_32FC1
            dtStr = "f";
        } else if (type == CvType.CV_32S) { // type == CvType.CV_32SC1
            dtStr = "i";
        } else if (type == CvType.CV_16S) { // type == CvType.CV_16SC1
            dtStr = "s";
        } else if (type == CvType.CV_8U) {  // type == CvType.CV_8UC1
            dtStr = "b";
        } else {
            dtStr = "unknown";
        }
        dt.appendChild(doc.createTextNode(dtStr));

        Element data = doc.createElement("data");
        String dataStr = dataStringBuilder(mat);
        data.appendChild(doc.createTextNode(dataStr));

        // append all to matrix
        matrix.appendChild(rows);
        matrix.appendChild(cols);
        matrix.appendChild(dt);
        matrix.appendChild(data);
    } catch (Exception e) {
        e.printStackTrace();
    }
}
Example 6
Source File: MatXml.java From Android-Face-Recognition-with-Deep-Learning-Library with Apache License 2.0 | 4 votes |
private String dataStringBuilder(Mat mat) {
    StringBuilder sb = new StringBuilder();
    int rows = mat.rows();
    int cols = mat.cols();
    int type = mat.type();

    if (type == CvType.CV_32F) {
        float[] fs = new float[1];
        for (int r = 0; r < rows; r++) {
            for (int c = 0; c < cols; c++) {
                mat.get(r, c, fs);
                sb.append(String.valueOf(fs[0]));
                sb.append(' ');
            }
            sb.append('\n');
        }
    } else if (type == CvType.CV_32S) {
        int[] is = new int[1];
        for (int r = 0; r < rows; r++) {
            for (int c = 0; c < cols; c++) {
                mat.get(r, c, is);
                sb.append(String.valueOf(is[0]));
                sb.append(' ');
            }
            sb.append('\n');
        }
    } else if (type == CvType.CV_16S) {
        short[] ss = new short[1];
        for (int r = 0; r < rows; r++) {
            for (int c = 0; c < cols; c++) {
                mat.get(r, c, ss);
                sb.append(String.valueOf(ss[0]));
                sb.append(' ');
            }
            sb.append('\n');
        }
    } else if (type == CvType.CV_8U) {
        byte[] bs = new byte[1];
        for (int r = 0; r < rows; r++) {
            for (int c = 0; c < cols; c++) {
                mat.get(r, c, bs);
                sb.append(String.valueOf(bs[0]));
                sb.append(' ');
            }
            sb.append('\n');
        }
    } else {
        sb.append("unknown type\n");
    }
    return sb.toString();
}
Example 7
Source File: NNClassifier.java From OpenTLDAndroid with Apache License 2.0 | 4 votes |
/**
 * INPUTs: pExamples, nExamples
 *
 * @param example NN patch
 * @return Relative Similarity (rsconf), Conservative Similarity (csconf),
 *         In pos. set|Id pos set|In neg. set (isin)
 */
NNConfStruct nnConf(final Mat example) {
    if (example == null) {
        Log.e(Util.TAG, "NNClass.nnConf() - Null example received, stop here");
        return new NNConfStruct(null, 0, 0);
    }
    if (pExamples.isEmpty()) {
        // IF positive examples in the model are not defined THEN everything is negative
        return new NNConfStruct(null, 0, 0);
    }
    if (nExamples.isEmpty()) {
        // IF negative examples in the model are not defined THEN everything is positive
        return new NNConfStruct(null, 1, 1);
    }

    final Mat ncc = new Mat(1, 1, CvType.CV_32F);
    float nccP = 0, csmaxP = 0, maxP = 0;
    boolean anyP = false;
    int maxPidx = 0;
    final int validatedPart = (int) Math.ceil(pExamples.size() * params.valid);
    for (int i = 0; i < pExamples.size(); i++) {
        // measure NCC to positive examples
        Imgproc.matchTemplate(pExamples.get(i), example, ncc, Imgproc.TM_CCORR_NORMED);
        nccP = (Util.getFloat(0, 0, ncc) + 1) * 0.5f;
        if (nccP > params.ncc_thesame) {
            anyP = true;
        }
        if (nccP > maxP) {
            maxP = nccP;
            maxPidx = i;
            if (i < validatedPart) {
                csmaxP = maxP;
            }
        }
    }

    float nccN = 0, maxN = 0;
    boolean anyN = false;
    for (int i = 0; i < nExamples.size(); i++) {
        // measure NCC to negative examples
        Imgproc.matchTemplate(nExamples.get(i), example, ncc, Imgproc.TM_CCORR_NORMED);
        nccN = (Util.getFloat(0, 0, ncc) + 1) * 0.5f;
        if (nccN > params.ncc_thesame) {
            anyN = true;
        }
        if (nccN > maxN) {
            maxN = nccN;
        }
    }
    // Log.i(Util.TAG, "nccP=" + nccP + ", nccN=" + nccN + ", csmaxP=" + csmaxP + ", maxP=" + maxP + ", maxN=" + maxN);

    // put together the result
    final float dN = 1 - maxN;
    final float dPrelative = 1 - maxP;
    final float dPconservative = 1 - csmaxP;
    return new NNConfStruct(new IsinStruct(anyP, maxPidx, anyN),
            dN / (dN + dPrelative), dN / (dN + dPconservative));
}
Example 8
Source File: Util.java From OpenTLDAndroid with Apache License 2.0 | 4 votes |
static float getFloat(final int row, final int col, final Mat mat) {
    if (CvType.CV_32F != mat.type())
        throw new IllegalArgumentException(
                "Expected type is CV_32F, we found: " + CvType.typeToString(mat.type()));
    mat.get(row, col, _floatBuff1);
    return _floatBuff1[0];
}
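Example 8 reads a single element through a reusable one-element buffer. The same Mat.get overload also fills larger float arrays, which avoids a JNI call per element when many CV_32F values are needed at once; a minimal sketch (the mat shape is arbitrary, and the whole-mat copy assumes a continuous mat laid out row-major):

Mat scores = new Mat(4, 5, CvType.CV_32F);
// ... fill scores ...
float[] row = new float[5];      // one full row of the 4x5 CV_32F mat
scores.get(2, 0, row);           // copies row 2 into the buffer in one call
float[] all = new float[4 * 5];
scores.get(0, 0, all);           // copies the whole mat, row-major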
Example 9
Source File: Tld.java From OpenTLDAndroid with Apache License 2.0 | 4 votes |
/**
 * @param boxClusterMap INPUT / OUTPUT
 * @return Total clusters count
 */
private int clusterBB() {
    final int size = _boxClusterMap.size();
    // need the data in arrays
    final DetectionStruct[] dbb = _boxClusterMap.keySet().toArray(new DetectionStruct[size]);
    final int[] indexes = new int[size];
    for (int i = 0; i < size; i++) {
        indexes[i] = _boxClusterMap.get(dbb[i]);
    }

    // 1. Build proximity matrix
    final float[] data = new float[size * size];
    for (int i = 0; i < size; i++) {
        for (int j = 0; j < size; j++) {
            final float d = 1 - dbb[i].detectedBB.calcOverlap(dbb[j].detectedBB);
            data[i * size + j] = d;
            data[j * size + i] = d;
        }
    }
    Mat D = new Mat(size, size, CvType.CV_32F);
    D.put(0, 0, data);

    // 2. Initialise disjoint clustering
    final int[] belongs = new int[size];
    int m = size;
    for (int i = 0; i < size; i++) {
        belongs[i] = i;
    }

    for (int it = 0; it < size - 1; it++) {
        // 3. Find nearest neighbour
        float min_d = 1;
        int node_a = -1, node_b = -1;
        for (int i = 0; i < D.rows(); i++) {
            for (int j = i + 1; j < D.cols(); j++) {
                if (data[i * size + j] < min_d && belongs[i] != belongs[j]) {
                    min_d = data[i * size + j];
                    node_a = i;
                    node_b = j;
                }
            }
        }

        // are we done?
        if (min_d > 0.5) {
            int max_idx = 0;
            for (int j = 0; j < size; j++) {
                boolean visited = false;
                for (int i = 0; i < 2 * size - 1; i++) {
                    if (belongs[j] == i) {
                        // populate the correct / aggregated cluster
                        indexes[j] = max_idx;
                        visited = true;
                    }
                }
                if (visited) {
                    max_idx++;
                }
            }
            // update the main map before going back
            for (int i = 0; i < size; i++) {
                _boxClusterMap.put(dbb[i], indexes[i]);
            }
            return max_idx;
        }

        // 4. Merge clusters and assign level
        if (node_a >= 0 && node_b >= 0) { // this should always BE true, otherwise we would have returned
            for (int k = 0; k < size; k++) {
                if (belongs[k] == belongs[node_a] || belongs[k] == belongs[node_b]) {
                    belongs[k] = m;
                }
            }
            m++;
        }
    }

    // there seem to be only 1 cluster
    for (int i = 0; i < size; i++) {
        _boxClusterMap.put(dbb[i], 0);
    }
    return 1;
}