Java Code Examples for org.opencv.core.MatOfByte#toArray()
The following examples show how to use org.opencv.core.MatOfByte#toArray().
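Across the examples the recurring pattern is the same: encode a Mat into a MatOfByte buffer with imencode (Imgcodecs in OpenCV 3.x and later, Highgui in 2.x), then call toArray() to obtain a plain byte[]. Below is a minimal sketch of that round trip; the image.jpg input file and the choice of PNG output are assumptions for illustration, not taken from any of the projects.

import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.core.MatOfByte;
import org.opencv.imgcodecs.Imgcodecs;

public class MatToBytes {
    public static void main(String[] args) {
        // load the OpenCV native library before using any Mat-based API
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
        Mat image = Imgcodecs.imread("image.jpg");   // hypothetical input file
        MatOfByte encoded = new MatOfByte();
        Imgcodecs.imencode(".png", image, encoded);  // encode the Mat as PNG into the buffer
        byte[] bytes = encoded.toArray();            // MatOfByte#toArray() copies the buffer into a byte[]
        System.out.println("Encoded size: " + bytes.length + " bytes");
    }
}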
Example 1
Source File: DetectFace.java From opencv-object-detection with MIT License
private static BufferedImage ConvertMat2Image(Mat kameraVerisi) {
    MatOfByte byteMatVerisi = new MatOfByte();
    // Encode the image into the buffer in the given format
    Imgcodecs.imencode(".jpg", kameraVerisi, byteMatVerisi);
    // The Mat object's toArray() method converts its elements into a byte array
    byte[] byteArray = byteMatVerisi.toArray();
    BufferedImage goruntu = null;
    try {
        InputStream in = new ByteArrayInputStream(byteArray);
        goruntu = ImageIO.read(in);
    } catch (Exception e) {
        e.printStackTrace();
        return null;
    }
    return goruntu;
}
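A hypothetical caller for this helper might grab frames from a VideoCapture and hand them to ConvertMat2Image; the camera index and the Swing label used for display are assumptions for illustration and are not part of the original project.

VideoCapture kamera = new VideoCapture(0);          // assumed default camera index
Mat frame = new Mat();
if (kamera.read(frame)) {
    BufferedImage goruntu = ConvertMat2Image(frame);
    if (goruntu != null) {
        label.setIcon(new ImageIcon(goruntu));      // show the frame on a hypothetical Swing JLabel
    }
}
kamera.release();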
Example 2
Source File: FaceDetectionController.java From ExoVisix with MIT License
/**
 * Convert a Mat object (OpenCV) into the corresponding Image for JavaFX
 *
 * @param frame
 *            the {@link Mat} representing the current frame
 * @return the {@link Image} to show
 */
private Image mat2Image(Mat frame) {
    // create a temporary buffer
    MatOfByte buffer = new MatOfByte();
    // encode the frame in the buffer, according to the PNG format
    Imgcodecs.imencode(".png", frame, buffer);
    // build and return an Image created from the image encoded in the buffer
    return new Image(new ByteArrayInputStream(buffer.toArray()));
}
Example 3
Source File: FXController.java From Face-Recognition with Apache License 2.0
/**
 * Convert a Mat object (OpenCV) into the corresponding Image for JavaFX
 *
 * @param frame
 *            the {@link Mat} representing the current frame
 * @return the {@link Image} to show
 */
private Image mat2Image(Mat frame) {
    // create a temporary buffer
    MatOfByte buffer = new MatOfByte();
    // encode the frame in the buffer, according to the PNG format
    Imgcodecs.imencode(".png", frame, buffer);
    // build and return an Image created from the image encoded in the buffer
    return new Image(new ByteArrayInputStream(buffer.toArray()));
}
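Examples 2 and 3 use the same mat2Image helper. When such a helper is called from a camera-grabbing thread, the resulting Image typically has to be handed to the JavaFX application thread before it can touch the scene graph; a small sketch of that hand-off, in which grabFrame() and the imageView field are illustrative assumptions rather than code from either project:

Mat frame = grabFrame();                                     // hypothetical method returning the current camera frame
Image imageToShow = mat2Image(frame);
Platform.runLater(() -> imageView.setImage(imageToShow));    // update the ImageView on the FX application thread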
Example 4
Source File: LKTracker.java From OpenTLDAndroid with Apache License 2.0
/**
 * @return Pair of new, FILTERED, last and current POINTS, or null if it hasn't managed to track anything.
 */
Pair<Point[], Point[]> track(final Mat lastImg, final Mat currentImg, Point[] lastPoints) {
    final int size = lastPoints.length;
    final MatOfPoint2f currentPointsMat = new MatOfPoint2f();
    final MatOfPoint2f pointsFBMat = new MatOfPoint2f();
    final MatOfByte statusMat = new MatOfByte();
    final MatOfFloat errSimilarityMat = new MatOfFloat();
    final MatOfByte statusFBMat = new MatOfByte();
    final MatOfFloat errSimilarityFBMat = new MatOfFloat();

    // Forward-Backward tracking
    Video.calcOpticalFlowPyrLK(lastImg, currentImg, new MatOfPoint2f(lastPoints), currentPointsMat,
            statusMat, errSimilarityMat, WINDOW_SIZE, MAX_LEVEL, termCriteria, 0, LAMBDA);
    Video.calcOpticalFlowPyrLK(currentImg, lastImg, currentPointsMat, pointsFBMat,
            statusFBMat, errSimilarityFBMat, WINDOW_SIZE, MAX_LEVEL, termCriteria, 0, LAMBDA);

    final byte[] status = statusMat.toArray();
    float[] errSimilarity = new float[lastPoints.length];
    //final byte[] statusFB = statusFBMat.toArray();
    final float[] errSimilarityFB = errSimilarityFBMat.toArray();

    // compute the real FB error (relative to LAST points not the current ones...
    final Point[] pointsFB = pointsFBMat.toArray();
    for (int i = 0; i < size; i++) {
        errSimilarityFB[i] = Util.norm(pointsFB[i], lastPoints[i]);
    }

    final Point[] currPoints = currentPointsMat.toArray();
    // compute real similarity error
    errSimilarity = normCrossCorrelation(lastImg, currentImg, lastPoints, currPoints, status);

    // TODO errSimilarityFB has problem != from C++
    // filter out points with fwd-back error > the median AND points with similarity error > median
    return filterPts(lastPoints, currPoints, errSimilarity, errSimilarityFB, status);
}
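Here the MatOfByte instances carry the per-point status flags produced by calcOpticalFlowPyrLK: after toArray(), each entry is 1 if the optical flow for the corresponding point was found and 0 otherwise. A short sketch of how such a status array can be used to keep only the successfully tracked point pairs (the list names are illustrative; the actual filtering in this project happens inside filterPts):

List<Point> trackedLast = new ArrayList<>();
List<Point> trackedCurr = new ArrayList<>();
for (int i = 0; i < status.length; i++) {
    if (status[i] == 1) {               // 1 means the flow for this point was found
        trackedLast.add(lastPoints[i]);
        trackedCurr.add(currPoints[i]);
    }
}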
Example 5
Source File: ImageProcessor.java From video-stream-classification with Apache License 2.0
/**
 * Method to process images
 *
 * @param camId camera Id
 * @param frames list of VideoEventData
 * @param outputDir directory to save image files
 * @return last processed VideoEventData
 * @throws Exception
 */
public static VideoEventData process(String camId, Iterator<VideoEventData> frames, String outputDir,
        VideoEventData previousProcessedEventData) throws Exception {
    VideoEventData currentProcessedEventData = new VideoEventData();
    Mat frame = null;
    double imageWidth = 640;
    double imageHeight = 480;
    Size sz = new Size(imageWidth, imageHeight);
    int frameCount = 0;

    // Add frames to list
    ArrayList<VideoEventData> sortedList = new ArrayList<VideoEventData>();
    while (frames.hasNext()) {
        sortedList.add(frames.next());
    }

    // previous processed frame
    if (previousProcessedEventData != null) {
        logger.warn("cameraId=" + camId + " previous processed timestamp="
                + previousProcessedEventData.getTimestamp());
        sortedList.add(previousProcessedEventData);
    }

    // sort frames by timestamp
    sortedList.sort(Comparator.comparing(VideoEventData::getTimestamp));
    logger.warn("cameraId=" + camId + " total frames=" + sortedList.size());

    // iterate and classify every 10th frame
    for (VideoEventData eventData : sortedList) {
        frame = getMat(eventData);
        Imgproc.resize(frame, frame, sz);
        frameCount++;
        if (frameCount == 10) {
            MatOfByte bytemat = new MatOfByte();
            Imgcodecs.imencode(".jpg", frame, bytemat);
            byte[] bytes = bytemat.toArray();
            String match = ImageClassifier.classifyImage(bytes);
            logger.info("Best Match " + match);
            saveImageAndData(frame, eventData, match, outputDir);
            frameCount = 0;
        }
        currentProcessedEventData = eventData;
    }
    return currentProcessedEventData;
}
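The getMat helper used above is not part of this snippet. The inverse of the imencode/toArray() pattern is Imgcodecs.imdecode, which turns a byte[] back into a Mat; a plausible sketch of such a helper, where the Base64 payload and the getData() accessor are assumptions about how VideoEventData carries its pixels:

private static Mat getMat(VideoEventData ed) {
    byte[] data = java.util.Base64.getDecoder().decode(ed.getData());   // assumed Base64-encoded frame payload
    return Imgcodecs.imdecode(new MatOfByte(data), Imgcodecs.IMREAD_UNCHANGED);
}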
Example 6
Source File: VideoStreamingThread.java From faceswap with Apache License 2.0
@Override
protected byte[] doInBackground(Mat... frames) {
    Mat frame = frames[0];
    if (frame_firstUpdateTime == 0) {
        frame_firstUpdateTime = System.currentTimeMillis();
    }
    frame_currentUpdateTime = System.currentTimeMillis();

    long time = System.currentTimeMillis();
    // android compression version
    int datasize = 0;
    byte[] byterray = null;
    if (Const.USE_JPEG_COMPRESSION) {
        Bitmap bmp = Bitmap.createBitmap(frame.cols(), frame.rows(), Bitmap.Config.ARGB_8888);
        Utils.matToBitmap(frame, bmp);
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        bmp.compress(Bitmap.CompressFormat.JPEG, 80, bos);
        byterray = bos.toByteArray();
        bmp.recycle();
    } else {
        MatOfByte byteMat = new MatOfByte();
        // changed to rgb so that decoding side is correct
        Imgproc.cvtColor(frame, frame, Imgproc.COLOR_BGRA2RGB);
        Imgcodecs.imencode(".ppm", frame, byteMat);
        Log.d(LOG_TAG, "encoding took " + (System.currentTimeMillis() - time));
        byterray = byteMat.toArray();
    }

    // opencv jpeg compression version
    // before compression: 640 * 480 = 900K, after compression ~300k
    // int datasize = 0;
    // MatOfByte jpgByteMat = new MatOfByte();
    // MatOfInt params = new MatOfInt(Imgcodecs.IMWRITE_JPEG_QUALITY, 80);
    // Imgcodecs.imencode(".jpg", frame, jpgByteMat, params);
    // Log.d(LOG_TAG, "compression imencode took " + (System.currentTimeMillis() - time));
    // Log.d(LOG_TAG, "original size: " + frame.total() * frame.elemSize()
    //         + " after compression " + jpgByteMat.total() * jpgByteMat.elemSize());
    // byte[] byterray = jpgByteMat.toArray();

    // libjpeg-turbo compress
    // int datasize = 0;
    // // native lib use ARGB_8888 format
    // Bitmap bmp = Bitmap.createBitmap(frame.cols(), frame.rows(), Bitmap.Config.ARGB_8888);
    // Utils.matToBitmap(frame, bmp);
    // NativeUtil.compressBitmap(bmp, 95, "/sdcard/tmp/compression_test.jpg", true);
    // Log.d(LOG_TAG, "native compression took " + (System.currentTimeMillis() - time));
    //
    // ByteArrayOutputStream bos = new ByteArrayOutputStream();
    // bmp.compress(Bitmap.CompressFormat.JPEG, 70, bos);
    // byte[] byterray = bos.toByteArray();

    synchronized (frameLock) {
        frameBuffer = byterray;
        frameGeneratedTime = System.currentTimeMillis();
        frameID++;
        frameLock.notify();
    }
    Log.d(LOG_TAG, "encoding routine took " + (System.currentTimeMillis() - time));
    return byterray;
}
Example 7
Source File: CameraStream.java From tutorials with MIT License
public Image mat2Img(Mat mat) {
    MatOfByte bytes = new MatOfByte();
    // imencode selects the encoder from the file extension, so a recognized one such as ".png" is required
    Imgcodecs.imencode(".png", mat, bytes);
    ByteArrayInputStream inputStream = new ByteArrayInputStream(bytes.toArray());
    Image img = new Image(inputStream);
    return img;
}
Example 8
Source File: FaceDetection.java From tutorials with MIT License
public Image mat2Img(Mat mat) {
    MatOfByte bytes = new MatOfByte();
    // imencode selects the encoder from the file extension, so a recognized one such as ".png" is required
    Imgcodecs.imencode(".png", mat, bytes);
    ByteArrayInputStream inputStream = new ByteArrayInputStream(bytes.toArray());
    Image img = new Image(inputStream);
    return img;
}
Example 9
Source File: ImageUtils.java From StormCV with Apache License 2.0
/**
 * Creates a byte representation of the provided mat object encoded using the imageType
 * @param mat
 * @param imageType
 * @return
 */
public static byte[] Mat2ImageBytes(Mat mat, String imageType) {
    MatOfByte buffer = new MatOfByte();
    Highgui.imencode("." + imageType, mat, buffer);
    return buffer.toArray();
}
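Highgui.imencode is the OpenCV 2.x API; in OpenCV 3 and later the same call lives in Imgcodecs. A sketch of the equivalent helper under that assumption:

public static byte[] mat2ImageBytes(Mat mat, String imageType) {
    MatOfByte buffer = new MatOfByte();
    Imgcodecs.imencode("." + imageType, mat, buffer);   // e.g. imageType = "png" or "jpg"
    return buffer.toArray();
}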