org.bytedeco.javacv.Java2DFrameConverter Java Examples
The following examples show how to use
org.bytedeco.javacv.Java2DFrameConverter.
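Before the project-specific examples, here is a minimal, self-contained sketch of the basic round trip Java2DFrameConverter provides between java.awt.image.BufferedImage and org.bytedeco.javacv.Frame. The class name and image dimensions below are arbitrary placeholders; only the convert(...) and getBufferedImage(...) calls are part of the JavaCV API.

import java.awt.image.BufferedImage;

import org.bytedeco.javacv.Frame;
import org.bytedeco.javacv.Java2DFrameConverter;

public class Java2DFrameConverterSketch {
    public static void main(String[] args) {
        Java2DFrameConverter converter = new Java2DFrameConverter();

        // BufferedImage -> Frame
        BufferedImage image = new BufferedImage(640, 480, BufferedImage.TYPE_3BYTE_BGR);
        Frame frame = converter.convert(image);

        // Frame -> BufferedImage; the examples below use the getBufferedImage(...)
        // overloads when they need to control gamma or channel order.
        BufferedImage back = converter.getBufferedImage(frame);
        System.out.println(back.getWidth() + "x" + back.getHeight());
    }
}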
Example #1
Source File: LivePlayTest3.java From oim-fx with MIT License | 7 votes |
public BufferedImage getBufferedImage() {
    Frame capturedFrame = null;
    boolean flipChannels = false;
    BufferedImage bufferedImage = null;
    try {
        if (start) {
            if ((capturedFrame = grabber.grab()) != null) {
                int type = Java2DFrameConverter.getBufferedImageType(capturedFrame);
                double gamma = type == BufferedImage.TYPE_CUSTOM ? 1.0 : inverseGamma;
                bufferedImage = converter.getBufferedImage(capturedFrame, gamma, flipChannels, null);
                Image image = bufferedImage;
                ImageIcon icon = new ImageIcon(image);
                playLabel.setIcon(icon);
            }
        }
    } catch (org.bytedeco.javacv.FrameGrabber.Exception e) {
        e.printStackTrace();
    }
    return bufferedImage;
}
Example #2
Source File: CameraVideoPanel.java From oim-fx with MIT License | 6 votes |
public Frame getFrame() {
    Frame capturedFrame = null;
    boolean flipChannels = false;
    BufferedImage bufferedImage = null;
    try {
        if (start) {
            if ((capturedFrame = grabber.grab()) != null) {
                int type = Java2DFrameConverter.getBufferedImageType(capturedFrame);
                double gamma = type == BufferedImage.TYPE_CUSTOM ? 1.0 : inverseGamma;
                bufferedImage = converter.getBufferedImage(capturedFrame, gamma, flipChannels, null);
                Image image = bufferedImage;
                ImageIcon icon = new ImageIcon(image);
                iconLabel.setIcon(icon);
            }
        }
    } catch (org.bytedeco.javacv.FrameGrabber.Exception e) {
        e.printStackTrace();
    }
    return capturedFrame;
}
Example #3
Source File: LivePlayTest2.java From oim-fx with MIT License | 6 votes |
public BufferedImage getBufferedImage() {
    Frame capturedFrame = null;
    boolean flipChannels = false;
    BufferedImage bufferedImage = null;
    try {
        if (start) {
            if ((capturedFrame = grabber.grab()) != null) {
                int type = Java2DFrameConverter.getBufferedImageType(capturedFrame);
                double gamma = type == BufferedImage.TYPE_CUSTOM ? 1.0 : inverseGamma;
                bufferedImage = converter.getBufferedImage(capturedFrame, gamma, flipChannels, null);
                Image image = bufferedImage;
                ImageIcon icon = new ImageIcon(image);
                playLabel.setIcon(icon);
            }
        }
    } catch (org.bytedeco.javacv.FrameGrabber.Exception e) {
        e.printStackTrace();
    }
    return bufferedImage;
}
Example #4
Source File: LivePlayTest.java From oim-fx with MIT License | 6 votes |
public BufferedImage getBufferedImage() {
    Frame capturedFrame = null;
    boolean flipChannels = false;
    BufferedImage bufferedImage = null;
    try {
        if (start) {
            if ((capturedFrame = grabber.grab()) != null) {
                int type = Java2DFrameConverter.getBufferedImageType(capturedFrame);
                double gamma = type == BufferedImage.TYPE_CUSTOM ? 1.0 : inverseGamma;
                bufferedImage = converter.getBufferedImage(capturedFrame, gamma, flipChannels, null);
                Image image = bufferedImage;
                ImageIcon icon = new ImageIcon(image);
                playLabel.setIcon(icon);
            }
        }
    } catch (org.bytedeco.javacv.FrameGrabber.Exception e) {
        e.printStackTrace();
    }
    return bufferedImage;
}
Example #5
Source File: CameraVideo.java From oim-fx with MIT License | 6 votes |
public BufferedImage getBufferedImage() {
    Frame capturedFrame = null;
    boolean flipChannels = false;
    BufferedImage bufferedImage = null;
    try {
        if (start) {
            if ((capturedFrame = grabber.grab()) != null) {
                int type = Java2DFrameConverter.getBufferedImageType(capturedFrame);
                double gamma = type == BufferedImage.TYPE_CUSTOM ? 1.0 : inverseGamma;
                bufferedImage = converter.getBufferedImage(capturedFrame, gamma, flipChannels, null);
            }
        }
    } catch (org.bytedeco.javacv.FrameGrabber.Exception e) {
        e.printStackTrace();
    }
    return bufferedImage;
}
Example #6
Source File: ConverterService.java From Spring with Apache License 2.0 | 6 votes |
public void toAnimatedGif(FFmpegFrameGrabber frameGrabber, AnimatedGifEncoder gifEncoder, int start, int end, int speed) throws FrameGrabber.Exception {
    final long startFrame = Math.round(start * frameGrabber.getFrameRate());
    final long endFrame = Math.round(end * frameGrabber.getFrameRate());
    final Java2DFrameConverter frameConverter = new Java2DFrameConverter();
    for (long i = startFrame; i < endFrame; i++) {
        if (i % speed == 0) {
            // Bug if frameNumber is set to 0
            if (i > 0) {
                frameGrabber.setFrameNumber((int) i);
            }
            final BufferedImage bufferedImage = frameConverter.convert(frameGrabber.grabImage());
            gifEncoder.addFrame(bufferedImage);
        }
    }
    frameGrabber.stop();
    gifEncoder.finish();
}
Example #7
Source File: JavaCVReadAVI.java From Data_Processor with Apache License 2.0 | 6 votes |
public void run() throws Exception, InterruptedException {
    FFmpegFrameGrabber ffmpegFrameGrabber = FFmpegFrameGrabber.createDefault("C:/Users/Administrator/Desktop/deta/detasource/videoProcess/webwxgetvideo.avi");
    ffmpegFrameGrabber.start();
    // int fflength = ffmpegFrameGrabber.getLengthInFrames();
    // int maxStamp = (int) (ffmpegFrameGrabber.getLengthInTime()/1000000);
    // int count = 0;
    while (true) {
        Frame nowFrame = ffmpegFrameGrabber.grabImage();
        // int startStamp = (int) (ffmpegFrameGrabber.getTimestamp() * 1.0/1000000);
        // double present = (startStamp * 1.0 / maxStamp) * 100;
        if (nowFrame == null) {
            System.out.println("!!! Failed cvQueryFrame");
            continue;
        }
        Java2DFrameConverter paintConverter = new Java2DFrameConverter();
        BufferedImage difImage = paintConverter.getBufferedImage(nowFrame, 1);
        paint(difImage);
        Thread.sleep(25);
    }
}
Example #8
Source File: CameraVideoPanel.java From oim-fx with MIT License | 6 votes |
public BufferedImage getBufferedImage() {
    Frame capturedFrame = null;
    boolean flipChannels = false;
    BufferedImage bufferedImage = null;
    try {
        if (start) {
            if ((capturedFrame = grabber.grab()) != null) {
                int type = Java2DFrameConverter.getBufferedImageType(capturedFrame);
                double gamma = type == BufferedImage.TYPE_CUSTOM ? 1.0 : inverseGamma;
                bufferedImage = converter.getBufferedImage(capturedFrame, gamma, flipChannels, null);
                Image image = bufferedImage;
                ImageIcon icon = new ImageIcon(image);
                iconLabel.setIcon(icon);
            }
        }
    } catch (org.bytedeco.javacv.FrameGrabber.Exception e) {
        e.printStackTrace();
    }
    return bufferedImage;
}
Example #9
Source File: ImageConversionUtils.java From deeplearning4j with Apache License 2.0 | 5 votes |
public static BufferedImage makeRandomBufferedImage(int height, int width, int channels) {
    Mat img = makeRandomImage(height, width, channels);
    OpenCVFrameConverter.ToMat c = new OpenCVFrameConverter.ToMat();
    Java2DFrameConverter c2 = new Java2DFrameConverter();
    return c2.convert(c.convert(img));
}
Example #10
Source File: TestNativeImageLoader.java From deeplearning4j with Apache License 2.0 | 5 votes |
BufferedImage makeRandomBufferedImage(int height, int width, int channels) {
    Mat img = makeRandomImage(height, width, channels);
    OpenCVFrameConverter.ToMat c = new OpenCVFrameConverter.ToMat();
    Java2DFrameConverter c2 = new Java2DFrameConverter();
    return c2.convert(c.convert(img));
}
Example #11
Source File: Chronometer.java From procamtracker with GNU General Public License v2.0 | 5 votes |
public void draw(IplImage image) {
    long time;
    if (startTime < 0) {
        startTime = System.currentTimeMillis();
        time = 0;
    } else {
        time = System.currentTimeMillis() - startTime;
    }
    long minutes = time/1000/60;
    long seconds = time/1000 - minutes*60;
    long deciseconds = time/100 - seconds*10 - minutes*600;

    chronoGraphics.clearRect(0, 0, chronoImage.getWidth(), chronoImage.getHeight());
    chronoGraphics.setFont(bigFont);
    int x = (int)((roi.width  - bounds.getWidth()) / 2 - bounds.getX()),
        y = (int)((roi.height - bounds.getHeight()) / 2 - bounds.getY());
    chronoGraphics.drawString(Long.toString(minutes), x, y);
    x += bigFontMetrics.stringWidth("0");
    chronoGraphics.drawString("′", x, y);
    x += bigFontMetrics.stringWidth("′");
    chronoGraphics.drawString((seconds < 10 ? "0" : "") + seconds, x, y);
    x += bigFontMetrics.stringWidth("00");
    chronoGraphics.drawString("″", x, y);
    x += bigFontMetrics.stringWidth("″");
    chronoGraphics.setFont(smallFont);
    chronoGraphics.drawString(Long.toString(deciseconds), x, y);

    if (roi.x < 0) { roi.x += image.width(); }
    if (roi.y < 0) { roi.y += image.height(); }
    Java2DFrameConverter.copy(chronoImage, converter.convert(image), 1.0, image.nChannels() == 4, roi);
}
Example #12
Source File: ConverterUtil.java From marvinproject with GNU Lesser General Public License v3.0 | 5 votes |
public static IplImage bufferedToIplImage(BufferedImage bufImage) {
    ToIplImage iplConverter = new OpenCVFrameConverter.ToIplImage();
    Java2DFrameConverter java2dConverter = new Java2DFrameConverter();
    IplImage iplImage = iplConverter.convert(java2dConverter.convert(bufImage));
    return iplImage;
}
Example #13
Source File: TestNativeImageLoader.java From DataVec with Apache License 2.0 | 5 votes |
BufferedImage makeRandomBufferedImage(int height, int width, int channels) {
    Mat img = makeRandomImage(height, width, channels);
    OpenCVFrameConverter.ToMat c = new OpenCVFrameConverter.ToMat();
    Java2DFrameConverter c2 = new Java2DFrameConverter();
    return c2.convert(c.convert(img));
}
Example #14
Source File: ColoredObjectTracker.java From ExoVisix with MIT License | 5 votes |
public IplImage Equalize(BufferedImage bufferedimg) {
    Java2DFrameConverter converter1 = new Java2DFrameConverter();
    OpenCVFrameConverter.ToIplImage converter2 = new OpenCVFrameConverter.ToIplImage();
    IplImage iploriginal = converter2.convert(converter1.convert(bufferedimg));
    IplImage srcimg = IplImage.create(iploriginal.width(), iploriginal.height(), IPL_DEPTH_8U, 1);
    IplImage destimg = IplImage.create(iploriginal.width(), iploriginal.height(), IPL_DEPTH_8U, 1);
    cvCvtColor(iploriginal, srcimg, CV_BGR2GRAY);
    cvEqualizeHist(srcimg, destimg);
    return destimg;
}
Example #15
Source File: VidImageSequence.java From GIFKR with GNU Lesser General Public License v3.0 | 5 votes |
@Override
public BufferedImage getFrame(int gifFrame) {
    BufferedImage frame = null;
    try {
        g.setFrameNumber(gifFrame + 1);
        frame = new Java2DFrameConverter().convert(g.grabImage());
    } catch (Exception e) {
        e.printStackTrace();
    }
    return frame == null ? new BufferedImage(width, height, BufferedImage.TYPE_3BYTE_BGR) : frame;
}
Example #16
Source File: Yolo.java From Java-Machine-Learning-for-Computer-Vision with MIT License | 5 votes |
private INDArray prepareImage(Frame frame, int width, int height) throws IOException {
    if (frame == null || frame.image == null) {
        return null;
    }
    BufferedImage convert = new Java2DFrameConverter().convert(frame);
    return prepareImage(convert, width, height);
}
Example #17
Source File: ConverterUtil.java From marvinproject with GNU Lesser General Public License v3.0 | 4 votes |
public static BufferedImage IplImageToBufferedImage(IplImage src) {
    OpenCVFrameConverter.ToIplImage grabberConverter = new OpenCVFrameConverter.ToIplImage();
    Java2DFrameConverter paintConverter = new Java2DFrameConverter();
    Frame frame = grabberConverter.convert(src);
    return paintConverter.getBufferedImage(frame, 1);
}
Example #18
Source File: RobotUtils.java From karate with MIT License | 4 votes |
public static BufferedImage toBufferedImage(Mat mat) {
    OpenCVFrameConverter.ToMat openCVConverter = new OpenCVFrameConverter.ToMat();
    Java2DFrameConverter java2DConverter = new Java2DFrameConverter();
    return java2DConverter.convert(openCVConverter.convert(mat));
}
Example #19
Source File: ConverterUtil.java From marvinproject with GNU Lesser General Public License v3.0 | 4 votes |
public static BufferedImage frametoBufferedImage(Frame frame) {
    Java2DFrameConverter paintConverter = new Java2DFrameConverter();
    return paintConverter.getBufferedImage(frame, 1);
}
Example #20
Source File: TrackingWorker.java From procamtracker with GNU General Public License v2.0 | 4 votes |
public void run() {
    final int maxLevel = alignerSettings.getPyramidLevelMax();
    try {
        RealityAugmentor.VirtualSettings virtualSettings = realityAugmentor.getVirtualSettings();
        if (aligner == null || (virtualSettings != null && virtualSettings.projectionType != RealityAugmentor.ProjectionType.FIXED)) {
            ProjectorBuffer pb = projectorBufferRing.get(1);
            if (trackingSettings.useOpenCL) {
                ((ProCamTransformerCL)transformer).setProjectorImageCL(pb.imageCL, 0, maxLevel);
            }
            if (aligner == null || !trackingSettings.useOpenCL) {
                // used during initialization, even for OpenCL
                transformer.setProjectorImage(pb.image, 0, maxLevel);
            }
        }

        grabbedImage = grabberConverter.convert(frameGrabber.getDelayedFrame());
        if (grabbedImage == null) {
            grabbedImage = grabberConverter.convert(frameGrabber.grab());
        }
        if (grabbedImage != null) {
            // gamma "uncorrection", linearization
            double gamma = frameGrabber.getGamma();
            if (gamma != 1.0) {
                Buffer buffer = grabbedImage.createBuffer();
                int depth = OpenCVFrameConverter.getFrameDepth(grabbedImage.depth());
                int stride = grabbedImage.widthStep() * 8 / Math.abs(depth);
                Java2DFrameConverter.applyGamma(buffer, depth, stride, gamma);
            }

            if (trackingSettings.useOpenCL) {
                if (aligner != null && alignerSettings.getDisplacementMax() > 0) {
                    double[] pts = aligner.getTransformedRoiPts();
                    int width  = grabbedImageCL.width;
                    int height = grabbedImageCL.height;
                    roi.x(0).y(0).width(width).height(height);
                    int padX = (int)Math.round(alignerSettings.getDisplacementMax()*width);
                    int padY = (int)Math.round(alignerSettings.getDisplacementMax()*height);
                    int align = 1<<(maxLevel+1);
                    // add +3 all around because pyrDown() needs it for smoothing
                    JavaCV.boundingRect(pts, roi, padX+3, padY+3, align, align);
                    cvSetImageROI(grabbedImage, roi);
                } else {
                    cvResetImageROI(grabbedImage);
                }
                contextCL.writeImage(grabbedImageCL, grabbedImage, false);
                cvResetImageROI(grabbedImage);
                contextCL.remap(grabbedImageCL, undistortedCameraImageCL, cameraMapxCL, cameraMapyCL, frameGrabber.getSensorPattern());
                //contextCL.readImage(undistortedCameraImageCL, cameraInitFloatImages[0], true);
                //monitorWindows[1].showImage(cameraInitFloatImages[0], true);
                if (aligner != null) {
                    ((GNImageAlignerCL)aligner).setTargetImageCL(undistortedCameraImageCL);
                }
            } else {
                cameraDevice.undistort(grabbedImage, undistortedCameraImage);
                if (aligner != null) {
                    aligner.setTargetImage(undistortedCameraImage);
                }
            }
        }
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
}
Example #21
Source File: Animation.java From GIFKR with GNU Lesser General Public License v3.0 | 4 votes |
public void saveVid(File f, int width, int fps, ProgressDisplay d, ActionListener onFinish) {
    saveStopped = false;

    new Thread(() -> {
        try {
            d.setProgress(0, "Starting export");
            d.setCancel(ae -> saveStopped = true);

            String name = f.getName();
            int dotIdx = name.lastIndexOf('.');
            if (dotIdx != -1)
                name = name.substring(0, dotIdx);
            String ext = ".mp4";
            File out = StringUtil.resolveConflictName(f.getParentFile(), name + ext, false);

            int height = (int)Math.round(width * getSourceHeight()/(float)getSourceWidth());
            height = height % 2 == 0 ? height : height + 1;

            FFmpegFrameRecorder recorder = new FFmpegFrameRecorder(out.getAbsolutePath(), width, height);
            recorder.setFrameRate(fps);
            recorder.setFormat("mp4");
            recorder.setPixelFormat(avutil.AV_PIX_FMT_YUV420P);
            //recorder.setVideoCodec(avcodec.AV_CODEC_ID_H264);
            recorder.setVideoBitrate(recorder.getImageWidth() * recorder.getImageHeight() * fps * 10);
            recorder.setVideoQuality(.1);
            recorder.start();

            for (int i = 0; i < frames && !saveStopped; i++) {
                try {
                    setX(i/(float)(frames-1));
                    d.setProgress((i/(float)(frames-1))*.95f, "Writing frame "+i+" of " + frames);
                    BufferedImage frame = renderFrame(width);
                    BufferedImage img = new BufferedImage(frame.getWidth(), frame.getHeight(), BufferedImage.TYPE_INT_RGB);
                    img.getGraphics().drawImage(frame, 0, 0, null);
                    recorder.record(new Java2DFrameConverter().convert(img), avutil.AV_PIX_FMT_ARGB);
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }

            d.setProgress(.98, "Finishing export...");
            recorder.stop();
            recorder.close();
        } catch (Exception e1) {
            e1.printStackTrace();
        } finally {
            if (onFinish != null)
                SwingUtilities.invokeLater(() -> onFinish.actionPerformed(new ActionEvent(this, ActionEvent.ACTION_FIRST, "save")));
        }
    }).start();
}