org.bytedeco.javacv.FrameGrabber Java Examples
The following examples show how to use
org.bytedeco.javacv.FrameGrabber.
You can vote up the ones you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example #1
Source File: LivePlayTest3.java From oim-fx with MIT License | 7 votes |
/**
 * Grabs one frame from the grabber, converts it to a {@link BufferedImage}
 * and shows it on {@code playLabel}.
 *
 * @return the converted image, or {@code null} when playback is not started,
 *         no frame was available, or grabbing failed
 */
public BufferedImage getBufferedImage() {
    BufferedImage image = null;
    try {
        if (start) {
            Frame frame = grabber.grab();
            if (frame != null) {
                // TYPE_CUSTOM frames get no gamma correction (gamma 1.0).
                int imageType = Java2DFrameConverter.getBufferedImageType(frame);
                double gammaValue = (imageType == BufferedImage.TYPE_CUSTOM) ? 1.0 : inverseGamma;
                image = converter.getBufferedImage(frame, gammaValue, false, null);
                playLabel.setIcon(new ImageIcon(image));
            }
        }
    } catch (org.bytedeco.javacv.FrameGrabber.Exception e) {
        e.printStackTrace();
    }
    return image;
}
Example #2
Source File: LivePlayTest2.java From oim-fx with MIT License | 6 votes |
/**
 * Grabs the next frame, converts it to a {@link BufferedImage} and displays
 * it on {@code playLabel}.
 *
 * @return the converted image, or {@code null} if not started, the stream
 *         yielded no frame, or an error occurred
 */
public BufferedImage getBufferedImage() {
    BufferedImage result = null;
    try {
        Frame grabbed = start ? grabber.grab() : null;
        if (grabbed != null) {
            boolean flipChannels = false;
            int bufferedType = Java2DFrameConverter.getBufferedImageType(grabbed);
            // Custom-typed frames are converted without gamma correction.
            double gammaValue = bufferedType == BufferedImage.TYPE_CUSTOM ? 1.0 : inverseGamma;
            result = converter.getBufferedImage(grabbed, gammaValue, flipChannels, null);
            playLabel.setIcon(new ImageIcon(result));
        }
    } catch (org.bytedeco.javacv.FrameGrabber.Exception e) {
        e.printStackTrace();
    }
    return result;
}
Example #3
Source File: UploadController.java From Spring with Apache License 2.0 | 6 votes |
/**
 * Converts an uploaded video segment into an animated GIF and returns the
 * generated file name.
 *
 * Example invocation:
 * curl -F file=@/home/olinnyk/IdeaProjects/Spring/SpringWEB/SpringBoot/just-gif-it/video/PexelsVideos.mp4 -F start=0 -F end=5 -F speed=1 -F repeat=0 localhost:8080/upload
 *
 * @param file   the uploaded video (saved to a timestamped .mp4 under {@code location})
 * @param start  segment start, in seconds
 * @param end    segment end, in seconds
 * @param speed  keep one frame out of every {@code speed}
 * @param repeat whether the resulting GIF should loop
 * @return the file name of the generated GIF (relative, no path)
 * @throws IOException            if saving the upload fails
 * @throws FrameGrabber.Exception if decoding the video fails
 */
@PostMapping
@RequestMapping(value = "/upload", produces = MediaType.IMAGE_GIF_VALUE)
public String upload(@RequestPart("file") MultipartFile file,
                     @RequestParam("start") int start,
                     @RequestParam("end") int end,
                     @RequestParam("speed") int speed,
                     @RequestParam("repeat") boolean repeat) throws IOException, FrameGrabber.Exception {
    // Persist the upload under a timestamped name so concurrent uploads do not collide.
    final File videoFile = new File(location + "/" + System.currentTimeMillis() + ".mp4");
    file.transferTo(videoFile);
    log.info("Saved video file to {}", videoFile.getAbsolutePath());
    final Path output = Paths.get(location + "/gif/" + System.currentTimeMillis() + ".gif");
    final FFmpegFrameGrabber frameGrabber = videoDecoderService.read(videoFile);
    // GIF frame rate mirrors the source video's rate.
    final AnimatedGifEncoder gifEncoder = gifEncoderService.getGifEncoder(repeat, (float) frameGrabber.getFrameRate(), output);
    converterService.toAnimatedGif(frameGrabber, gifEncoder, start, end, speed);
    log.info("Saved generated gif to {}", output.toString());
    return output.getFileName().toString();
}
Example #4
Source File: ConverterService.java From Spring with Apache License 2.0 | 6 votes |
public void toAnimatedGif(FFmpegFrameGrabber frameGrabber, AnimatedGifEncoder gifEncoder, int start, int end, int speed) throws FrameGrabber.Exception { final long startFrame = Math.round(start * frameGrabber.getFrameRate()); final long endFrame = Math.round(end * frameGrabber.getFrameRate()); final Java2DFrameConverter frameConverter = new Java2DFrameConverter(); for (long i = startFrame; i < endFrame; i++) { if (i % speed == 0) { // Bug if frameNumber is set to 0 if (i > 0) { frameGrabber.setFrameNumber((int) i); } final BufferedImage bufferedImage = frameConverter.convert(frameGrabber.grabImage()); gifEncoder.addFrame(bufferedImage); } } frameGrabber.stop(); gifEncoder.finish(); }
Example #5
Source File: MarvinJavaCVAdapter.java From marvinproject with GNU Lesser General Public License v3.0 | 6 votes |
/**
 * Connects to the capture device with the given index and resolution.
 * Performs a first grab to verify the device actually delivers frames.
 *
 * @param deviceIndex index of the capture device
 * @param width       requested image width
 * @param height      requested image height
 * @throws MarvinVideoInterfaceException if the device cannot be opened or started
 */
@Override
public void connect(int deviceIndex, int width, int height) throws MarvinVideoInterfaceException {
    mode = MODE.DEVICE;
    this.width = width;
    this.height = height;
    marvinImage = new MarvinImage(width, height);
    intArray = new int[width * height * 4];
    try {
        // FrameGrabber.createDefault(deviceIndex) works on both Windows and
        // Linux; VideoFrameGrabber did not work on Linux.
        grabber = FrameGrabber.createDefault(deviceIndex);
        grabber.setImageWidth(width);
        grabber.setImageHeight(height);
        grabber.start();
        grabber.grab();
        connected = true;
    } catch (Exception e) {
        throw new MarvinVideoInterfaceException("Error while trying to connect to the device", e);
    }
}
Example #6
Source File: VideoPlayer.java From Java-Machine-Learning-for-Computer-Vision with MIT License | 6 votes |
private void runVideoMainThread(String videoFileName, OpenCVFrameConverter.ToMat toMat) throws FrameGrabber.Exception { FFmpegFrameGrabber grabber = initFrameGrabber(videoFileName); while (!stop) { Frame frame = grabber.grab(); if (frame == null) { log.info("Stopping"); stop(); break; } if (frame.image == null) { continue; } yolo.push(frame); opencv_core.Mat mat = toMat.convert(frame); yolo.drawBoundingBoxesRectangles(frame, mat); imshow(windowName, mat); char key = (char) waitKey(20); // Exit this loop on escape: if (key == 27) { stop(); break; } } }
Example #7
Source File: CameraFFMPEG.java From PapARt with GNU Lesser General Public License v3.0 | 6 votes |
/**
 * Opens the FFmpeg grabber for this camera description and starts it.
 * On failure the grabber is left {@code null} and diagnostics are printed.
 */
@Override
public void start() {
    FFmpegFrameGrabber ff = new FFmpegFrameGrabber(this.cameraDescription);
    ff.setImageMode(FrameGrabber.ImageMode.COLOR);
    this.setPixelFormat(PixelFormat.BGR);
    ff.setFormat(this.imageFormat);
    ff.setImageWidth(width());
    ff.setImageHeight(height());
    ff.setFrameRate(frameRate);
    try {
        ff.start();
        this.grabber = ff;
        this.isConnected = true;
    } catch (FrameGrabber.Exception e) {
        System.err.println("Could not FFMPEG frameGrabber... " + e);
        System.err.println("Camera ID " + this.cameraDescription + ":" + this.imageFormat + " could not start.");
        System.err.println("Check cable connection, ID and resolution asked.");
        this.grabber = null;
    }
}
Example #8
Source File: CameraOpenCV.java From PapARt with GNU Lesser General Public License v3.0 | 6 votes |
/**
 * Opens the OpenCV grabber for this camera's system number and starts it.
 * On failure the grabber is left {@code null} and diagnostics are printed.
 */
@Override
public void start() {
    OpenCVFrameGrabber cvGrabber = new OpenCVFrameGrabber(this.systemNumber);
    cvGrabber.setImageWidth(width());
    cvGrabber.setImageHeight(height());
    cvGrabber.setFrameRate(frameRate);
    cvGrabber.setImageMode(FrameGrabber.ImageMode.COLOR);
    try {
        cvGrabber.start();
        this.grabber = cvGrabber;
        this.isConnected = true;
    } catch (Exception e) {
        // Fix: the original printed two nearly identical messages, the second
        // one garbled ("Could not camera start frameGrabber"); keep one.
        System.err.println("Could not start frameGrabber... " + e);
        System.err.println("Camera ID " + this.systemNumber + " could not start.");
        System.err.println("Check cable connection, ID and resolution asked.");
        this.grabber = null;
    }
}
Example #9
Source File: CameraFlyCapture.java From PapARt with GNU Lesser General Public License v3.0 | 6 votes |
@Override public void start() { try { FlyCapture2FrameGrabber grabberFly = new FlyCapture2FrameGrabber(this.systemNumber); grabberFly.setImageWidth(width()); grabberFly.setImageHeight(height()); if (useBayerDecode) { grabberFly.setImageMode(FrameGrabber.ImageMode.GRAY); } else { // Hack for now ... // real Gray colors are not supported by Processing anyway ! grabberFly.setImageMode(FrameGrabber.ImageMode.COLOR); } this.grabber = grabberFly; grabberFly.start(); this.isConnected = true; } catch (Exception e) { System.err.println("Could not start FlyCapture frameGrabber... " + e); System.err.println("Camera ID " + this.systemNumber + " could not start."); System.err.println("Check cable connection, ID and resolution asked."); } }
Example #10
Source File: JavaCVExample.java From javacv-cnn-example with MIT License | 5 votes |
/**
 * Stops frame processing and releases the resources attached to frame
 * grabbing, then disposes the display window.
 */
public void stop() {
    running = false;
    try {
        logger.debug("Releasing and stopping FrameGrabber");
        // Fix: stop() must come before release() — release() frees the
        // native resources, so stopping afterwards operated on a released
        // grabber.
        frameGrabber.stop();
        frameGrabber.release();
    } catch (FrameGrabber.Exception e) {
        logger.error("Error occurred when stopping the FrameGrabber", e);
    }
    window.dispose();
}
Example #11
Source File: CameraRealSense.java From PapARt with GNU Lesser General Public License v3.0 | 5 votes |
/**
 * Experimental: grabs one frame directly from the underlying grabber.
 *
 * @return the grabbed frame, or {@code null} when grabbing fails
 */
public Frame grabFrame() {
    try {
        return grabber.grab();
    } catch (FrameGrabber.Exception ex) {
        Logger.getLogger(CameraRealSense.class.getName()).log(Level.SEVERE, null, ex);
        return null;
    }
}
Example #12
Source File: CameraFFMPEG.java From PapARt with GNU Lesser General Public License v3.0 | 5 votes |
private void checkEndOfVideo() { // 10 frames from the end. if (grabber.getFrameNumber() + 10 > grabber.getLengthInFrames()) { try { grabber.setFrameNumber(0); } catch (FrameGrabber.Exception ex) { Logger.getLogger(CameraFFMPEG.class.getName()).log(Level.SEVERE, null, ex); } } }
Example #13
Source File: CameraOpenKinect.java From PapARt with GNU Lesser General Public License v3.0 | 5 votes |
/**
 * Grabs the next color frame from the Kinect and hands it to the color
 * camera. Grab failures are logged and otherwise ignored.
 */
@Override
public void grabColor() {
    try {
        colorCamera.updateCurrentImage(grabber.grabVideo());
    } catch (FrameGrabber.Exception ex) {
        Logger.getLogger(CameraOpenKinect.class.getName()).log(Level.SEVERE, null, ex);
    }
}
Example #14
Source File: CameraOpenKinect.java From PapARt with GNU Lesser General Public License v3.0 | 5 votes |
/**
 * Grabs the next depth frame from the Kinect, stores it on the depth
 * camera and notifies the touch-input layer with the current color image.
 * Grab failures are logged and otherwise ignored.
 */
@Override
public void grabDepth() {
    try {
        depthCamera.currentImage = grabber.grabDepth();
        ((WithTouchInput) depthCamera).newTouchImageWithColor(colorCamera.currentImage);
    } catch (FrameGrabber.Exception ex) {
        Logger.getLogger(CameraOpenKinect.class.getName()).log(Level.SEVERE, null, ex);
    }
}
Example #15
Source File: ColoredObjectTracker.java From ExoVisix with MIT License | 5 votes |
public void run() { try { grabber = FrameGrabber.createDefault(CAMERA_NUM); converter = new OpenCVFrameConverter.ToIplImage(); grabber.start(); int posX = 0; int posY = 0; while (true) { img = converter.convert(grabber.grab()); if (img != null) { // show image on window cvFlip(img, img, 1);// l-r = 90_degrees_steps_anti_clockwise canvas.showImage(converter.convert(img)); IplImage detectThrs = getThresholdImage(img); CvMoments moments = new CvMoments(); cvMoments(detectThrs, moments, 1); double mom10 = cvGetSpatialMoment(moments, 1, 0); double mom01 = cvGetSpatialMoment(moments, 0, 1); double area = cvGetCentralMoment(moments, 0, 0); posX = (int) (mom10 / area); posY = (int) (mom01 / area); // only if its a valid position if (posX > 0 && posY > 0) { paint(img, posX, posY); } } // Thread.sleep(INTERVAL); } } catch (Exception e) { } }
Example #16
Source File: JavaCVExample.java From javacv-cnn-example with MIT License | 5 votes |
/** * Starts the frame grabbers and then the frame processing. Grabbed and processed frames will be displayed in the * {@link #videoPanel} */ public void start() { // frameGrabber = new FFmpegFrameGrabber("/dev/video0"); // The available FrameGrabber classes include OpenCVFrameGrabber (opencv_videoio), // DC1394FrameGrabber, FlyCapture2FrameGrabber, OpenKinectFrameGrabber, // PS3EyeFrameGrabber, VideoInputFrameGrabber, and FFmpegFrameGrabber. frameGrabber = new OpenCVFrameGrabber(0); //frameGrabber.setFormat("mp4"); frameGrabber.setImageWidth(1280); frameGrabber.setImageHeight(720); logger.debug("Starting frame grabber"); try { frameGrabber.start(); logger.debug("Started frame grabber with image width-height : {}-{}", frameGrabber.getImageWidth(), frameGrabber.getImageHeight()); } catch (FrameGrabber.Exception e) { logger.error("Error when initializing the frame grabber", e); throw new RuntimeException("Unable to start the FrameGrabber", e); } SwingUtilities.invokeLater(() -> { window.setVisible(true); }); process(); logger.debug("Stopped frame grabbing."); }
Example #17
Source File: VideoThumbnailBuilder.java From document-management-software with GNU Lesser General Public License v3.0 | 5 votes |
/**
 * Extracts one representative frame from the video — the frame roughly 60
 * seconds in, or the first frame for shorter videos — and writes it to
 * {@code frameFile} as PNG.
 *
 * @param videoFile source video
 * @param frameFile destination PNG; the scan stops once it is non-empty
 * @throws Exception if the grabber cannot be started
 */
private void writeVideoFrame(File videoFile, File frameFile) throws Exception {
    FrameGrabber g = new OpenCVFrameGrabber(videoFile);
    g.start();
    /*
     * Try to get a frame after 60 seconds.
     * Fix: the local was misleadingly named "fiveSecondsFrame" although it
     * targets the 60-second mark.
     */
    double frameRate = g.getFrameRate();
    int targetFrame = (int) (60 * frameRate);
    if (targetFrame > g.getLengthInFrames())
        targetFrame = 1;
    try {
        for (int i = 0; i < g.getLengthInFrames() && frameFile.length() == 0; i++) {
            try {
                if (i < targetFrame) {
                    // Skip ahead by grabbing (no random seek on this grabber).
                    g.grab();
                    continue;
                }
                Frame frame = g.grab();
                if (frame == null)
                    continue;
                BufferedImage img = Java2DFrameUtils.toBufferedImage(frame);
                if (img == null)
                    continue;
                ImageIO.write(img, "png", frameFile);
            } catch (Throwable t) {
                // Deliberate best effort: a bad frame must not abort the
                // scan; keep trying until a frame is written or video ends.
            }
        }
    } finally {
        g.stop();
        g.close();
    }
}
Example #18
Source File: CalibrationWorker.java From procamcalib with GNU General Public License v2.0 | 4 votes |
/**
 * (Re)creates the camera and projector devices, canvas frames, grabber and
 * converter arrays from the current settings. Existing device objects are
 * reused (settings updated in place) when the quantity is unchanged; arrays
 * are resized with Arrays.copyOf otherwise. Camera monitor windows are only
 * created when the monitor-window scale is positive. Each projector gets a
 * canvas frame (cleared to black) and a MarkedPlane built from markers[1]
 * with the configured fore/background brightness.
 *
 * NOTE(review): inside the projector loop the brightness settings are read
 * from ps[0], not ps[i] — possibly intentional (shared calibration settings
 * across projectors) but looks like an index bug; confirm before changing.
 *
 * Must run on the Event Dispatcher Thread (creates Swing canvas frames).
 *
 * @throws Exception if a device cannot be created from its settings
 */
public void init() throws Exception { // create arrays and canvas frames on the Event Dispatcher Thread... CameraDevice.Settings[] cs = cameraSettings.toArray(); if (cameraDevices == null) { cameraDevices = new CameraDevice[cs.length]; } else { cameraDevices = Arrays.copyOf(cameraDevices, cs.length); } cameraCanvasFrames = new CanvasFrame[cs.length]; frameGrabbers = new FrameGrabber[cs.length]; cameraFrameConverters = new OpenCVFrameConverter.ToIplImage[cs.length]; for (int i = 0; i < cs.length; i++) { if (cameraDevices[i] == null) { cameraDevices[i] = new CameraDevice(cs[i]); } else { cameraDevices[i].setSettings(cs[i]); } if (cameraSettings.getMonitorWindowsScale() > 0) { cameraCanvasFrames[i] = new CanvasFrame(cs[i].getName()); cameraCanvasFrames[i].setCanvasScale(cameraSettings.getMonitorWindowsScale()); } } ProjectorDevice.Settings[] ps = projectorSettings.toArray(); if (projectorDevices == null) { projectorDevices = new ProjectorDevice[ps.length]; } else { projectorDevices = Arrays.copyOf(projectorDevices, ps.length); } projectorCanvasFrames = new CanvasFrame[ps.length]; projectorPlanes = new MarkedPlane[ps.length]; projectorFrameConverters = new OpenCVFrameConverter.ToIplImage[ps.length]; for (int i = 0; i < ps.length; i++) { if (projectorDevices[i] == null) { projectorDevices[i] = new ProjectorDevice(ps[i]); } else { projectorDevices[i].setSettings(ps[i]); } projectorCanvasFrames[i] = projectorDevices[i].createCanvasFrame(); projectorCanvasFrames[i].showColor(Color.BLACK); projectorFrameConverters[i] = new OpenCVFrameConverter.ToIplImage(); Dimension dim = projectorCanvasFrames[i].getSize(); projectorPlanes[i] = new MarkedPlane(dim.width, dim.height, markers[1], true, cvScalarAll(((ProjectorDevice.CalibrationSettings)ps[0]).getBrightnessForeground()*255), cvScalarAll(((ProjectorDevice.CalibrationSettings)ps[0]).getBrightnessBackground()*255), 4); } }
Example #19
Source File: CameraOpenKinect.java From PapARt with GNU Lesser General Public License v3.0 | 4 votes |
/**
 * Starts the underlying Kinect frame grabber.
 *
 * @throws FrameGrabber.Exception if the native grabber fails to start
 */
@Override public void internalStart() throws FrameGrabber.Exception { grabber.start(); }
Example #20
Source File: MainFrame.java From procamcalib with GNU General Public License v2.0 | 4 votes |
/**
 * Builds the NetBeans-style settings tree shown in the UI: creates (or
 * reuses) the settings beans for cameras, projectors, marker patterns,
 * marker detector, and the geometric/color calibrators, registers this
 * frame as property-change listener on the mutable ones, wraps each in a
 * CleanBeanNode, and installs them under a "Settings" root on the explorer
 * manager. The editors map hides frame-grabber properties the user should
 * not touch (triggerMode, imageMode, timeout, deviceFilename, useOpenGL)
 * and routes "frameGrabber" to FrameGrabber.PropertyEditor.
 *
 * @throws IntrospectionException if bean introspection for a node fails
 * @throws PropertyVetoException  if the root context cannot be set
 */
void buildSettingsView() throws IntrospectionException, PropertyVetoException { HashMap<String, Class<? extends PropertyEditor>> editors = new HashMap<String, Class<? extends PropertyEditor>>(); editors.put("frameGrabber", FrameGrabber.PropertyEditor.class); // hide settings we do not need from the user... editors.put("triggerMode", null); editors.put("imageMode", null); editors.put("timeout", null); editors.put("deviceFilename", null); editors.put("useOpenGL", null); // editors.put("nominalDistance", null); if (cameraSettings == null) { cameraSettings = new CameraSettings(); cameraSettings.setFrameGrabber(FrameGrabber.getDefault()); cameraSettings.setQuantity(1); } cameraSettings.addPropertyChangeListener(this); BeanNode cameraNode = new CleanBeanNode<CameraSettings> (cameraSettings, editors, "Cameras"); if (projectorSettings == null) { projectorSettings = new ProjectorSettings(); projectorSettings.setQuantity(1); } projectorSettings.addPropertyChangeListener(this); BeanNode projectorNode = new CleanBeanNode<ProjectorSettings> (projectorSettings, editors, "Projectors"); if (markerSettings == null) { markerSettings = new Marker.ArraySettings(); } markerSettings.addPropertyChangeListener(this); BeanNode markerNode = new CleanBeanNode<Marker.ArraySettings> (markerSettings, null, "MarkerPatterns"); if (markerDetectorSettings == null) { markerDetectorSettings = new MarkerDetector.Settings(); } BeanNode detectorNode = new CleanBeanNode<MarkerDetector.Settings> (markerDetectorSettings, null, "MarkerDetector"); if (geometricCalibratorSettings == null) { geometricCalibratorSettings = new CalibrationWorker.GeometricSettings(); } BeanNode geometricCalibratorNode = new CleanBeanNode<CalibrationWorker.GeometricSettings> (geometricCalibratorSettings, null, "GeometricCalibrator"); if (colorCalibratorSettings == null) { colorCalibratorSettings = new CalibrationWorker.ColorSettings(); } colorCalibratorSettings.addPropertyChangeListener(this); BeanNode colorCalibratorNode = new 
CleanBeanNode<CalibrationWorker.ColorSettings> (colorCalibratorSettings, null, "ColorCalibrator"); Children children = new Children.Array(); children.add(new Node[] { cameraNode, projectorNode, markerNode, detectorNode, geometricCalibratorNode, colorCalibratorNode }); Node root = new AbstractNode(children); root.setName("Settings"); manager.setRootContext(root); }
Example #21
Source File: MainFrame.java From procamcalib with GNU General Public License v2.0 | 4 votes |
/**
 * Application entry point for ProCamCalib. Sets the system look-and-feel,
 * eagerly initializes all frame grabbers (loading errors inside the GUI
 * thread are fatal, so this happens first), then bootstraps the Swing UI on
 * the EDT: resolves the install directory from the code source, honours an
 * optional {@code --laf <class>} argument, starts the NetBeans look-and-feel
 * plumbing, registers NetBeans property editors, enables look-and-feel
 * window decorations, and shows the main frame. Startup failures are logged
 * as SEVERE.
 *
 * @param args the command line arguments; {@code --laf <class>} selects a
 *             look-and-feel, everything else is forwarded to MainFrame
 */
public static void main(final String args[]) { try { UIManager.setLookAndFeel(UIManager.getSystemLookAndFeelClassName()); } catch (Exception e) { // this is just a workaround for GTK and OpenCV, should be safe to ignore otherwise } // try to init all frame grabbers here, because bad things // happen if loading errors occur while we're in the GUI thread... FrameGrabber.init(); EventQueue.invokeLater(new Runnable() { public void run() { try { myDirectory = new File(MainFrame.class.getProtectionDomain(). getCodeSource().getLocation().toURI()); if (!myDirectory.isDirectory()) { myDirectory = myDirectory.getParentFile(); } String lafClassName = UIManager.getSystemLookAndFeelClassName(); ArrayList<String> otherArgs = new ArrayList<String>(); for (int i = 0; i < args.length; i++) { if ("--laf".equals(args[i]) && i+1 < args.length) { lafClassName = args[i+1]; i++; } else { otherArgs.add(args[i]); } } // "Ocean Look" would be javax.swing.plaf.metal.MetalLookAndFeel org.netbeans.swing.plaf.Startup.run(Class.forName(lafClassName), 0, null); // Add property editors from NetBeans String[] searchPath = PropertyEditorManager.getEditorSearchPath(); String[] newSearchPath = new String[searchPath.length+1]; newSearchPath[0] = "org.netbeans.beaninfo.editors"; System.arraycopy(searchPath, 0, newSearchPath, 1, searchPath.length); PropertyEditorManager.setEditorSearchPath(newSearchPath); PropertyEditorManager.registerEditor(String[].class, StringArrayEditor.class); // PropertyEditorManager.registerEditor(double[].class, DoubleArrayEditor.class); //Make sure we have nice window decorations. JFrame.setDefaultLookAndFeelDecorated(true); JDialog.setDefaultLookAndFeelDecorated(true); MainFrame w = new MainFrame(otherArgs.toArray(new String[0])); w.setLocationByPlatform(true); w.setVisible(true); } catch (Exception ex) { Logger.getLogger(MainFrame.class.getName()).log(Level.SEVERE, "Could not start ProCamCalib", ex); } } }); }
Example #22
Source File: MainFrame.java From procamtracker with GNU General Public License v2.0 | 4 votes |
/**
 * Builds the ProCamTracker settings tree: creates (or reuses) the settings
 * beans for camera, projector, object finder, marker detector, image
 * aligner, hand mouse, virtual ball, reality augmentor (seeded with one
 * object/virtual settings pair) and tracking worker, registers this frame
 * as property-change listener where the settings are mutable at runtime,
 * wraps each bean in a CleanBeanNode and installs them under a "Settings"
 * root on the explorer manager. The editors map hides internal properties
 * (trigger/image mode, timeouts, file names, ROI/initial-position data)
 * and routes "frameGrabber" to FrameGrabber.PropertyEditor.
 *
 * @throws IntrospectionException if bean introspection for a node fails
 * @throws PropertyVetoException  if the root context cannot be set
 */
void buildSettingsView() throws IntrospectionException, PropertyVetoException { HashMap<String, Class<? extends PropertyEditor>> editors = new HashMap<String, Class<? extends PropertyEditor>>(); editors.put("frameGrabber", FrameGrabber.PropertyEditor.class); // hide settings we do not need from the user... editors.put("triggerMode", null); editors.put("imageMode", null); editors.put("timeout", null); editors.put("parametersFilename", null); editors.put("deviceFilename", null); editors.put("useOpenGL", null); editors.put("objectImage", null); editors.put("gammaTgamma", null); editors.put("outputVideoFilename", null); editors.put("textureImageFilename", null); editors.put("projectorImageFilename", null); editors.put("projectorVideoFilename", null); editors.put("initialRoiPts", null); editors.put("initialPosition", null); if (cameraSettings == null) { cameraSettings = new CameraDevice.CalibratedSettings(); cameraSettings.setFrameGrabber(FrameGrabber.getDefault()); } cameraSettings.addPropertyChangeListener(this); BeanNode cameraNode = new CleanBeanNode<CameraDevice.Settings> (cameraSettings, editors, "Camera"); if (projectorSettings == null) { projectorSettings = new ProjectorDevice.CalibratedSettings(); } projectorSettings.addPropertyChangeListener(this); BeanNode projectorNode = new CleanBeanNode<ProjectorDevice.Settings> (projectorSettings, editors, "Projector"); if (objectFinderSettings == null) { objectFinderSettings = new ObjectFinder.Settings(); } objectFinderSettings.addPropertyChangeListener(this); BeanNode objectFinderNode = new CleanBeanNode<ObjectFinder.Settings> (objectFinderSettings, editors, "ObjectFinder"); if (markerDetectorSettings == null) { markerDetectorSettings = new MarkerDetector.Settings(); } markerDetectorSettings.addPropertyChangeListener(this); BeanNode markerDetectorNode = new CleanBeanNode<MarkerDetector.Settings> (markerDetectorSettings, editors, "MarkerDetector"); if (alignerSettings == null) { alignerSettings = new 
GNImageAligner.Settings(); } BeanNode alignerNode = new CleanBeanNode<GNImageAligner.Settings> (alignerSettings, editors, "GNImageAligner"); if (handMouseSettings == null) { handMouseSettings = new HandMouse.Settings(); } BeanNode handMouseNode = new CleanBeanNode<HandMouse.Settings> (handMouseSettings, editors, "HandMouse"); if (virtualBallSettings == null) { virtualBallSettings = new VirtualBall.Settings(); } BeanNode virtualBallNode = new CleanBeanNode<VirtualBall.Settings> (virtualBallSettings, editors, "VirtualBall"); if (realityAugmentorSettings == null) { realityAugmentorSettings = new RealityAugmentor.Settings(); RealityAugmentor.ObjectSettings os = new RealityAugmentor.ObjectSettings(); RealityAugmentor.VirtualSettings vs = new RealityAugmentor.VirtualSettings(); os.add(vs); realityAugmentorSettings.add(os); } BeanNode realityAugmentorNode = new CleanBeanNode<RealityAugmentor.Settings> (realityAugmentorSettings, editors, "RealityAugmentor"); if (trackingSettings == null) { trackingSettings = new TrackingWorker.Settings(); } BeanNode trackingNode = new CleanBeanNode<TrackingWorker.Settings> (trackingSettings, editors, "TrackingWorker"); Children children = new Children.Array(); children.add(new Node[] { cameraNode, projectorNode, objectFinderNode, markerDetectorNode, alignerNode, handMouseNode, virtualBallNode, realityAugmentorNode, trackingNode }); Node root = new AbstractNode(children); root.setName("Settings"); manager.setRootContext(root); }
Example #23
Source File: MainFrame.java From procamtracker with GNU General Public License v2.0 | 4 votes |
/**
 * Application entry point for ProCamTracker. Sets the system look-and-feel,
 * disables JOGL single-threading and initializes the GL profile (failures
 * tolerated), eagerly initializes all frame grabbers before entering the
 * GUI thread, then bootstraps the Swing UI on the EDT: resolves the install
 * directory, honours an optional {@code --laf <class>} argument, starts the
 * NetBeans look-and-feel plumbing, registers NetBeans property editors,
 * enables look-and-feel window decorations, shows the main frame and opens
 * its "Messages" output tab. Startup failures are logged as SEVERE.
 *
 * @param args the command line arguments; {@code --laf <class>} selects a
 *             look-and-feel, everything else is forwarded to MainFrame
 */
public static void main(final String args[]) { try { UIManager.setLookAndFeel(UIManager.getSystemLookAndFeelClassName()); } catch (Exception e) { // this is just a workaround for GTK and OpenCV, should be safe to ignore otherwise } try { Threading.disableSingleThreading(); //System.setProperty("sun.java2d.opengl","false"); GLProfile.initSingleton(); } catch (Throwable t) { } // try to init all frame grabbers here, because bad things // happen if loading errors occur while we're in the GUI thread... FrameGrabber.init(); EventQueue.invokeLater(new Runnable() { public void run() { try { myDirectory = new File(MainFrame.class.getProtectionDomain(). getCodeSource().getLocation().toURI()); if (!myDirectory.isDirectory()) { myDirectory = myDirectory.getParentFile(); } String lafClassName = UIManager.getSystemLookAndFeelClassName(); ArrayList<String> otherArgs = new ArrayList<String>(); for (int i = 0; i < args.length; i++) { if ("--laf".equals(args[i]) && i+1 < args.length) { lafClassName = args[i+1]; i++; } else { otherArgs.add(args[i]); } } // "Ocean Look" would be javax.swing.plaf.metal.MetalLookAndFeel org.netbeans.swing.plaf.Startup.run(Class.forName(lafClassName), 0, null); // Add property editors from NetBeans String[] searchPath = PropertyEditorManager.getEditorSearchPath(); String[] newSearchPath = new String[searchPath.length+1]; newSearchPath[0] = "org.netbeans.beaninfo.editors"; System.arraycopy(searchPath, 0, newSearchPath, 1, searchPath.length); PropertyEditorManager.setEditorSearchPath(newSearchPath); PropertyEditorManager.registerEditor(String[].class, StringArrayEditor.class); PropertyEditorManager.registerEditor(double[].class, DoubleArrayEditor.class); //Make sure we have nice window decorations. 
JFrame.setDefaultLookAndFeelDecorated(true); JDialog.setDefaultLookAndFeelDecorated(true); MainFrame w = new MainFrame(otherArgs.toArray(new String[0])); w.setLocationByPlatform(true); w.setVisible(true); w.messagesio = new NbIOProvider().getIO("Messages", new Action[0], IOContainer.getDefault()); w.messagesio.select(); } catch (Exception ex) { Logger.getLogger(MainFrame.class.getName()).log(Level.SEVERE, "Could not start ProCamTracker", ex); } } }); }
Example #24
Source File: CameraRealSense.java From PapARt with GNU Lesser General Public License v3.0 | 4 votes |
/**
 * Configures and starts the RealSense grabber. For each enabled stream
 * (color, IR, depth) the corresponding sub-camera's resolution and frame
 * rate are pushed into the grabber and the stream is enabled before
 * start(). In IR-only mode the IR rate is forced to 60 fps (200 max is
 * buggy per the original author). After start, when
 * {@code useHardwareIntrinsics} is set, the device-reported intrinsics
 * override the calibration for every enabled sub-camera.
 *
 * @throws FrameGrabber.Exception if the native grabber fails to start
 */
@Override public void internalStart() throws FrameGrabber.Exception { if (useIR && !useDepth && !useColor) { IRCamera.setFrameRate(60); // 200 max -> buggy } if (useColor) { grabber.setImageWidth(colorCamera.width()); grabber.setImageHeight(colorCamera.height()); grabber.setFrameRate(colorCamera.getFrameRate()); grabber.enableColorStream(); } if (useIR) { grabber.setIRImageWidth(IRCamera.width()); grabber.setIRImageHeight(IRCamera.height()); grabber.setIRFrameRate(IRCamera.getFrameRate()); grabber.enableIRStream(); } if (useDepth) { grabber.setDepthImageWidth(depthCamera.width()); grabber.setDepthImageHeight(depthCamera.height()); grabber.setDepthFrameRate(depthCamera.getFrameRate()); grabber.enableDepthStream(); } grabber.start(); // grabber.setPreset(3); // Override the calibration... if (useHardwareIntrinsics) { if (useColor) { useHarwareIntrinsics(colorCamera, grabber); } if (useIR) { useHarwareIntrinsics(IRCamera, grabber); } if (useDepth) { useHarwareIntrinsics(depthCamera, grabber); } } }
Example #25
Source File: CameraOpenNI2.java From PapARt with GNU Lesser General Public License v3.0 | 4 votes |
/**
 * Starts the enabled OpenNI2 streams. For each of color, IR and depth
 * (when its use* flag is set) a stream is initialized with the matching
 * pixel format and sensor type, a FrameListener bound to the sub-camera,
 * mirroring disabled and undistortion turned off, then started. Unlike
 * the other camera implementations no JavaCV grabber is started here —
 * the commented-out grabber/intrinsics code documents what the hardware-
 * intrinsics path would look like if re-enabled.
 *
 * @throws FrameGrabber.Exception declared for interface compatibility;
 *         stream setup goes through OpenNI directly
 */
@Override public void internalStart() throws FrameGrabber.Exception { if (useColor) { colorStream = initStream( // PixelFormat.BGR, PixelFormat.RGB, org.openni.PixelFormat.RGB888, SensorType.COLOR, new FrameListener(colorCamera), colorCamera); colorStream.setMirroringEnabled(false); colorCamera.setUndistort(false); colorStream.start(); } if (useIR) { IRStream = initStream( PixelFormat.RGB, org.openni.PixelFormat.RGB888, SensorType.IR, new FrameListener(IRCamera), IRCamera); IRStream.setMirroringEnabled(false); IRCamera.setUndistort(false); IRStream.start(); } if (useDepth) { depthStream = initStream( PixelFormat.OPENNI_2_DEPTH, org.openni.PixelFormat.DEPTH_1_MM, SensorType.DEPTH, new FrameListener(depthCamera), depthCamera); depthStream.setMirroringEnabled(false); depthCamera.setUndistort(false); depthStream.start(); } // grabber.start(); // // // Override the calibration... // if (useHardwareIntrinsics) { // if (useColor) { // useHarwareIntrinsics(colorCamera, grabber); // } // if (useIR) { // useHarwareIntrinsics(IRCamera, grabber); // } // if (useDepth) { // useHarwareIntrinsics(depthCamera, grabber); // } // } }
Example #26
Source File: CameraOpenKinect2.java From PapARt with GNU Lesser General Public License v3.0 | 4 votes |
/**
 * Starts the underlying Kinect v2 frame grabber.
 *
 * @throws FrameGrabber.Exception if the native grabber fails to start
 */
@Override public void internalStart() throws FrameGrabber.Exception { grabber.start(); }
Example #27
Source File: VideoPlayer.java From Java-Machine-Learning-for-Computer-Vision with MIT License | 4 votes |
/**
 * Opens the given video file with FFmpeg and starts decoding.
 *
 * @param videoFileName path of the video to open
 * @return a started {@link FFmpegFrameGrabber}; caller is responsible for stopping it
 * @throws FrameGrabber.Exception if the video cannot be opened or started
 */
private FFmpegFrameGrabber initFrameGrabber(String videoFileName) throws FrameGrabber.Exception {
    final FFmpegFrameGrabber videoGrabber = new FFmpegFrameGrabber(videoFileName);
    videoGrabber.start();
    return videoGrabber;
}
Example #28
Source File: VideoPlayer.java From Java-Machine-Learning-for-Computer-Vision with MIT License | 4 votes |
/**
 * Opens the given video (as a {@link File}) with FFmpeg and starts decoding.
 *
 * @param videoFileName path of the video to open
 * @return a started {@link FFmpegFrameGrabber}; caller is responsible for stopping it
 * @throws FrameGrabber.Exception if the video cannot be opened or started
 */
private FFmpegFrameGrabber initFrameGrabber(String videoFileName) throws FrameGrabber.Exception {
    final FFmpegFrameGrabber videoGrabber = new FFmpegFrameGrabber(new File(videoFileName));
    videoGrabber.start();
    return videoGrabber;
}
Example #29
Source File: VideoDecoderService.java From Spring with Apache License 2.0 | 4 votes |
/**
 * Opens the given video file for decoding.
 *
 * @param video the video file to decode
 * @return a started {@link FFmpegFrameGrabber}; caller is responsible for stopping it
 * @throws FrameGrabber.Exception if the video cannot be opened or started
 */
public FFmpegFrameGrabber read(File video) throws FrameGrabber.Exception {
    final FFmpegFrameGrabber grabber = new FFmpegFrameGrabber(video);
    grabber.start();
    return grabber;
}
Example #30
Source File: LivePlayTest2.java From oim-fx with MIT License | 4 votes |
/**
 * Stream relay ("转流器") demo: opens the input, shows each frame in a
 * CanvasFrame at ~25 fps until the window is closed or the stream ends,
 * then stops the grabber and exits the JVM.
 *
 * @param inputFile path or URL of the input stream
 * @param v_rs      unused; kept for interface compatibility
 * @throws Exception            if the grabber cannot be started or restarted
 * @throws org.bytedeco.javacv.FrameRecorder.Exception declared by the
 *                              original interface (no recorder is used here)
 * @throws InterruptedException if the pacing sleep is interrupted
 */
public static void recordPush(String inputFile, int v_rs) throws Exception, org.bytedeco.javacv.FrameRecorder.Exception, InterruptedException {
    Loader.load(opencv_objdetect.class);
    FrameGrabber grabber = FFmpegFrameGrabber.createDefault(inputFile);
    try {
        grabber.start();
    } catch (Exception e) {
        try {
            grabber.restart();
        } catch (Exception e1) {
            // Fix: keep the restart failure instead of silently dropping it.
            e.addSuppressed(e1);
            throw e;
        }
    }
    OpenCVFrameConverter.ToIplImage converter = new OpenCVFrameConverter.ToIplImage();
    Frame grabframe = grabber.grab();
    if (grabframe != null) {
        System.out.println("取到第一帧");
        converter.convert(grabframe);
    } else {
        System.out.println("没有取到第一帧");
    }
    System.out.println("开始推流");
    CanvasFrame frame = new CanvasFrame("camera", CanvasFrame.getDefaultGamma() / grabber.getGamma());
    frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
    frame.setAlwaysOnTop(true);
    // Fix: removed dead locals (startTime, grabbedImage, rotatedFrame) —
    // their values were computed every iteration but never used.
    while (frame.isVisible() && (grabframe = grabber.grab()) != null) {
        System.out.println("推流...");
        frame.showImage(grabframe);
        Thread.sleep(40); // ~25 fps pacing
    }
    frame.dispose();
    grabber.stop();
    // NOTE(review): terminating the whole JVM from a helper method is hostile
    // to callers; kept for compatibility with the original demo.
    System.exit(2);
}