org.bytedeco.javacv.ProjectorDevice Java Examples
The following examples show how to use
org.bytedeco.javacv.ProjectorDevice.
You can vote up the ones you like or vote down the ones you don't like,
and you can navigate to the original project or source file by following the links above each example. You may also check out the related API usage on the sidebar.
Example #1
Source File: ProjectiveDeviceP.java From PapARt with GNU Lesser General Public License v3.0 | 6 votes |
public static ProjectiveDeviceP loadProjectorDevice(PApplet parent, String filename, int id) throws Exception { ProjectiveDeviceP p = new ProjectiveDeviceP(); if (filename.endsWith((".yaml"))) { try { ProjectorDevice[] camDev = ProjectorDevice.read(filename); if (camDev.length <= id) { throw new Exception("No projector device with the id " + id + " in the calibration file: " + filename); } ProjectorDevice projectorDevice = camDev[id]; p.device = projectorDevice; loadParameters(projectorDevice, p); } catch (Exception e) { throw new Exception("Error reading the calibration file : " + filename + " \n" + e); } } if (filename.endsWith((".xml"))) { ProjectiveDeviceCalibration calib = new ProjectiveDeviceCalibration(); calib.loadFrom(parent, filename); loadParameters(calib, p); } return p; }
Example #2
Source File: MainFrame.java From procamcalib with GNU General Public License v2.0 | 6 votes |
public void propertyChange(PropertyChangeEvent evt) { updatePatterns(evt); if (evt.getSource() == cameraSettings && "frameGrabber".equals(evt.getPropertyName())) { CameraDevice.Settings[] cs = cameraSettings.toArray(); for (CameraDevice.Settings s : cs) { s.setFrameGrabber(cameraSettings.getFrameGrabber()); } } ProjectorDevice.Settings[] ps = projectorSettings.toArray(); if (ps.length <= 0 && colorCalibratorSettings.isEnabled()) { JOptionPane.showMessageDialog(this, "Color calibration requires a projector.", "Cannot enable color calibration", JOptionPane.WARNING_MESSAGE); colorCalibratorSettings.setEnabled(false); return; } }
Example #3
Source File: MainFrame.java From procamtracker with GNU General Public License v2.0 | 5 votes |
void loadSettings(File file) throws IOException, IntrospectionException, PropertyVetoException { if (file == null) { cameraSettings = null; projectorSettings = null; objectFinderSettings = null; markerDetectorSettings = null; alignerSettings = null; handMouseSettings = null; virtualBallSettings = null; realityAugmentorSettings = null; trackingSettings = null; trackingWorker = null; } else { XMLDecoder decoder = new XMLDecoder(new BufferedInputStream(new FileInputStream(file))); cameraSettings = (CameraDevice.Settings)decoder.readObject(); projectorSettings = (ProjectorDevice.Settings)decoder.readObject(); objectFinderSettings = (ObjectFinder.Settings)decoder.readObject(); markerDetectorSettings = (MarkerDetector.Settings)decoder.readObject(); alignerSettings = (GNImageAligner.Settings)decoder.readObject(); handMouseSettings = (HandMouse.Settings)decoder.readObject(); virtualBallSettings = (VirtualBall.Settings)decoder.readObject(); realityAugmentorSettings = (RealityAugmentor.Settings)decoder.readObject(); trackingSettings = (TrackingWorker.Settings)decoder.readObject(); decoder.close(); } settingsFile = file; if (settingsFile == null) { setTitle("ProCamTracker"); } else { setTitle(settingsFile.getName() + " - ProCamTracker"); } buildSettingsView(); if (trackingWorker == null) { statusLabel.setText("Idling."); } }
Example #4
Source File: ARToolkitPlusUtils.java From PapARt with GNU Lesser General Public License v3.0 | 4 votes |
static public void convertProjParam(PApplet pa, String inputYAML, String outputDAT) throws Exception { ProjectorDevice cam = null; ProjectorDevice[] c = ProjectorDevice.read(inputYAML); if (c.length > 0) { cam = c[0]; } ProjectorDevice.Settings projSettings = (org.bytedeco.javacv.ProjectorDevice.Settings) cam.getSettings(); int w = projSettings.getImageWidth(); int h = projSettings.getImageHeight(); double[] mat = cam.cameraMatrix.get(); double[] distort = cam.distortionCoeffs.get(); OutputStream os = pa.createOutput(outputDAT); PrintWriter pw = pa.createWriter(outputDAT); StringBuffer sb = new StringBuffer(); // byte[] buf = new byte[SIZE_OF_PARAM_SET]; // ByteBuffer bb = ByteBuffer.wrap(buf); // bb.order(ByteOrder.BIG_ENDIAN); // bb.putInt(w); // bb.putInt(h); // From ARToolkitPlus... //http://www.vision.caltech.edu/bouguetj/calib_doc/htmls/parameters.html sb.append("ARToolKitPlus_CamCal_Rev02\n"); sb.append(w).append(" ").append(h).append(" "); // cx cy fx fy sb.append(mat[2]).append(" ").append(mat[5]) .append(" ").append(mat[0]). append(" ").append(mat[4]).append(" "); // alpha_c // skew factor sb.append("0 ").append(" "); // alpha_c ? // sb.append("0 "); // kc(1 - x) -> 6 values for (int i = 0; i < distort.length; i++) { sb.append(distort[i]).append(" "); } for (int i = distort.length; i < 6; i++) { sb.append("0 "); } // undist iterations sb.append("10\n"); pw.print(sb); pw.flush(); pw.close(); }
Example #5
Source File: CalibrationWorker.java From procamcalib with GNU General Public License v2.0 | 4 votes |
public void init() throws Exception { // create arrays and canvas frames on the Event Dispatcher Thread... CameraDevice.Settings[] cs = cameraSettings.toArray(); if (cameraDevices == null) { cameraDevices = new CameraDevice[cs.length]; } else { cameraDevices = Arrays.copyOf(cameraDevices, cs.length); } cameraCanvasFrames = new CanvasFrame[cs.length]; frameGrabbers = new FrameGrabber[cs.length]; cameraFrameConverters = new OpenCVFrameConverter.ToIplImage[cs.length]; for (int i = 0; i < cs.length; i++) { if (cameraDevices[i] == null) { cameraDevices[i] = new CameraDevice(cs[i]); } else { cameraDevices[i].setSettings(cs[i]); } if (cameraSettings.getMonitorWindowsScale() > 0) { cameraCanvasFrames[i] = new CanvasFrame(cs[i].getName()); cameraCanvasFrames[i].setCanvasScale(cameraSettings.getMonitorWindowsScale()); } } ProjectorDevice.Settings[] ps = projectorSettings.toArray(); if (projectorDevices == null) { projectorDevices = new ProjectorDevice[ps.length]; } else { projectorDevices = Arrays.copyOf(projectorDevices, ps.length); } projectorCanvasFrames = new CanvasFrame[ps.length]; projectorPlanes = new MarkedPlane[ps.length]; projectorFrameConverters = new OpenCVFrameConverter.ToIplImage[ps.length]; for (int i = 0; i < ps.length; i++) { if (projectorDevices[i] == null) { projectorDevices[i] = new ProjectorDevice(ps[i]); } else { projectorDevices[i].setSettings(ps[i]); } projectorCanvasFrames[i] = projectorDevices[i].createCanvasFrame(); projectorCanvasFrames[i].showColor(Color.BLACK); projectorFrameConverters[i] = new OpenCVFrameConverter.ToIplImage(); Dimension dim = projectorCanvasFrames[i].getSize(); projectorPlanes[i] = new MarkedPlane(dim.width, dim.height, markers[1], true, cvScalarAll(((ProjectorDevice.CalibrationSettings)ps[0]).getBrightnessForeground()*255), cvScalarAll(((ProjectorDevice.CalibrationSettings)ps[0]).getBrightnessBackground()*255), 4); } }
Example #6
Source File: CalibrationWorker.java From procamcalib with GNU General Public License v2.0 | 4 votes |
public void readParameters(File file) throws Exception { String f = file.getAbsolutePath(); cameraDevices = CameraDevice .read(f); projectorDevices = ProjectorDevice.read(f); }
Example #7
Source File: MainFrame.java From procamtracker with GNU General Public License v2.0 | 4 votes |
void buildSettingsView() throws IntrospectionException, PropertyVetoException { HashMap<String, Class<? extends PropertyEditor>> editors = new HashMap<String, Class<? extends PropertyEditor>>(); editors.put("frameGrabber", FrameGrabber.PropertyEditor.class); // hide settings we do not need from the user... editors.put("triggerMode", null); editors.put("imageMode", null); editors.put("timeout", null); editors.put("parametersFilename", null); editors.put("deviceFilename", null); editors.put("useOpenGL", null); editors.put("objectImage", null); editors.put("gammaTgamma", null); editors.put("outputVideoFilename", null); editors.put("textureImageFilename", null); editors.put("projectorImageFilename", null); editors.put("projectorVideoFilename", null); editors.put("initialRoiPts", null); editors.put("initialPosition", null); if (cameraSettings == null) { cameraSettings = new CameraDevice.CalibratedSettings(); cameraSettings.setFrameGrabber(FrameGrabber.getDefault()); } cameraSettings.addPropertyChangeListener(this); BeanNode cameraNode = new CleanBeanNode<CameraDevice.Settings> (cameraSettings, editors, "Camera"); if (projectorSettings == null) { projectorSettings = new ProjectorDevice.CalibratedSettings(); } projectorSettings.addPropertyChangeListener(this); BeanNode projectorNode = new CleanBeanNode<ProjectorDevice.Settings> (projectorSettings, editors, "Projector"); if (objectFinderSettings == null) { objectFinderSettings = new ObjectFinder.Settings(); } objectFinderSettings.addPropertyChangeListener(this); BeanNode objectFinderNode = new CleanBeanNode<ObjectFinder.Settings> (objectFinderSettings, editors, "ObjectFinder"); if (markerDetectorSettings == null) { markerDetectorSettings = new MarkerDetector.Settings(); } markerDetectorSettings.addPropertyChangeListener(this); BeanNode markerDetectorNode = new CleanBeanNode<MarkerDetector.Settings> (markerDetectorSettings, editors, "MarkerDetector"); if (alignerSettings == null) { alignerSettings = new 
GNImageAligner.Settings(); } BeanNode alignerNode = new CleanBeanNode<GNImageAligner.Settings> (alignerSettings, editors, "GNImageAligner"); if (handMouseSettings == null) { handMouseSettings = new HandMouse.Settings(); } BeanNode handMouseNode = new CleanBeanNode<HandMouse.Settings> (handMouseSettings, editors, "HandMouse"); if (virtualBallSettings == null) { virtualBallSettings = new VirtualBall.Settings(); } BeanNode virtualBallNode = new CleanBeanNode<VirtualBall.Settings> (virtualBallSettings, editors, "VirtualBall"); if (realityAugmentorSettings == null) { realityAugmentorSettings = new RealityAugmentor.Settings(); RealityAugmentor.ObjectSettings os = new RealityAugmentor.ObjectSettings(); RealityAugmentor.VirtualSettings vs = new RealityAugmentor.VirtualSettings(); os.add(vs); realityAugmentorSettings.add(os); } BeanNode realityAugmentorNode = new CleanBeanNode<RealityAugmentor.Settings> (realityAugmentorSettings, editors, "RealityAugmentor"); if (trackingSettings == null) { trackingSettings = new TrackingWorker.Settings(); } BeanNode trackingNode = new CleanBeanNode<TrackingWorker.Settings> (trackingSettings, editors, "TrackingWorker"); Children children = new Children.Array(); children.add(new Node[] { cameraNode, projectorNode, objectFinderNode, markerDetectorNode, alignerNode, handMouseNode, virtualBallNode, realityAugmentorNode, trackingNode }); Node root = new AbstractNode(children); root.setName("Settings"); manager.setRootContext(root); }