boofcv.abst.feature.detdesc.DetectDescribePoint Java Examples
The following examples show how to use boofcv.abst.feature.detdesc.DetectDescribePoint.
The examples are drawn from several open-source projects; the originating project and source file are noted above each example.
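DetectDescribePoint couples a feature detector and a descriptor behind a single interface: call detect(...) once on a gray-scale image, then read back feature locations and descriptions by index. The following minimal, self-contained sketch illustrates that workflow. The class name is illustrative, the import paths follow recent BrightFeature-era BoofCV releases, and older versions (such as the 0.15 API used in some examples below) use ImageFloat32 and SurfFeature instead of GrayF32 and BrightFeature.

import boofcv.abst.feature.detdesc.DetectDescribePoint;
import boofcv.factory.feature.detdesc.FactoryDetectDescribe;
import boofcv.io.image.ConvertBufferedImage;
import boofcv.struct.feature.BrightFeature;
import boofcv.struct.image.GrayF32;
import georegression.struct.point.Point2D_F64;

import java.awt.image.BufferedImage;

public class DetectDescribeSketch {

    /** Detects features in the image and prints their locations and descriptor lengths. */
    public static void describe(BufferedImage image) {
        GrayF32 gray = ConvertBufferedImage.convertFromSingle(image, null, GrayF32.class);

        // Stable SURF with default configurations; any DetectDescribePoint is used the same way.
        DetectDescribePoint<GrayF32, BrightFeature> detDesc =
                FactoryDetectDescribe.surfStable(null, null, null, GrayF32.class);

        detDesc.detect(gray);

        for (int i = 0; i < detDesc.getNumberOfFeatures(); i++) {
            Point2D_F64 p = detDesc.getLocation(i);
            double[] desc = detDesc.getDescription(i).getValue();
            System.out.printf("feature %d at (%.1f, %.1f), descriptor length %d%n",
                    i, p.x, p.y, desc.length);
        }
    }
}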
Example #1
Source File: ImageDesc.java From MtgDesktopCompanion with GNU General Public License v3.0
public static FastQueue<BrightFeature> readDescIn(ByteBuffer buf, DetectDescribePoint<GrayF32, BrightFeature> ddp) {
    FastQueue<BrightFeature> d = UtilFeature.createQueue(ddp, 0);
    int dts = buf.getInt();                 // number of stored descriptors
    for (int i = 0; i < dts; i++) {
        int vs = buf.getInt();              // length of this descriptor
        BrightFeature f = new BrightFeature(vs);
        double[] vls = new double[vs];
        for (int j = 0; j < vs; j++) {
            vls[j] = buf.getDouble();       // raw descriptor values
        }
        f.set(vls);
        d.add(f);
    }
    return d;
}
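The method above only reads descriptors back in. A complementary serializer writing the same layout (a feature count, then each feature's length followed by its raw values) could look like the following sketch; writeDescOut is a hypothetical name and is not part of the original file.

public static void writeDescOut(ByteBuffer buf, FastQueue<BrightFeature> descriptors) {
    buf.putInt(descriptors.size());             // number of features
    for (int i = 0; i < descriptors.size(); i++) {
        double[] values = descriptors.get(i).getValue();
        buf.putInt(values.length);              // length of this descriptor
        for (double v : values) {
            buf.putDouble(v);                   // raw descriptor values
        }
    }
}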
Example #2
Source File: SURFExtractor.java From multimedia-indexing with Apache License 2.0
/**
 * Detects key points inside the image and computes descriptions at those points.
 */
protected double[][] extractFeaturesInternal(BufferedImage image) {
    ImageFloat32 boofcvImage = ConvertBufferedImage.convertFromSingle(image, null, ImageFloat32.class);
    // create the SURF detector and descriptor in BoofCV v0.15
    ConfigFastHessian conf = new ConfigFastHessian(detectThreshold, 2, maxFeaturesPerScale, 2, 9, 4, 4);
    DetectDescribePoint<ImageFloat32, SurfFeature> surf =
            FactoryDetectDescribe.surfStable(conf, null, null, ImageFloat32.class);
    // specify the image to process
    surf.detect(boofcvImage);
    int numPoints = surf.getNumberOfFeatures();
    double[][] descriptions = new double[numPoints][SURFLength];
    for (int i = 0; i < numPoints; i++) {
        descriptions[i] = surf.getDescription(i).getValue();
    }
    return descriptions;
}
Example #3
Source File: SIFTExtractor.java From multimedia-indexing with Apache License 2.0
/**
 * Detects key points inside the image and computes descriptions at those points.
 */
protected double[][] extractFeaturesInternal(BufferedImage image) {
    ImageFloat32 boofcvImage = ConvertBufferedImage.convertFromSingle(image, null, ImageFloat32.class);
    // create the SIFT detector and descriptor in BoofCV v0.15
    ConfigSiftDetector conf = new ConfigSiftDetector(2, detectThreshold, maxFeaturesPerScale, 5);
    DetectDescribePoint<ImageFloat32, SurfFeature> sift =
            FactoryDetectDescribe.sift(null, conf, null, null);
    // specify the image to process
    sift.detect(boofcvImage);
    int numPoints = sift.getNumberOfFeatures();
    double[][] descriptions = new double[numPoints][SIFTLength];
    for (int i = 0; i < numPoints; i++) {
        descriptions[i] = sift.getDescription(i).getValue();
    }
    return descriptions;
}
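Both extractors return plain double[][] arrays, so descriptors from two images can be compared without any further BoofCV machinery. As a small illustration (the helper below is hypothetical and not part of either project), a brute-force nearest-neighbour match by squared Euclidean distance could be sketched as:

/** Returns, for each descriptor in a, the index of its nearest descriptor in b. */
static int[] matchNearest(double[][] a, double[][] b) {
    int[] matches = new int[a.length];
    for (int i = 0; i < a.length; i++) {
        double best = Double.MAX_VALUE;
        int bestIndex = -1;
        for (int j = 0; j < b.length; j++) {
            double dist = 0;
            for (int k = 0; k < a[i].length; k++) {
                double d = a[i][k] - b[j][k];
                dist += d * d;          // squared Euclidean distance
            }
            if (dist < best) {
                best = dist;
                bestIndex = j;
            }
        }
        matches[i] = bestIndex;
    }
    return matches;
}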
Example #4
Source File: SURFCodebookGenerator.java From cineast with MIT License
/**
 * Processes the content (i.e. creates descriptors) and adds the generated
 * descriptors to the cluster.
 *
 * @param content The image to process.
 */
@Override
protected void process(BufferedImage content) {
    DetectDescribePoint<GrayF32, BrightFeature> surf = SURFHelper.getFastSurf(content);
    for (int i = 0; i < surf.getNumberOfFeatures(); i++) {
        this.cluster.addReference(surf.getDescription(i));
    }
}
Example #5
Source File: SURF.java From cineast with MIT License
@Override
public void processSegment(SegmentContainer shot) {
    if (shot.getMostRepresentativeFrame() == VideoFrame.EMPTY_VIDEO_FRAME) {
        return;
    }
    DetectDescribePoint<GrayF32, BrightFeature> descriptors =
            SURFHelper.getStableSurf(shot.getMostRepresentativeFrame().getImage().getBufferedImage());
    if (descriptors != null && descriptors.getNumberOfFeatures() > 0) {
        float[] histogram_f = this.histogram(true, descriptors);
        this.persist(shot.getId(), new FloatVectorImpl(histogram_f));
    } else {
        LOGGER.warn("No SURF feature could be extracted for segment {}. This is not necessarily an error!", shot.getId());
    }
}
Example #6
Source File: SURF.java From cineast with MIT License
/**
 * This method represents the first step that is executed when processing a query. The associated SegmentContainer is
 * examined and feature vectors are generated. The generated vectors are returned by this method together with an
 * optional weight vector.
 * <p>
 * <strong>Important:</strong> The weight vector must have the same size as the feature vectors returned by the method.
 *
 * @param sc SegmentContainer that was submitted to the feature module.
 * @param qc A QueryConfig object that contains query-related configuration parameters. Can still be edited.
 * @return List of feature vectors for lookup.
 */
@Override
protected List<float[]> preprocessQuery(SegmentContainer sc, ReadableQueryConfig qc) {
    /* Prepare feature pair. */
    List<float[]> features = new ArrayList<>(1);

    /* Extract features. */
    DetectDescribePoint<GrayF32, BrightFeature> descriptors =
            SURFHelper.getStableSurf(sc.getAvgImg().getBufferedImage());
    if (descriptors != null && descriptors.getNumberOfFeatures() > 0) {
        features.add(this.histogram(true, descriptors));
    }
    return features;
}
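The histogram(true, descriptors) call used in both SURF methods is not part of this excerpt. Purely as an illustration of turning a variable number of SURF descriptors into one fixed-length vector, here is a hypothetical hard-assignment bag-of-visual-words sketch; the codebook parameter, the method name, and the L1 normalization step are assumptions, and the actual cineast implementation may differ.

/** Assigns each descriptor to its nearest codebook entry and returns a normalized count histogram. */
static float[] bagOfWords(DetectDescribePoint<GrayF32, BrightFeature> descriptors, double[][] codebook) {
    float[] histogram = new float[codebook.length];
    int n = descriptors.getNumberOfFeatures();
    for (int i = 0; i < n; i++) {
        double[] desc = descriptors.getDescription(i).getValue();
        int best = 0;
        double bestDist = Double.MAX_VALUE;
        for (int c = 0; c < codebook.length; c++) {
            double dist = 0;
            for (int k = 0; k < desc.length; k++) {
                double d = desc[k] - codebook[c][k];
                dist += d * d;
            }
            if (dist < bestDist) {
                bestDist = dist;
                best = c;
            }
        }
        histogram[best]++;                      // hard assignment to nearest visual word
    }
    if (n > 0) {
        for (int c = 0; c < histogram.length; c++) {
            histogram[c] /= n;                  // L1-normalize so feature counts stay comparable
        }
    }
    return histogram;
}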
Example #7
Source File: SURFHelper.java From cineast with MIT License
/**
 * Returns SURF descriptors for an image using the settings above. Uses the BoofCV stable SURF algorithm.
 *
 * @param image Image for which to obtain the SURF descriptors.
 * @return DetectDescribePoint containing the detected and described SURF features.
 */
public static DetectDescribePoint<GrayF32, BrightFeature> getStableSurf(BufferedImage image) {
    /* Obtain raw SURF descriptors using the configuration above (FH-9 according to [1]). */
    GrayF32 gray = ConvertBufferedImage.convertFromSingle(image, null, GrayF32.class);
    ConfigFastHessian config = new ConfigFastHessian(0, 2, FH_MAX_FEATURES_PER_SCALE, FH_INITIAL_SAMPLE_SIZE,
            FH_INITIAL_SIZE, FH_NUMBER_SCALES_PER_OCTAVE, FH_NUMBER_OF_OCTAVES);
    DetectDescribePoint<GrayF32, BrightFeature> surf =
            FactoryDetectDescribe.surfStable(config, null, null, GrayF32.class);
    surf.detect(gray);
    return surf;
}
Example #8
Source File: SURFHelper.java From cineast with MIT License
/**
 * Returns SURF descriptors for an image using the settings above. Uses the BoofCV fast SURF algorithm,
 * which yields fewer features but runs a bit faster.
 *
 * @param image Image for which to obtain the SURF descriptors.
 * @return DetectDescribePoint containing the detected and described SURF features.
 */
public static DetectDescribePoint<GrayF32, BrightFeature> getFastSurf(BufferedImage image) {
    /* Obtain raw SURF descriptors using the configuration above (FH-9 according to [1]). */
    GrayF32 gray = ConvertBufferedImage.convertFromSingle(image, null, GrayF32.class);
    ConfigFastHessian config = new ConfigFastHessian(0, 2, FH_MAX_FEATURES_PER_SCALE, FH_INITIAL_SAMPLE_SIZE,
            FH_INITIAL_SIZE, FH_NUMBER_SCALES_PER_OCTAVE, FH_NUMBER_OF_OCTAVES);
    DetectDescribePoint<GrayF32, BrightFeature> surf =
            FactoryDetectDescribe.surfFast(config, null, null, GrayF32.class);
    surf.detect(gray);
    return surf;
}