Java Code Examples for ij.process.ColorProcessor#getPixels()

The following examples show how to use ij.process.ColorProcessor#getPixels() . They are taken from open-source projects; the source file, project, and license are noted at the top of each example.
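On a ColorProcessor, getPixels() returns the backing int[] in which each element is one pixel packed as 0xAARRGGBB, in row-major order; writing to that array changes the processor directly. A minimal, self-contained sketch of the packing and unpacking used throughout the examples below (class and variable names here are illustrative only):

import ij.process.ColorProcessor;

public class GetPixelsBasics {
    public static void main(String[] args) {
        ColorProcessor cp = new ColorProcessor(4, 3);   // 4x3 RGB image, initially black
        int[] pixels = (int[]) cp.getPixels();          // backing array, one packed int per pixel
        pixels[0] = (255 << 16) | (128 << 8) | 64;      // write an RGB value directly
        int r = (pixels[0] >> 16) & 0xff;               // 255
        int g = (pixels[0] >> 8) & 0xff;                // 128
        int b = pixels[0] & 0xff;                       //  64
        System.out.println(r + ", " + g + ", " + b);
    }
}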
Example 1
Source File: MatImagePlusConverter.java    From IJ-OpenCV with GNU General Public License v3.0
private static ColorProcessor makeColorProcessor(Mat mat) {
    if (mat.type() != opencv_core.CV_8UC3) {
        throw new IllegalArgumentException("wrong Mat type: " + mat.type());
    }
    final int w = mat.cols();
    final int h = mat.rows();
    byte[] pixels = new byte[w * h * (int) mat.elemSize()];
    mat.data().get(pixels);
    // convert byte array to int-encoded RGB values
    ColorProcessor cp = new ColorProcessor(w, h);
    int[] iData = (int[]) cp.getPixels();
    for (int i = 0; i < w * h; i++) {
        int red = pixels[i * 3 + 0] & 0xff;
        int grn = pixels[i * 3 + 1] & 0xff;
        int blu = pixels[i * 3 + 2] & 0xff;
        iData[i] = (red << 16) | (grn << 8) | blu;
    }
    return cp;
}
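A short usage sketch for the converter above (a hypothetical caller, not part of IJ-OpenCV): once the ColorProcessor has been filled from the Mat, it can be wrapped in an ImagePlus for display or further processing.

import ij.ImagePlus;
import ij.process.ColorProcessor;

public class ShowConvertedMat {
    // makeColorProcessor(...) is private in MatImagePlusConverter, so this assumes
    // either a public wrapper in that class or an equivalent local copy of the code.
    static void show(ColorProcessor cp) {
        new ImagePlus("from Mat", cp).show();   // display the converted RGB image in ImageJ
    }
}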
 
Example 2
Source File: Util.java    From TrakEM2 with GNU General Public License v3.0
/**
 * <p>Transfer an ARGB AWT image into a FloatProcessor with its grey values
 * and a FloatProcessor with its alpha values as [0...1].</p>
 * 
 * <p><em>Note</em>, this method currently relies on how ImageJ reuses the
 * pixels of an AWT image as generated by {@link Loader#getFlatAWTImage(ini.trakem2.display.Layer, java.awt.Rectangle, double, int, int, Class, java.util.List, boolean, java.awt.Color, ini.trakem2.display.Displayable) Loader.getFlatAWTImage(...)}
 * for creating a ColorProcessor.  This may change in the future, as many
 * things have in the past.  This method is then the place to fix it.</p>
 * 
 * @param input the ARGB AWT image to read
 * @param output the FloatProcessor that receives the grey values (r+g+b)/3
 * @param alpha the FloatProcessor that receives the alpha values in [0...1]
 */
final static public void imageToFloatAndMask( final Image input, final FloatProcessor output, final FloatProcessor alpha )
{
	final ColorProcessor cp = new ColorProcessor( input );
	final int[] inputPixels = ( int[] )cp.getPixels();
	for ( int i = 0; i < inputPixels.length; ++i )
	{
		final int argb = inputPixels[ i ];
		final int a = ( argb >> 24 ) & 0xff;
		final int r = ( argb >> 16 ) & 0xff;
		final int g = ( argb >> 8 ) & 0xff;
		final int b = argb & 0xff;
		
		final float v = ( r + g + b ) / ( float )3;
		final float w = a / ( float )255;
		
		output.setf( i, v );
		alpha.setf( i, w );
	}
}
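A usage sketch for the method above: the caller allocates the two FloatProcessor targets at the image size and passes them in to be filled. The import for TrakEM2's Util class is omitted here because it depends on that project's package layout.

import java.awt.Image;
import ij.process.FloatProcessor;

public class SplitArgbSketch {
    static FloatProcessor[] greyAndAlpha(Image awtImage) {
        int w = awtImage.getWidth(null);
        int h = awtImage.getHeight(null);
        FloatProcessor grey  = new FloatProcessor(w, h);   // receives (r + g + b) / 3
        FloatProcessor alpha = new FloatProcessor(w, h);   // receives a / 255 in [0..1]
        Util.imageToFloatAndMask(awtImage, grey, alpha);   // the TrakEM2 method shown above
        return new FloatProcessor[] { grey, alpha };
    }
}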
 
Example 3
Source File: OpenCLRaycaster.java    From 3Dscript with BSD 2-Clause "Simplified" License
public void setBackground(ColorProcessor cp, boolean combinedAlpha, boolean mip, boolean[] useLights, boolean[] colorLUT) {
	if(cp == null) {
		clearBackground();
		setKernel(OpenCLProgram.makeSource(nChannels, false, combinedAlpha, mip, useLights, colorLUT));
		return;
	}
	int[] rgb = (int[])cp.getPixels();
	setBackground(rgb, cp.getWidth(), cp.getHeight());
	// setKernel(OpenCLProgram.makeSourceForMIP(nChannels, true));
	setKernel(OpenCLProgram.makeSource(nChannels, true, combinedAlpha, mip, useLights, colorLUT));
}
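A small sketch of how a background processor for the method above might be prepared; Arrays.fill on the array returned by getPixels() writes the colour straight into the processor. The helper class and names here are illustrative only.

import java.util.Arrays;
import ij.process.ColorProcessor;

public class SolidBackground {
    // Returns a ColorProcessor filled with one packed RGB value, e.g. 0x202020 for dark grey;
    // passing null to setBackground(...) instead takes the clearBackground() branch above.
    static ColorProcessor make(int w, int h, int rgb) {
        ColorProcessor bg = new ColorProcessor(w, h);
        int[] pixels = (int[]) bg.getPixels();   // backing array, written in place
        Arrays.fill(pixels, rgb);
        return bg;
    }
}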
 
Example 4
Source File: ImagePlusMatConverter.java    From IJ-OpenCV with GNU General Public License v3.0
/**
 * Duplicates a {@link ColorProcessor} as the corresponding OpenCV image of
 * type {@link Mat}; the pixel data is copied, not shared.
 *
 * @param cp The {@link ColorProcessor} to be converted
 * @return The OpenCV image (of type {@link Mat})
 */
public static Mat toMat(ColorProcessor cp) {
    final int w = cp.getWidth();
    final int h = cp.getHeight();
    final int[] pixels = (int[]) cp.getPixels();
    byte[] bData = new byte[w * h * 3];

    // convert int-encoded RGB values to byte array
    for (int i = 0; i < pixels.length; i++) {
        bData[i * 3 + 0] = (byte) ((pixels[i] >> 16) & 0xFF);	// red
        bData[i * 3 + 1] = (byte) ((pixels[i] >> 8) & 0xFF);	// grn
        bData[i * 3 + 2] = (byte) ((pixels[i]) & 0xFF);	// blu
    }
    return new Mat(h, w, opencv_core.CV_8UC3, new BytePointer(bData));
}
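A usage sketch for toMat(...) from a hypothetical caller. Note that the bytes above are written in R,G,B order, whereas OpenCV routines usually assume B,G,R, so a channel swap (e.g. with cvtColor) may be needed before handing the Mat to code that expects OpenCV's native layout.

import ij.ImagePlus;
import ij.process.ColorProcessor;
// JavaCV <= 1.4 package layout, as used by IJ-OpenCV; newer JavaCV versions moved Mat elsewhere.
import org.bytedeco.javacpp.opencv_core.Mat;
// Import for ImagePlusMatConverter omitted: its package depends on the IJ-OpenCV version.

public class ToMatSketch {
    static Mat convert(ImagePlus imp) {
        ColorProcessor cp = (ColorProcessor) imp.getProcessor().convertToRGB();  // ensure 24-bit RGB
        return ImagePlusMatConverter.toMat(cp);   // rows = height, cols = width, type CV_8UC3
    }
}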
 
Example 5
Source File: FloatProcessorT2.java    From TrakEM2 with GNU General Public License v3.0
public FloatProcessorT2(final ColorProcessor cp, final int channel) {
	this(cp.getWidth(), cp.getHeight(), 0, 255);
	final int[] c = (int[])cp.getPixels();
	int bitmask = 0;
	int shift = 0;
	switch (channel) {
		case 0: bitmask = 0x00ff0000; shift = 16; break; // red
		case 1: bitmask = 0x0000ff00; shift =  8; break; // green
		case 2: bitmask = 0x000000ff; break; // blue
	}
	final float[] pixels = (float[])this.getPixels(); // I luv pointlessly private fields
	for (int i=0; i<pixels.length; i++) pixels[i] = ((c[i] & bitmask)>>shift);
	super.setMinAndMax(0, 255); // we know them
}
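The same extraction can be written against the stock FloatProcessor when the TrakEM2 subclass is not available; a minimal sketch (channel 0 = red, 1 = green, 2 = blue, as above):

import ij.process.ColorProcessor;
import ij.process.FloatProcessor;

public class ChannelToFloat {
    static FloatProcessor extract(ColorProcessor cp, int channel) {
        final int shift = (2 - channel) * 8;                 // 16 for red, 8 for green, 0 for blue
        final int[] c = (int[]) cp.getPixels();
        final float[] out = new float[c.length];
        for (int i = 0; i < c.length; i++) {
            out[i] = (c[i] >> shift) & 0xff;                 // isolate one 8-bit channel
        }
        FloatProcessor fp = new FloatProcessor(cp.getWidth(), cp.getHeight(), out, null);
        fp.setMinAndMax(0, 255);                             // display range for 8-bit data
        return fp;
    }
}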
 
Example 6
Source File: ArgbRenderer.java    From render with GNU General Public License v2.0
/**
 * Converts the processor to an ARGB image.
 *
 * @param  renderedImageProcessorWithMasks  processor to convert.
 * @param  binaryMask                       indicates whether a binary mask should be applied.
 *
 * @return the converted image.
 */
public static BufferedImage targetToARGBImage(final ImageProcessorWithMasks renderedImageProcessorWithMasks,
                                              final boolean binaryMask) {

    // convert to 24bit RGB
    final ColorProcessor cp = renderedImageProcessorWithMasks.ip.convertToColorProcessor();

    // set alpha channel
    final int[] cpPixels = (int[]) cp.getPixels();
    final byte[] alphaPixels;

    if (renderedImageProcessorWithMasks.mask != null) {
        alphaPixels = (byte[]) renderedImageProcessorWithMasks.mask.getPixels();
    } else if (renderedImageProcessorWithMasks.outside != null) {
        alphaPixels = (byte[]) renderedImageProcessorWithMasks.outside.getPixels();
    } else {
        alphaPixels = null;
    }

    if (alphaPixels == null) {
        // no mask: leave the pixel values (including their alpha bits) unchanged
        for (int i = 0; i < cpPixels.length; ++i) {
            cpPixels[i] &= 0xffffffff;
        }
    } else if (binaryMask) {
        // binary mask: keep the alpha bits where the mask byte is 255 (-1 as a signed byte),
        // clear them to fully transparent everywhere else
        for (int i = 0; i < cpPixels.length; ++i) {
            if (alphaPixels[i] == -1)
                cpPixels[i] &= 0xffffffff;
            else
                cpPixels[i] &= 0x00ffffff;
        }
    } else {
        // soft mask: combine the mask byte with the existing alpha bits
        for (int i = 0; i < cpPixels.length; ++i) {
            cpPixels[i] &= 0x00ffffff | (alphaPixels[i] << 24);
        }
    }

    final BufferedImage image = new BufferedImage(cp.getWidth(), cp.getHeight(), BufferedImage.TYPE_INT_ARGB);
    final WritableRaster raster = image.getRaster();
    raster.setDataElements(0, 0, cp.getWidth(), cp.getHeight(), cpPixels);

    return image;
}
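The last three lines are where getPixels() does its work: the packed int[] is written into a TYPE_INT_ARGB BufferedImage in a single raster call, so whatever alpha bits the loops above produced carry straight through. A stripped-down sketch of just that step, assuming the alpha bits are already set as desired:

import java.awt.image.BufferedImage;
import java.awt.image.WritableRaster;
import ij.process.ColorProcessor;

public class PixelsToArgbImage {
    static BufferedImage toImage(ColorProcessor cp) {
        final int[] pixels = (int[]) cp.getPixels();          // packed 0xAARRGGBB values
        final BufferedImage image = new BufferedImage(cp.getWidth(), cp.getHeight(),
                                                      BufferedImage.TYPE_INT_ARGB);
        final WritableRaster raster = image.getRaster();
        raster.setDataElements(0, 0, cp.getWidth(), cp.getHeight(), pixels);  // bulk copy, no per-pixel setRGB
        return image;
    }
}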
 
Example 7
Source File: IntegralImageMipMaps.java    From TrakEM2 with GNU General Public License v3.0
@SuppressWarnings({ "unused", "unchecked", "null" })
private static final BufferedImage[] createRGB(
		final Patch patch,
		final ColorProcessor ip,
		final ByteProcessor mask) {
	final int w = ip.getWidth();
	final int h = ip.getHeight();
	final int[] dims = new int[]{w, h};
	final ScaleAreaAveraging2d<LongType, UnsignedByteType> saar, saag, saab, saam;
	{
		// Split color channels
		final int[] p = (int[]) ip.getPixels();
		final byte[] r = new byte[p.length],
		             g = new byte[p.length],
		             b = new byte[p.length];
		for (int i=0; i<p.length; ++i) {
			final int a = p[i];
			r[i] = (byte)((a >> 16)&0xff);
			g[i] = (byte)((a >> 8)&0xff);
			b[i] = (byte)(a&0xff);
		}
		//
		saar = saa(r, dims);
		saag = saa(g, dims);
		saab = saa(b, dims);
		saam = null == mask? null : saa((byte[])mask.getPixels(), dims);
	}
	
	// Generate images
	final BufferedImage[] bis = new BufferedImage[Loader.getHighestMipMapLevel(patch) + 1];
	//
	if (null == saam) { // mask is null
		bis[0] = ImageSaver.createARGBImage((int[])ip.getPixels(), w, h); // sharing the int[] pixels
		for (int i=1; i<bis.length; i++) {
			final int K = (int) Math.pow(2, i),
		          wk = w / K,
		          hk = h / K;
			// An image of the scaled size
			saar.setOutputDimensions(wk, hk);
			saar.process();
			saag.setOutputDimensions(wk, hk);
			saag.process();
			saab.setOutputDimensions(wk, hk);
			saab.process();
			bis[i] = ImageSaver.createARGBImage(
				blend(((Array<UnsignedByteType,ByteArray>) saar.getResult().getContainer()).update(null).getCurrentStorageArray(),
					  ((Array<UnsignedByteType,ByteArray>) saag.getResult().getContainer()).update(null).getCurrentStorageArray(),
					  ((Array<UnsignedByteType,ByteArray>) saab.getResult().getContainer()).update(null).getCurrentStorageArray()),
				wk, hk);
		}
	} else {
		// With alpha channel
		bis[0] = ImageSaver.createARGBImage(blend((int[])ip.getPixels(), (byte[])mask.getPixels()), w, h); // sharing the int[] pixels
		for (int i=1; i<bis.length; i++) {
			final int K = (int) Math.pow(2, i),
		          wk = w / K,
		          hk = h / K;
			// An image of the scaled size
			saar.setOutputDimensions(wk, hk);
			saar.process();
			saag.setOutputDimensions(wk, hk);
			saag.process();
			saab.setOutputDimensions(wk, hk);
			saab.process();
			saam.setOutputDimensions(wk, hk);
			saam.process();
			bis[i] = ImageSaver.createARGBImage(
				blend(((Array<UnsignedByteType,ByteArray>) saar.getResult().getContainer()).update(null).getCurrentStorageArray(),
					  ((Array<UnsignedByteType,ByteArray>) saag.getResult().getContainer()).update(null).getCurrentStorageArray(),
					  ((Array<UnsignedByteType,ByteArray>) saab.getResult().getContainer()).update(null).getCurrentStorageArray(),
					  ((Array<UnsignedByteType,ByteArray>) saam.getResult().getContainer()).update(null).getCurrentStorageArray()),
				wk, hk);
		}
	}
	
	return bis;
}
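The channel split at the top of the method is the core getPixels() idiom here; a self-contained sketch of just that step, without the ImgLib scaling machinery:

import ij.process.ColorProcessor;

public class ChannelSplit {
    // Returns { red, green, blue } as separate byte[] planes extracted from the packed int[].
    static byte[][] split(ColorProcessor ip) {
        final int[] p = (int[]) ip.getPixels();
        final byte[] r = new byte[p.length], g = new byte[p.length], b = new byte[p.length];
        for (int i = 0; i < p.length; ++i) {
            final int argb = p[i];
            r[i] = (byte) ((argb >> 16) & 0xff);
            g[i] = (byte) ((argb >> 8) & 0xff);
            b[i] = (byte) (argb & 0xff);
        }
        return new byte[][] { r, g, b };
    }
}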
 
Example 8
Source File: IntegralImageMipMaps.java    From TrakEM2 with GNU General Public License v3.0
private static final ImageBytes[] fastCreateRGB(
		final Patch patch,
		final ColorProcessor ip,
		final ByteProcessor mask) {
	final int w = ip.getWidth();
	final int h = ip.getHeight();
	
	final long[] ir, ig, ib, im;
	{
		// Split color channels
		final int[] p = (int[]) ip.getPixels();
		final byte[] r = new byte[p.length],
		             g = new byte[p.length],
		             b = new byte[p.length];
		for (int i=0; i<p.length; ++i) {
			final int a = p[i];
			r[i] = (byte)((a >> 16)&0xff);
			g[i] = (byte)((a >> 8)&0xff);
			b[i] = (byte)(a&0xff);
		}
		//
		ir = FastIntegralImage.longIntegralImage(r, w, h);
		ig = FastIntegralImage.longIntegralImage(g, w, h);
		ib = FastIntegralImage.longIntegralImage(b, w, h);
		im = null == mask ? null : FastIntegralImage.longIntegralImage((byte[])mask.getPixels(), w, h);
	}
	
	// Generate images
	final ImageBytes[] bis = new ImageBytes[Loader.getHighestMipMapLevel(patch) + 1];
	//
	if (null == mask) {
		bis[0] = new ImageBytes(P.asRGBBytes((int[])ip.getPixels()), ip.getWidth(), ip.getHeight());
		for (int i=1; i<bis.length; i++) {
			final int K = (int) Math.pow(2, i),
		          wk = w / K,
		          hk = h / K;
			// An image of the scaled size
			bis[i] = new ImageBytes(new byte[][]{FastIntegralImage.scaleAreaAverage(ir, w+1, h+1, wk, hk),
			                                     FastIntegralImage.scaleAreaAverage(ig, w+1, h+1, wk, hk),
			                                     FastIntegralImage.scaleAreaAverage(ib, w+1, h+1, wk, hk)},
			                        wk, hk);
		}
	} else {
		// With alpha channel
		bis[0] = new ImageBytes(P.asRGBABytes((int[])ip.getPixels(), (byte[])mask.getPixels(), null), ip.getWidth(), ip.getHeight());
		for (int i=1; i<bis.length; i++) {
			final int K = (int) Math.pow(2, i),
		          wk = w / K,
		          hk = h / K;
			// An image of the scaled size
			bis[i] = new ImageBytes(new byte[][]{FastIntegralImage.scaleAreaAverage(ir, w+1, h+1, wk, hk),
			                                     FastIntegralImage.scaleAreaAverage(ig, w+1, h+1, wk, hk),
			                                     FastIntegralImage.scaleAreaAverage(ib, w+1, h+1, wk, hk),
			                                     FastIntegralImage.scaleAreaAverage(im, w+1, h+1, wk, hk)},
			                        wk, hk);
		}
	}

	return bis;
}
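The per-level dimensions in both mipmap examples come from halving width and height once per level; a small sketch of that arithmetic (the sizes here are arbitrary):

public class MipMapDims {
    public static void main(String[] args) {
        final int w = 1024, h = 768, levels = 5;
        for (int i = 0; i < levels; i++) {
            final int k = 1 << i;                 // same as (int) Math.pow(2, i) in the loops above
            System.out.println("level " + i + ": " + (w / k) + "x" + (h / k));
        }
    }
}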