processing.core.PImage Java Examples
The following examples show how to use
processing.core.PImage.
You can vote up the ones you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example. You may also check out the related API usage in the sidebar.
Example #1
Source File: ImageCyclerBuffer.java From haxademic with MIT License | 6 votes |
/**
 * Builds an offscreen buffer that cycles between a set of images using a
 * GLSL transition shader. Each source image is pre-rendered into its own
 * crop-filled PGraphics so transitions blend between uniform-sized frames.
 *
 * @param w              buffer width
 * @param h              buffer height
 * @param images         source images to cycle through
 * @param frames         frame count stored for cycle timing (used elsewhere in the class)
 * @param transitionTime transition duration stored for the shader blend (used elsewhere)
 */
public ImageCyclerBuffer(int w, int h, PImage[] images, int frames, float transitionTime) {
	pg = P.p.createGraphics(w, h, P.P2D);
	pg.smooth(8);
	this.frames = frames;
	this.transitionTime = transitionTime;
	// prep & crop fill images
	imageFrames = new PGraphics[images.length];
	for (int i = 0; i < imageFrames.length; i++) {
		imageFrames[i] = P.p.createGraphics(w, h, P.P2D);
		imageFrames[i].beginDraw();
		imageFrames[i].background(0);
		imageFrames[i].endDraw();
		// crop-fill copy so every frame fills the buffer regardless of source aspect
		ImageUtil.cropFillCopyImage(images[i], imageFrames[i], true);
	}
	// Alternate transition shaders left in place for easy swapping:
	// transitionShader = P.p.loadShader(FileUtil.getFile("haxademic/shaders/transitions/dissolve.glsl"));
	transitionShader = P.p.loadShader(FileUtil.getPath("haxademic/shaders/transitions/directional-wipe.glsl"));
	// transitionShader = P.p.loadShader(FileUtil.getFile("haxademic/shaders/transitions/hsv-blend.glsl"));
	// transitionShader = P.p.loadShader(FileUtil.getFile("haxademic/shaders/transitions/colour-distance.glsl"));
	// Seed the shader with the initial pair of frames (indices are fields of the class).
	transitionShader.set("from", imageFrames[imageIndex]);
	transitionShader.set("to", imageFrames[nextIndex]);
}
Example #2
Source File: ImageUtils.java From Processing.R with GNU General Public License v3.0 | 6 votes |
private static float imgDifference(PImage i0, PImage i1) { float diff = 0; i0.loadPixels(); int[] ip0 = i0.pixels; i1.loadPixels(); int[] ip1 = i1.pixels; for (int n = 0; n < ip0.length; n++) { int pxl0 = ip0[n] int r0, g0, b0; r0 = (pxl0 >> 20) & 0xF; g0 = (pxl0 >> 12) & 0xF; b0 = (pxl0 >> 4) & 0xF; int pxl1 = ip1[n] int r1, g1, b1; r1 = (pxl1 >> 20) & 0xF; g1 = (pxl1 >> 12) & 0xF; b1 = (pxl1 >> 4) & 0xF; diff += PApplet.abs(r0 - r1) + PApplet.abs(g0 - g1) + PApplet.abs(b0 - b1); } // Each colour channel can have a difference 0-15 // Considering 3 colour channels (ignoring alpha) return diff / (ip0.length * 3 * 15); }
Example #3
Source File: AnimationComponent.java From Project-16x16 with GNU General Public License v3.0 | 6 votes |
/**
 * Controls the animation of elements by cycling through frames.
 * Advances {@code currentFrame} every {@code rate} applet frames; when the
 * sequence passes {@code length} it wraps to 0, flagging {@code ended} for
 * non-looping animations. Any sounds registered for the (possibly advanced)
 * frame are played as a side effect.
 *
 * @return PImage the frame that was current when this call began
 */
@SuppressWarnings("unchecked")
public PImage animate() {
	PImage frame = frames.get((int) currentFrame);
	// Advance only on applet frames that align with this animation's rate.
	if ((applet.frameCount - firstFrame) % rate == 0) {
		currentFrame++;
		if (currentFrame > length) {
			if (!loop) {
				ended = true;
			}
			currentFrame = 0;
		}
	}
	// NOTE: sounds are looked up for the frame AFTER the advance above — confirm intended.
	Collection<SFX> coll = (Collection<SFX>) sounds.get((int) currentFrame); // TODO high overhead?
	if (coll != null) {
		coll.forEach(sound -> Audio.play(sound));
	}
	return frame;
}
Example #4
Source File: TextRepeatDeformMultiline.java From haxademic with MIT License | 6 votes |
/**
 * Renders {@code txt} into the shared text buffer and returns a PImage
 * cropped to remove the surrounding empty space.
 *
 * @param txt the text to render
 * @return a cropped image of the rendered text (resized by the crop helper;
 *         the initial 100x100 size is just a placeholder allocation)
 */
protected PImage createCroppedText(String txt) {
	PImage textCropped = p.createImage(100, 100, P.ARGB);
	// draw text
	textBuffer.beginDraw();
	textBuffer.clear();
	textBuffer.fill(235);
	textBuffer.textAlign(P.CENTER, P.CENTER);
	textBuffer.textFont(fontBig);
	textBuffer.textSize(fontBig.getSize());
	textBuffer.text(txt, 0, 0, textBuffer.width, textBuffer.height);
	textBuffer.endDraw();
	// crop text
	// NOTE(review): the int[] args appear to be padding {top,right,bottom,left} — confirm
	// against ImageUtil.imageCroppedEmptySpace's signature.
	ImageUtil.imageCroppedEmptySpace(textBuffer, textCropped, ImageUtil.EMPTY_INT, false, new int[] {20, 0, 20, 0}, new int[] {0, 0, 0, 0}, p.color(0, 0));
	return textCropped;
}
Example #5
Source File: TimeLapse.java From haxademic with MIT License | 6 votes |
/**
 * Per-frame draw loop for the time-lapse renderer: displays the current
 * image from the sequence, advances every {@code _framesPerImage} frames,
 * then stops the video renderer and finally exits once the sequence ends.
 */
protected void drawApp() {
	p.background(0);
	PG.setColorForPImage(p);
	PG.setPImageAlpha(p, (p.frameCount % 2 == 1) ? 0.999f : 1 ); // stupid hack b/c UMovieMaker doesn't save the exact same frame twice in a row.
	// load and display current image
	if( _imageIndex < _images.size() ) {
		PImage img = p.loadImage( _imageDir + _images.get( _imageIndex ) );
		p.image( img, 0, 0, p.width, p.height );
	}
	// step to next image
	if( p.frameCount > 0 && p.frameCount % _framesPerImage == 0 ) _imageIndex++;
	// stop when done
	if( _imageIndex == _images.size() ) {
		Renderer.instance().videoRenderer.stop();
	} else if( _imageIndex == _images.size() + 1 ) {
		// one extra step past the end so the renderer stops before the app exits
		p.exit();
	}
}
Example #6
Source File: Demo_BlobDetection_perfTest.java From haxademic with MIT License | 6 votes |
/**
 * Runs blob detection on a downscaled copy of {@code source}, comparing two
 * blur strategies: a CPU FastBlurFilter on a PImage vs. GPU H/V blur shaders
 * on a PGraphics (selected by the {@code _usingPimg} flag for perf testing).
 */
protected void runBlobDetection( PImage source ) {
	if(_usingPimg == true) {
		// CPU path: copy into a PImage buffer, blur in software, detect.
		blobBufferImg.copy(source, 0, 0, source.width, source.height, 0, 0, blobBufferImg.width, blobBufferImg.height);
		FastBlurFilter.blur(blobBufferImg, 3);
		theBlobDetection.computeBlobs(blobBufferImg.pixels);
	} else {
		// GPU path: copy into a PGraphics buffer, blur with shader filters, detect.
		// Commented-out lines are alternate blur approaches kept for comparison.
		// blobBufferGraphics.beginDraw();
		blobBufferGraphics.copy(source, 0, 0, source.width, source.height, 0, 0, blobBufferGraphics.width, blobBufferGraphics.height);
		// blobBufferGraphics.endDraw();
		// blobBufferImg.copy(source, 0, 0, source.width, source.height, 0, 0, blobBufferImg.width, blobBufferImg.height);
		// BlurProcessingFilter.instance(P.p).applyTo(blobBufferGraphics);
		BlurHFilter.instance(P.p).applyTo(blobBufferGraphics);
		BlurVFilter.instance(P.p).applyTo(blobBufferGraphics);
		// blobBufferGraphics.filter(_blurH);
		// blobBufferGraphics.filter(_blurV);
		// theBlobDetection.computeBlobs(blobBufferImg.pixels);
		// get() pulls pixels back from the GPU buffer for the CPU-side detector.
		theBlobDetection.computeBlobs(blobBufferGraphics.get().pixels);
	}
}
Example #7
Source File: MultiSimpleCalibrator.java From PapARt with GNU Lesser General Public License v3.0 | 6 votes |
public static void drawAR(PApplet parent, PGraphicsOpenGL g, MultiSimpleCalibrator multiCalibrator, PVector pt) { // AR rendering, for touch and color tracking (and debug). if (multiCalibrator.getDisplay() instanceof ARDisplay) { ARDisplay display = (ARDisplay) multiCalibrator.getDisplay(); display.drawScreensOver(); parent.noStroke(); PImage img = multiCalibrator.getCameraTracking().getPImage(); if (multiCalibrator.getCameraTracking() != null && img != null) { parent.image(img, 0, 0, parent.width, parent.height); // ((PGraphicsOpenGL) (parent.g)).image(camera.getPImage(), 0, 0, frameWidth, frameHeight); } // TODO: Distorsion problems with higher image space distorisions (useless ?) DrawUtils.drawImage((PGraphicsOpenGL) parent.g, display.render(), 0, 0, parent.width, parent.height); } }
Example #8
Source File: ScreenshotUtil.java From haxademic with MIT License | 6 votes |
@Deprecated public static PImage getScreenshotMainMonitor(int x, int y, int width, int height) { GraphicsEnvironment ge = GraphicsEnvironment.getLocalGraphicsEnvironment(); GraphicsDevice[] gs = ge.getScreenDevices(); //DisplayMode mode = gs[0].getDisplayMode(); Rectangle bounds = new Rectangle(x, y, width, height); BufferedImage desktop = new BufferedImage(width, height, BufferedImage.TYPE_INT_RGB); try { desktop = new Robot(gs[0]).createScreenCapture(bounds); } catch(AWTException e) { System.err.println("Screen capture failed."); } return new PImage(desktop); }
Example #9
Source File: Demo_TexturedPointSheet.java From haxademic with MIT License | 6 votes |
protected void firstFrame() { // build points shape pointsShape = p.createShape(); pointsShape.beginShape(PConstants.POINTS); pointsShape.noFill(); float spread = 5f; pointsShape.strokeWeight(spread * 0.75f); PImage img = DemoAssets.smallTexture(); for (int x = 0; x < img.width; x++) { for (int y = 0; y < img.height; y++) { int pixelColor = ImageUtil.getPixelColor(img, x, y); pointsShape.stroke(pixelColor); pointsShape.vertex(x * spread, y * spread); } } pointsShape.endShape(); }
Example #10
Source File: Demo_WebCamMotionDetectionRectZoom.java From haxademic with MIT License | 6 votes |
/**
 * Webcam frame callback: mirrors the incoming frame into a buffer, lazily
 * creates the motion-detection map on first frame, refreshes its tunable
 * parameters from the UI, and feeds it the new buffer.
 *
 * @param frame the latest webcam frame
 */
@Override
public void newFrame(PImage frame) {
	// copy webcam and create motion detection at size of cropped webcam (and downscaling)
	// ImageUtil.cropFillCopyImage(frame, webcamBuffer, true);
	ImageUtil.copyImageFlipH(frame, webcamBuffer);
	// ImageUtil.copyImage(frame, webcamBuffer);
	// ImageUtil.flipH(webcamBuffer);
	// lazy init and update motion detection buffers/calcs
	if(motionDetectionMap == null) {
		motionDetectionMap = new BufferMotionDetectionMap(webcamBuffer, motionBufferScale);
	}
	// Pull the latest tuning values from the UI sliders each frame.
	motionDetectionMap.setBlendLerp(UI.value(BLEND_LERP));
	motionDetectionMap.setDiffThresh(UI.value(DIFF_THRESH));
	motionDetectionMap.setFalloffBW(UI.value(FALLOFF_BW));
	motionDetectionMap.setThresholdCutoff(UI.value(THRESHOLD_CUTOFF));
	motionDetectionMap.setBlur(UI.value(MOTION_DETECT_BLUR));
	motionDetectionMap.updateSource(webcamBuffer);
	// set textures for debug view
	// DebugView.setTexture(frame);
	// DebugView.setTexture(motionDetectionMap.backplate());
	// DebugView.setTexture(motionDetectionMap.differenceBuffer());
	// DebugView.setTexture(motionDetectionMap.bwBuffer());
}
Example #11
Source File: TiledFrameExporter.java From toxiclibs with GNU Lesser General Public License v2.1 | 6 votes |
/**
 * Prepares an n-by-n tiled export of the sketch: allocates the full-size
 * output buffer and precomputes one view offset per tile, row by row from
 * top-left, in a normalized [-1, 1] coordinate space.
 *
 * @param p the parent sketch (source of tile dimensions)
 * @param n number of tiles per axis
 */
TiledFrameExporter(PApplet p, int n) {
	parent = p;
	numTiles = n;
	buffer = new PImage(p.width * n, p.height * n);
	offsets = new Vec3D[numTiles * numTiles];
	normTileSize = 2.0 / numTiles;
	aspect = (double) p.height / p.width;
	// Walk rows top-down and columns left-to-right, accumulating offsets
	// by tile size (same accumulation order as the original to keep
	// float rounding identical).
	int cell = 0;
	double offY = 1 - normTileSize;
	for (int row = 0; row < numTiles; row++) {
		double offX = -1;
		for (int col = 0; col < numTiles; col++) {
			offsets[cell++] = new Vec3D((float) offX, (float) offY, 0);
			offX += normTileSize;
		}
		offY -= normTileSize;
	}
}
Example #12
Source File: Player.java From Project-16x16 with GNU General Public License v3.0 | 5 votes |
/**
 * Sets the current animation sequence for the Player to use. A non-looping
 * animation that hasn't ended is never interrupted, and re-requesting the
 * active animation is a no-op.
 *
 * FIX: the original compared {@code animation.name == anim.name()} by
 * reference. This happened to work because {@code Enum.name()} returns the
 * same String instance each call, but it silently breaks if {@code name}
 * is ever set from any other source; {@code equals} is used instead
 * (called on the never-null {@code anim.name()} so a null field is safe).
 *
 * @param anim the animation id
 */
private void setAnimation(ACTION anim) {
	if (anim.name().equals(animation.name) && !animation.ended) {
		return;
	}
	ArrayList<PImage> animSequence = playerAnimationSequences.get(anim);
	// changeAnimation(frames, loop, rate): loop flag and frame rate tuned per action.
	switch (anim) {
		case WALK :
			animation.changeAnimation(animSequence, true, 6);
			break;
		case IDLE :
			animation.changeAnimation(animSequence, true, 20);
			break;
		case JUMP :
			animation.changeAnimation(animSequence, false, 4);
			break;
		case LAND :
			animation.changeAnimation(animSequence, false, 2);
			break;
		case FALL :
			animation.changeAnimation(animSequence, true, 20);
			break;
		case ATTACK :
			animation.changeAnimation(animSequence, false, 4);
			break;
		case DASH :
			animation.changeAnimation(animSequence, false, 6);
			break;
		case DASH_ATTACK :
			animation.changeAnimation(animSequence, false, 2);
			break;
	}
	animation.ended = false;
	animation.name = anim.name();
}
Example #13
Source File: GifRender4.java From haxademic with MIT License | 5 votes |
/**
 * Adds the current canvas to the gif encoder, finishing the gif once
 * exactly 60 frames have been rendered.
 */
public void renderGifFrame() {
	// Grab the canvas and hand its native BufferedImage straight to the encoder.
	PImage currentCanvas = get();
	encoder.addFrame((BufferedImage) currentCanvas.getNative());
	// Close out the gif after frame 60.
	if (p.frameCount == 60) {
		encoder.finish();
		P.println("gif render done!");
	}
}
Example #14
Source File: ReflectionFilter.java From haxademic with MIT License | 5 votes |
/**
 * Builds a horizontally-reflected version of {@code source}: the whole image
 * is copied, then the right half is overwritten with the right half of the
 * reversed (mirrored) image, producing a reflection around the vertical
 * center line. The result is copied into {@code _image}.
 *
 * @param source the image to reflect
 */
protected void drawPixels( PImage source ) {
	_pg.beginDraw();
	ImageUtil.clearPGraphics( _pg );
	_pg.noStroke();
	_pg.fill(0,0);
	// Full copy of the source first...
	_pg.copy(source, 0, 0, source.width, source.height, 0, 0, source.width, source.height );
	// ...then overwrite the right half with the mirrored image's right half.
	_pg.copy( ImageUtil.getReversePImageFast( source ), source.width / 2, 0, source.width / 2, source.height, source.width / 2, 0, source.width / 2, source.height );
	_pg.endDraw();
	_image.copy( _pg, 0, 0, _width, _height, 0, 0, _width, _height );
}
Example #15
Source File: FileUtil.java From haxademic with MIT License | 5 votes |
/**
 * Loads every image of the given formats from a directory.
 *
 * @param directory directory to scan
 * @param formats   accepted file formats/extensions (as understood by FileUtil)
 * @return the loaded images, in the order FileUtil lists the files
 */
public static ArrayList<PImage> loadImagesFromDir(String directory, String formats) {
	// Collect matching paths first, then load each one.
	ArrayList<String> paths = FileUtil.getFilesInDirOfTypes( directory, formats );
	ArrayList<PImage> loaded = new ArrayList<PImage>(paths.size());
	for (String path : paths) {
		loaded.add(P.p.loadImage(path));
	}
	return loaded;
}
Example #16
Source File: Device.java From KinectPV2 with MIT License | 5 votes |
/**
 * Get Depth Mask Image, outline color of the users.
 * Copies the latest raw frame from the native (JNI) capture into the
 * backing image's pixel buffer before returning it.
 *
 * @return PImage the depth-mask image (shared buffer, overwritten each call)
 */
public PImage getDepthMaskImage() {
	int[] depthMaskData = jniGetDepthMask();
	// Bulk-copy the native frame into the image's pixel array, then push to the texture.
	PApplet.arrayCopy(depthMaskData, 0, depthMaskImg.pixels(), 0, depthMaskImg.getImgSize());
	depthMaskImg.updatePixels();
	// jniDepthReadyCopy(true);
	return depthMaskImg.img;
}
Example #17
Source File: ParticleSwirl.java From haxademic with MIT License | 5 votes |
public Particle2d launch(PGraphics pg, float x, float y, PImage img) { super.launch(pg, x, y, img); // repurpose x/y as radius/rads radius = x; radians = y; // chill out the reporposed values gravity.div(10f); speed.x = speed.x / 30f; speed.y = speed.y; return this; }
Example #18
Source File: BrimLiskiRepetitions.java From haxademic with MIT License | 5 votes |
/**
 * Draws the deformed mesh as colored triangles, sampling each face's fill
 * color from the movie frame at the face's first vertex position.
 * Skips drawing until the movie has real pixel data.
 */
protected void drawColorMesh() {
	if( _mesh == null ) createMesh();
	PImage img = _myMovie;
	// set draw props to draw texture mesh properly
	p.fill( 0 );
	p.noStroke();
	p.translate( 12, 12, 12 );
	deformWithAudio();
	// iterate over all mesh triangles
	// and add their vertices
	p.beginShape(P.TRIANGLES);
	int index = 0;	// use to traverse eq spectrum
	// pixels.length > 100 acts as a "movie has loaded a frame" guard
	if( _myMovie.pixels.length > 100 ) {
		for( Face f : _meshDeform.getFaces() ) {
			// P.println((int)f.a.x+","+(int)f.a.y);
			// fill color comes from the movie pixel under the face's first vertex
			p.fill( ImageUtil.getPixelColor( img, (int)f.a.x, (int)f.a.y ) );
			// P.println(ImageUtil.getPixelColor( img, (int)f.a.x, (int)f.a.y ));
			// get z-depth
			// brightA = getBrightnessForTextureLoc( img, f.uvA.x, f.uvA.y ) * 3;
			// brightB = getBrightnessForTextureLoc( img, f.uvB.x, f.uvB.y ) * 3;
			// brightC = getBrightnessForTextureLoc( img, f.uvC.x, f.uvC.y ) * 3;
			// draw vertices
			// float amp = 0.5f + p.getAudio().getFFT().spectrum[index % 512] * 10;
			p.vertex(f.a.x,f.a.y,f.a.z);
			p.vertex(f.b.x,f.b.y,f.b.z);
			p.vertex(f.c.x,f.c.y,f.c.z);
			index++;
		}
	}
	p.endShape();
}
Example #19
Source File: ColorDetection.java From PapARt with GNU Lesser General Public License v3.0 | 5 votes |
/**
 * Draws the image from {@code getImage()} at the current location, scaled
 * to the capture size. Does nothing when no image is available (e.g. debug
 * / no-camera mode).
 */
public void drawCapturedImage() {
	PImage captured = getImage();
	if (captured == null) {
		return; // nothing to draw
	}
	paperScreen.image(captured, 0, 0, captureSize.x, captureSize.y);
}
Example #20
Source File: ImageSequenceMovieClip.java From haxademic with MIT License | 5 votes |
/**
 * Maps a normalized progress value to a frame of the image sequence.
 * Returns a blank placeholder if the computed frame hasn't loaded yet.
 *
 * @param progress playback progress, expected in [0..1)
 * @return the frame for that progress, or BLANK_IMAGE when unavailable
 */
public PImage getFrameByProgress(float progress) {
	// Scale progress to a frame index, wrapping at the sequence length.
	int frameIndex = P.floor(progress * (float) totalImages) % totalImages;
	boolean frameLoaded = frameIndex < imageSequence.size();
	return frameLoaded ? imageSequence.get(frameIndex) : BLANK_IMAGE;
}
Example #21
Source File: Device.java From KinectPV2 with MIT License | 5 votes |
/**
 * Get Long Exposure Infrared Image as PImage 512 x 424.
 * Copies the latest raw frame from the native (JNI) capture into the
 * backing image's pixel buffer before returning it.
 *
 * @return PImage the long-exposure IR image (shared buffer, overwritten each call)
 */
public PImage getInfraredLongExposureImage() {
	int[] longExposureData = jniGetInfraredLongExposure();
	// Bulk-copy the native frame into the image's pixel array, then push to the texture.
	PApplet.arrayCopy(longExposureData, 0, infraredLongExposureImg.pixels(), 0, infraredLongExposureImg.getImgSize());
	infraredLongExposureImg.updatePixels();
	return infraredLongExposureImg.img;
}
Example #22
Source File: ColorDetection.java From PapARt with GNU Lesser General Public License v3.0 | 5 votes |
/** * Return the image used for color computation. Warning, can return null * images. * * @return the PImage or null in debug mode. */ public PImage getImage() { // TODO: NoCamera HACK if (paperScreen.cameraTracking == null) { return null; } PImage out = boardView.getViewOf(paperScreen.cameraTracking); return out; }
Example #23
Source File: Demo_BufferActivityMonitor_flipped.java From haxademic with MIT License | 5 votes |
public void newFrame(PImage frame) { // lazy-init flipped camera buffer if(flippedCamera == null) flippedCamera = PG.newPG2DFast(frame.width, frame.height); ImageUtil.copyImageFlipH(frame, flippedCamera); // calculate activity monitor with new frame activityMonitor.update(flippedCamera); DebugView.setTexture("flippedCamera", flippedCamera); }
Example #24
Source File: ChromaMovie.java From haxademic with MIT License | 5 votes |
/**
 * Returns the current video frame, or a blank placeholder until the movie
 * is both loaded and playing.
 *
 * FIX (idiom): replaced the {@code == false} comparisons with boolean
 * negation; behavior is unchanged.
 *
 * @return the current frame buffer, or BLANK_IMAGE when not ready
 */
public PImage image() {
	if (!isLoaded() || !isPlaying) {
		return BLANK_IMAGE;
	}
	return buffer;
}
Example #25
Source File: ParticleSystem.java From PixelFlow with MIT License | 5 votes |
/**
 * Builds a textured quad PShape for one particle: a 4x4-unit sprite quad
 * centered on the origin, tinted red-ish (the mouse particle gets a fixed
 * tint; others get a randomized red channel).
 *
 * @param particle    particle the shape represents (used to detect the mouse particle)
 * @param pimg_sprite sprite texture applied to the quad
 * @return the finished quad shape
 */
public PShape createParticleShape(DwParticle2D particle, PImage pimg_sprite){
	final float rad = 2;
	PShape shp_sprite = papplet.createShape();
	shp_sprite.beginShape(PConstants.QUADS);
	shp_sprite.noStroke();
	shp_sprite.noFill();
	shp_sprite.tint(255,10,10);
	if(particle.idx == IDX_MOUSE_PARTICLE){
		// the mouse-controlled particle gets a distinct fixed tint
		shp_sprite.tint(200,100,100);
	} else {
		// slight per-particle variation in the red channel only
		float r = 0 + papplet.random(-30, 30);
		float g = 100;
		float b = 100;
		shp_sprite.tint(r,g,b);
	}
	// NORMAL texture mode: UVs below are in 0..1 space
	shp_sprite.textureMode(PConstants.NORMAL);
	shp_sprite.texture(pimg_sprite);
	shp_sprite.normal(0, 0, 1);
	// counter-clockwise quad from top-left, full texture mapped
	shp_sprite.vertex(-rad, -rad, 0, 0);
	shp_sprite.vertex(+rad, -rad, 1, 0);
	shp_sprite.vertex(+rad, +rad, 1, 1);
	shp_sprite.vertex(-rad, +rad, 0, 1);
	shp_sprite.endShape();
	return shp_sprite;
}
Example #26
Source File: Device.java From KinectPV2 with MIT License | 5 votes |
/**
 * Get Independent Body Index Track: one image per currently tracked user.
 *
 * FIX: parameterized the raw {@code ArrayList} return type as
 * {@code ArrayList<PImage>} (identical after erasure, so binary-compatible
 * with existing callers) and removed the stale {@code @param index} javadoc
 * tag — the method takes no parameters.
 *
 * @return images for each user whose track flag is set (may be empty)
 */
public ArrayList<PImage> getBodyTrackUser() {
	ArrayList<PImage> listBodyTrack = new ArrayList<PImage>(0);
	int[] usersIds = jniGetBodyTrackIds();
	for (int i = 0; i < usersIds.length; i++) {
		// Only users flagged as tracked (flag == 1) produce an image.
		if (usersIds[i] == 1) {
			int[] rawData = jniGetBodyIndexUser(i);
			// Copy the native frame into the per-user image buffer, then push pixels.
			PApplet.arrayCopy(rawData, 0, bodyTrackUsersImg[i].pixels(), 0, bodyTrackUsersImg[i].getImgSize());
			bodyTrackUsersImg[i].updatePixels();
			listBodyTrack.add(bodyTrackUsersImg[i].img);
		}
	}
	return listBodyTrack;
}
Example #27
Source File: Shadertoy_AbstractCorridor.java From PixelFlow with MIT License | 5 votes |
/**
 * Sketch setup: initializes the PixelFlow context and the shadertoy shader,
 * loads the two source textures, uploads them to GPU textures with mirrored
 * repeat + linear filtering, and enables mipmapping on both.
 */
public void setup() {
	surface.setResizable(true);
	context = new DwPixelFlow(this);
	context.print();
	context.printGL();
	toy = new DwShadertoy(context, "data/AbstractCorridor.frag");
	// load assets
	PImage img0 = loadImage("../Shadertoy/Shadertoy_AbstractCorridor/data/Abstract 2.jpg");
	PImage img1 = loadImage("../Shadertoy/Shadertoy_AbstractCorridor/data/Wood.jpg");
	// create textures
	tex_0.resize(context, GL2.GL_RGBA8, img0.width, img0.height, GL2.GL_RGBA, GL2.GL_UNSIGNED_BYTE, GL2.GL_LINEAR, GL2.GL_MIRRORED_REPEAT, 4,1);
	tex_1.resize(context, GL2.GL_RGBA8, img1.width, img1.height, GL2.GL_RGBA, GL2.GL_UNSIGNED_BYTE, GL2.GL_LINEAR, GL2.GL_MIRRORED_REPEAT, 4,1);
	// copy images to textures
	DwFilter.get(context).copy.apply(img0, tex_0);
	DwFilter.get(context).copy.apply(img1, tex_1);
	// mipmap
	DwShadertoy.setTextureFilter(tex_0, DwShadertoy.TexFilter.MIPMAP);
	DwShadertoy.setTextureFilter(tex_1, DwShadertoy.TexFilter.MIPMAP);
	frameRate(60);
}
Example #28
Source File: Device.java From KinectPV2 with MIT License | 5 votes |
/**
 * Get Color Image as PImage 1920 x 1080.
 * Copies the latest raw frame from the native (JNI) capture into the
 * backing image's pixel buffer, and also retains the raw int data on the
 * image for callers that need it.
 *
 * @return PImage the color image (shared buffer, overwritten each call)
 */
public PImage getColorImage() {
	int[] colorData = jniGetColorData();
	PApplet.arrayCopy(colorData, 0, colorImg.pixels(), 0, colorImg.getImgSize());
	colorImg.updatePixels();
	// keep a second copy of the raw frame alongside the displayable pixels
	PApplet.arrayCopy(colorData, 0, colorImg.rawIntData, 0, colorImg.getImgSize());
	return colorImg.img;
}
Example #29
Source File: _primage.java From mesh with MIT License | 5 votes |
/**
 * CAUTION not thread safe when called outside of setup/draw func.
 * Draws the given image at (x, y) with size (w, h) on the active Processing
 * instance, if one exists; always returns the unit tuple.
 */
public static Tuple invoke(final Object img, final double x, final double y, final double w, final double h) {
	// No live Processing host — drawing is a no-op.
	if (Processing.INSTANCE == null) {
		return Tuple.UNIT;
	}
	Processing.INSTANCE.image((PImage) img, (float) x, (float) y, (float) w, (float) h);
	return Tuple.UNIT;
}
Example #30
Source File: Demo_KinectV1_2dConnections.java From haxademic with MIT License | 5 votes |
/**
 * Draws the depth camera's RGB image repeatedly, rotated evenly around a
 * full circle, at 25% alpha so the copies blend together.
 *
 * @param rotations number of rotated copies to draw
 */
protected void drawWebCam( float rotations ) {
	IDepthCamera depthCamera = DepthCamera.instance().camera;
	// draw cam
	PG.setColorForPImage(p);
	PG.setPImageAlpha(p, 0.25f);
	PImage drawCamImg = depthCamera.getRgbImage();
	// PImage drawCamImg = getFilteredCam();
	// NOTE: rotations accumulate — each p.rotate() adds to the current matrix.
	for( int i=0; i < rotations; i++ ) {
		p.rotate((float)P.TWO_PI/rotations * (float)i);
		p.image( drawCamImg, 0, 0 );
	}
}