Java Code Examples for android.renderscript.Type#Builder
The following examples show how to use
android.renderscript.Type#Builder.
You can vote up the ones you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example 1
Source File: YuvToRgb.java From unity-android-native-camera with MIT License | 6 votes |
private void createAllocations(RenderScript rs) { final int width = mInputSize.getWidth(); final int height = mInputSize.getHeight(); mOutBufferInt = new int[width * height]; Type.Builder yuvTypeBuilder = new Type.Builder(rs, Element.YUV(rs)); yuvTypeBuilder.setX(width); yuvTypeBuilder.setY(height); yuvTypeBuilder.setYuvFormat(ImageFormat.YUV_420_888); mInputAllocation = Allocation.createTyped(rs, yuvTypeBuilder.create(), Allocation.USAGE_IO_INPUT | Allocation.USAGE_SCRIPT); Type rgbType = Type.createXY(rs, Element.RGBA_8888(rs), width, height); Type intType = Type.createXY(rs, Element.U32(rs), width, height); mOutputAllocation = Allocation.createTyped(rs, rgbType, Allocation.USAGE_IO_OUTPUT | Allocation.USAGE_SCRIPT); mOutputAllocationInt = Allocation.createTyped(rs, intType, Allocation.USAGE_SCRIPT); }
Example 2
Source File: Layer.java From rscnn with MIT License | 6 votes |
protected void allocFeatureMap() { Type.Builder outputType = new Type.Builder(renderScript, Element.F32(renderScript)); outputType.setZ(outputShape[0]); outputType.setY(outputShape[1] * outputShape[2]); outputType.setX(getOutputChannelAligned()); Allocation outAllocation = Allocation.createTyped(renderScript, outputType.create()); FeatureMap output = new FeatureMap(); output.setFeatureMap(outAllocation); output.setN(outputShape[0]); output.setH(outputShape[1]); output.setW(outputShape[2]); output.setC(outputShape[3]); output.setPad4(true); if(this.featureMapOutput!=null){ ((FeatureMap)featureMapOutput).getFeatureMap().destroy(); } this.featureMapOutput = output; }
Example 3
Source File: Layer.java From rscnn with MIT License | 6 votes |
protected void allocFeatureMapNoPad() { Type.Builder outputType = new Type.Builder(renderScript, Element.F32(renderScript)); outputType.setZ(outputShape[0]); outputType.setY(outputShape[1] * outputShape[2]); outputType.setX(outputShape[3]); Allocation outAllocation = Allocation.createTyped(renderScript, outputType.create()); FeatureMap output = new FeatureMap(); output.setFeatureMap(outAllocation); output.setN(outputShape[0]); output.setH(outputShape[1]); output.setW(outputShape[2]); output.setC(outputShape[3]); output.setPad4(false); if(this.featureMapOutput!=null){ ((FeatureMap)featureMapOutput).getFeatureMap().destroy(); } this.featureMapOutput = output; }
Example 4
Source File: CameraSource.java From Machine-Learning-Projects-for-Mobile-Applications with MIT License | 6 votes |
public Allocation renderScriptNV21ToRGBA888(Context context, int width, int height, byte[] nv21) { RenderScript rs = RenderScript.create(context); ScriptIntrinsicYuvToRGB yuvToRgbIntrinsic = null; if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.JELLY_BEAN_MR1) { yuvToRgbIntrinsic = ScriptIntrinsicYuvToRGB.create(rs, Element.U8_4(rs)); } Type.Builder yuvType = new Type.Builder(rs, Element.U8(rs)).setX(nv21.length); Allocation in = Allocation.createTyped(rs, yuvType.create(), Allocation.USAGE_SCRIPT); Type.Builder rgbaType = new Type.Builder(rs, Element.RGBA_8888(rs)).setX(width).setY(height); Allocation out = Allocation.createTyped(rs, rgbaType.create(), Allocation.USAGE_SCRIPT); in.copyFrom(nv21); if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1) { yuvToRgbIntrinsic.setInput(in); } if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1) { yuvToRgbIntrinsic.forEach(out); } return out; }
Example 5
Source File: TflitePlugin.java From flutter_tflite with MIT License | 6 votes |
public Allocation renderScriptNV21ToRGBA888(Context context, int width, int height, byte[] nv21) { // https://stackoverflow.com/a/36409748 RenderScript rs = RenderScript.create(context); ScriptIntrinsicYuvToRGB yuvToRgbIntrinsic = ScriptIntrinsicYuvToRGB.create(rs, Element.U8_4(rs)); Type.Builder yuvType = new Type.Builder(rs, Element.U8(rs)).setX(nv21.length); Allocation in = Allocation.createTyped(rs, yuvType.create(), Allocation.USAGE_SCRIPT); Type.Builder rgbaType = new Type.Builder(rs, Element.RGBA_8888(rs)).setX(width).setY(height); Allocation out = Allocation.createTyped(rs, rgbaType.create(), Allocation.USAGE_SCRIPT); in.copyFrom(nv21); yuvToRgbIntrinsic.setInput(in); yuvToRgbIntrinsic.forEach(out); return out; }
Example 6
Source File: STUtils.java From Fatigue-Detection with MIT License | 6 votes |
@SuppressLint("NewApi") public static Bitmap NV21ToRGBABitmap(byte []nv21, int width, int height, Context context) { TimingLogger timings = new TimingLogger(TIMING_LOG_TAG, "NV21ToRGBABitmap"); Rect rect = new Rect(0, 0, width, height); try { Class.forName("android.renderscript.Element$DataKind").getField("PIXEL_YUV"); Class.forName("android.renderscript.ScriptIntrinsicYuvToRGB"); byte[] imageData = nv21; if (mRS == null) { mRS = RenderScript.create(context); mYuvToRgb = ScriptIntrinsicYuvToRGB.create(mRS, Element.U8_4(mRS)); Type.Builder tb = new Type.Builder(mRS, Element.createPixel(mRS, Element.DataType.UNSIGNED_8, Element.DataKind.PIXEL_YUV)); tb.setX(width); tb.setY(height); tb.setMipmaps(false); tb.setYuvFormat(ImageFormat.NV21); ain = Allocation.createTyped(mRS, tb.create(), Allocation.USAGE_SCRIPT); timings.addSplit("Prepare for ain"); Type.Builder tb2 = new Type.Builder(mRS, Element.RGBA_8888(mRS)); tb2.setX(width); tb2.setY(height); tb2.setMipmaps(false); aOut = Allocation.createTyped(mRS, tb2.create(), Allocation.USAGE_SCRIPT & Allocation.USAGE_SHARED); timings.addSplit("Prepare for aOut"); bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888); timings.addSplit("Create Bitmap"); } ain.copyFrom(imageData); timings.addSplit("ain copyFrom"); mYuvToRgb.setInput(ain); timings.addSplit("setInput ain"); mYuvToRgb.forEach(aOut); timings.addSplit("NV21 to ARGB forEach"); aOut.copyTo(bitmap); timings.addSplit("Allocation to Bitmap"); } catch (Exception e) { YuvImage yuvImage = new YuvImage(nv21, ImageFormat.NV21, width, height, null); timings.addSplit("NV21 bytes to YuvImage"); ByteArrayOutputStream baos = new ByteArrayOutputStream(); yuvImage.compressToJpeg(rect, 90, baos); byte[] cur = baos.toByteArray(); timings.addSplit("YuvImage crop and compress to Jpeg Bytes"); bitmap = BitmapFactory.decodeByteArray(cur, 0, cur.length); timings.addSplit("Jpeg Bytes to Bitmap"); } timings.dumpToLog(); return bitmap; }
Example 7
Source File: Healing.java From style-transfer with Apache License 2.0 | 5 votes |
/** * This function only assumes mPointsXY, mPasteOffX, mPasteOffY * * @param healing * @param rs * @param image */ public void heal(ScriptC_healing healing, RenderScript rs, Bitmap image, Bitmap output) { long time = System.nanoTime(); Type.Builder floatImage = new Type.Builder(rs, Element.F32_3(rs)); floatImage.setX(mRoiBounds.width()); floatImage.setY(mRoiBounds.height()); Bitmap mask_bitmap = buildMask(mRoiBounds, mPointsXY); Bitmap dest_bitmap = createMutableBitmap(image, mRoiBounds.left, mRoiBounds.top, mRoiBounds.width(), mRoiBounds.height()); Allocation dest_alloc = Allocation.createFromBitmap(rs, dest_bitmap); Bitmap src_bitmap = createMutableBitmap(image, mCutOffsetX, mCutOffsetY, mRoiBounds.width(), mRoiBounds.height()); Allocation src_alloc = Allocation.createFromBitmap(rs, src_bitmap); Allocation mask_alloc = Allocation.createFromBitmap(rs, mask_bitmap); healing.invoke_heal(mask_alloc, src_alloc, dest_alloc); dest_alloc.copyTo(dest_bitmap); dest_bitmap.setHasAlpha(true); // build the undo mUndoBitmap = Bitmap.createBitmap(mRoiBounds.width(), mRoiBounds.height(), Bitmap.Config.ARGB_8888); Canvas undoCanvas = new Canvas(mUndoBitmap); Rect undoRect = new Rect(0, 0, mRoiBounds.width(), mRoiBounds.height()); undoCanvas.drawBitmap(output, mRoiBounds, undoRect, null); Canvas c = new Canvas(output); c.drawBitmap(image, 0, 0, null); c.drawBitmap(dest_bitmap, mRoiBounds.left, mRoiBounds.top, null); Log.v(TAG, " time ss to smart paste = " + (System.nanoTime() - time) / 1E6f + "ms"); heal_orig(healing, rs, image, output); }
Example 8
Source File: FindRegion.java From style-transfer with Apache License 2.0 | 5 votes |
Allocation allocFloat2(float[] p, RenderScript rs) { Type.Builder builderF32_2 = new Type.Builder(rs, Element.F32_2(rs)); builderF32_2.setX(p.length / 2); Allocation ret = Allocation.createTyped(rs, builderF32_2.create()); ret.copyFrom(p); return ret; }
Example 9
Source File: ViewfinderProcessor.java From android-HdrViewfinder with Apache License 2.0 | 5 votes |
public ViewfinderProcessor(RenderScript rs, Size dimensions) { Type.Builder yuvTypeBuilder = new Type.Builder(rs, Element.YUV(rs)); yuvTypeBuilder.setX(dimensions.getWidth()); yuvTypeBuilder.setY(dimensions.getHeight()); yuvTypeBuilder.setYuvFormat(ImageFormat.YUV_420_888); mInputHdrAllocation = Allocation.createTyped(rs, yuvTypeBuilder.create(), Allocation.USAGE_IO_INPUT | Allocation.USAGE_SCRIPT); mInputNormalAllocation = Allocation.createTyped(rs, yuvTypeBuilder.create(), Allocation.USAGE_IO_INPUT | Allocation.USAGE_SCRIPT); Type.Builder rgbTypeBuilder = new Type.Builder(rs, Element.RGBA_8888(rs)); rgbTypeBuilder.setX(dimensions.getWidth()); rgbTypeBuilder.setY(dimensions.getHeight()); mPrevAllocation = Allocation.createTyped(rs, rgbTypeBuilder.create(), Allocation.USAGE_SCRIPT); mOutputAllocation = Allocation.createTyped(rs, rgbTypeBuilder.create(), Allocation.USAGE_IO_OUTPUT | Allocation.USAGE_SCRIPT); HandlerThread processingThread = new HandlerThread("ViewfinderProcessor"); processingThread.start(); mProcessingHandler = new Handler(processingThread.getLooper()); mHdrMergeScript = new ScriptC_hdr_merge(rs); mHdrMergeScript.set_gPrevFrame(mPrevAllocation); mHdrTask = new ProcessingTask(mInputHdrAllocation, dimensions.getWidth()/2, true); mNormalTask = new ProcessingTask(mInputNormalAllocation, 0, false); setRenderMode(MODE_NORMAL); }
Example 10
Source File: Camera.java From android_9.0.0_r45 with Apache License 2.0 | 4 votes |
/** * <p>Create a {@link android.renderscript RenderScript} * {@link android.renderscript.Allocation Allocation} to use as a * destination of preview callback frames. Use * {@link #setPreviewCallbackAllocation setPreviewCallbackAllocation} to use * the created Allocation as a destination for camera preview frames.</p> * * <p>The Allocation will be created with a YUV type, and its contents must * be accessed within Renderscript with the {@code rsGetElementAtYuv_*} * accessor methods. Its size will be based on the current * {@link Parameters#getPreviewSize preview size} configured for this * camera.</p> * * @param rs the RenderScript context for this Allocation. * @param usage additional usage flags to set for the Allocation. The usage * flag {@link android.renderscript.Allocation#USAGE_IO_INPUT} will always * be set on the created Allocation, but additional flags may be provided * here. * @return a new YUV-type Allocation with dimensions equal to the current * preview size. * @throws RSIllegalArgumentException if the usage flags are not compatible * with an YUV Allocation. * @see #setPreviewCallbackAllocation * @hide */ public final Allocation createPreviewAllocation(RenderScript rs, int usage) throws RSIllegalArgumentException { Parameters p = getParameters(); Size previewSize = p.getPreviewSize(); Type.Builder yuvBuilder = new Type.Builder(rs, Element.createPixel(rs, Element.DataType.UNSIGNED_8, Element.DataKind.PIXEL_YUV)); // Use YV12 for wide compatibility. Changing this requires also // adjusting camera service's format selection. yuvBuilder.setYuvFormat(ImageFormat.YV12); yuvBuilder.setX(previewSize.width); yuvBuilder.setY(previewSize.height); Allocation a = Allocation.createTyped(rs, yuvBuilder.create(), usage | Allocation.USAGE_IO_INPUT); return a; }
Example 11
Source File: STUtils.java From TikTok with Apache License 2.0 | 4 votes |
@SuppressLint("NewApi") public static Bitmap NV21ToRGBABitmap(byte []nv21, int width, int height, Context context) { TimingLogger timings = new TimingLogger(TIMING_LOG_TAG, "NV21ToRGBABitmap"); Rect rect = new Rect(0, 0, width, height); try { Class.forName("android.renderscript.Element$DataKind").getField("PIXEL_YUV"); Class.forName("android.renderscript.ScriptIntrinsicYuvToRGB"); byte[] imageData = nv21; if (mRS == null) { mRS = RenderScript.create(context); mYuvToRgb = ScriptIntrinsicYuvToRGB.create(mRS, Element.U8_4(mRS)); Type.Builder tb = new Type.Builder(mRS, Element .createPixel(mRS, Element.DataType.UNSIGNED_8, Element.DataKind.PIXEL_YUV)); tb.setX(width); tb.setY(height); tb.setMipmaps(false); tb.setYuvFormat(ImageFormat.NV21); ain = Allocation.createTyped(mRS, tb.create(), Allocation.USAGE_SCRIPT); timings.addSplit("Prepare for ain"); Type.Builder tb2 = new Type.Builder(mRS, Element.RGBA_8888(mRS)); tb2.setX(width); tb2.setY(height); tb2.setMipmaps(false); aOut = Allocation .createTyped(mRS, tb2.create(), Allocation.USAGE_SCRIPT & Allocation.USAGE_SHARED); timings.addSplit("Prepare for aOut"); bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888); timings.addSplit("Create Bitmap"); } ain.copyFrom(imageData); timings.addSplit("ain copyFrom"); mYuvToRgb.setInput(ain); timings.addSplit("setInput ain"); mYuvToRgb.forEach(aOut); timings.addSplit("NV21 to ARGB forEach"); aOut.copyTo(bitmap); timings.addSplit("Allocation to Bitmap"); } catch (Exception e) { YuvImage yuvImage = new YuvImage(nv21, ImageFormat.NV21, width, height, null); timings.addSplit("NV21 bytes to YuvImage"); ByteArrayOutputStream baos = new ByteArrayOutputStream(); yuvImage.compressToJpeg(rect, 90, baos); byte[] cur = baos.toByteArray(); timings.addSplit("YuvImage crop and compress to Jpeg Bytes"); bitmap = BitmapFactory.decodeByteArray(cur, 0, cur.length); timings.addSplit("Jpeg Bytes to Bitmap"); } timings.dumpToLog(); return bitmap; }
Example 12
Source File: Deconvolution.java From rscnn with MIT License | 4 votes |
/**
 * Initializes the deconvolution kernel and bias allocations and binds them,
 * together with the layer hyper-parameters, to the RenderScript.
 *
 * Kernel layout: x = channel-aligned input channels, y = kernelH * kernelW,
 * with zero padding for channels beyond the real input channel count.
 */
private void initKernel() {
    int inputChannel = inputShape[0][3];
    int kernelHeight = kernelH;
    int kernelWidth = kernelW;
    int kernelSize = kernelHeight * kernelWidth;
    int inputChannelAligned = getInputChannelAligned();
    int totalKernelSize = inputChannelAligned * kernelSize;
    Allocation kernelAllocation;
    Allocation biasAllocation;
    // Kernel: 2-D F32 (aligned channels x spatial taps); bias: 1-D F32.
    Type.Builder kernelType = new Type.Builder(renderScript, Element.F32(renderScript));
    kernelType.setX(inputChannelAligned);
    kernelType.setY(kernelH * kernelW);
    Type biasType = Type.createX(renderScript, Element.F32(renderScript), inputChannelAligned);
    kernelAllocation = Allocation.createTyped(renderScript, kernelType.create(),
            Allocation.MipmapControl.MIPMAP_NONE,
            Allocation.USAGE_GRAPHICS_TEXTURE | Allocation.USAGE_SCRIPT);
    biasAllocation = Allocation.createTyped(renderScript, biasType,
            Allocation.MipmapControl.MIPMAP_NONE,
            Allocation.USAGE_GRAPHICS_TEXTURE | Allocation.USAGE_SCRIPT);
    if(weightBuffer==null) {
        // No pre-packed buffer: repack weight[i][0][j][k] into (tap-major,
        // channel-minor) order, zero-filling the alignment padding channels.
        float[] kernelMatrix = new float[totalKernelSize];
        float[] biasArray = new float[inputChannelAligned];
        int count = 0;
        for (int j = 0; j < kernelHeight; j++) {
            for (int k = 0; k < kernelWidth; k++) {
                for (int i = 0; i < inputChannelAligned; i++) {
                    if (i >= inputChannel) {
                        kernelMatrix[count++] = 0; // padding channel
                    } else {
                        kernelMatrix[count++] = weight[i][0][j][k];
                    }
                }
            }
        }
        for (int i = 0; i < inputChannelAligned; i++) {
            if (i >= inputChannel) {
                biasArray[i] = 0; // padding channel
            } else {
                biasArray[i] = bias[i];
            }
        }
        kernelAllocation.copyFrom(kernelMatrix);
        biasAllocation.copyFrom(biasArray);
    } else {
        // Pre-packed buffers are assumed to already be in the layout above
        // — copied without element-type checking.
        kernelAllocation.copyFromUnchecked(weightBuffer);
        biasAllocation.copyFromUnchecked(biasBuffer);
    }
    // Bind data and hyper-parameters to the script globals.
    scriptDeconvolution = new ScriptC_Deconvolution(renderScript);
    scriptDeconvolution.set_BiasData(biasAllocation);
    scriptDeconvolution.set_KernelData(kernelAllocation);
    scriptDeconvolution.set_channelAligned(inputChannelAligned);
    scriptDeconvolution.set_padH(padH);
    scriptDeconvolution.set_padW(padW);
    scriptDeconvolution.set_strideH(strideH);
    scriptDeconvolution.set_strideW(strideW);
    scriptDeconvolution.set_kernelH(kernelH);
    scriptDeconvolution.set_kernelW(kernelW);
}
Example 13
Source File: Convolution.java From rscnn with MIT License | 4 votes |
/**
 * Runs this convolution layer: chooses between the ScriptIntrinsicBLAS
 * SGEMM path (1x1 directly, nxn via im2col) and the hand-written
 * RenderScript kernels (conv1x1 / depthwise conv_dw4 / blocked conv4n),
 * then applies bias and/or ReLU where configured.
 *
 * <p>Bug fix: the nxn-intrinsic branch assigned
 * {@code conv1x1UserIntrinsic = true} — which never enabled the BLAS path
 * for nxn kernels and instead corrupted the 1x1 flag for later calls — it
 * now sets {@code useIntrinsicBlas}. Also removed the unused local
 * {@code outputChannel} and the inconsistent {@code ScriptC.LaunchOptions}
 * type reference.</p>
 */
@Override
public void computeFeatureMap() {
    int outputHeight = outputShape[1];
    int outputWidth = outputShape[2];
    int inputChannel = inputShape[0][3];
    int outputChannelAligned = getOutputChannelAligned();
    int inputChannelAligned = getInputChannelAligned();
    FeatureMap input = (FeatureMap) featureMapInput[0];
    FeatureMap output = (FeatureMap) featureMapOutput;
    Allocation inputFeatureMap = input.getFeatureMap();
    Allocation outputFeatureMap = output.getFeatureMap();
    scriptConvolution.set_InputData(inputFeatureMap);
    scriptConvolution.set_OutputData(outputFeatureMap);
    Script.LaunchOptions option = new Script.LaunchOptions();
    boolean useIntrinsicBlas = false;
    if (conv1x1UserIntrinsic && kernelH == 1 && kernelW == 1) {
        useIntrinsicBlas = true;
    }
    // FIX: originally set conv1x1UserIntrinsic = true here (wrong variable).
    if (convnxnUseIntrinsic && kernelH != 1 && kernelW != 1) {
        useIntrinsicBlas = true;
    }
    if (useIntrinsicBlas) {
        if (kernelH == 1 && kernelW == 1) {
            // 1x1 convolution is a plain matrix multiply.
            scriptIntrinsicBLAS.SGEMM(ScriptIntrinsicBLAS.NO_TRANSPOSE,
                    ScriptIntrinsicBLAS.TRANSPOSE,
                    1.f, inputFeatureMap, kernelAllocation, 0.f, outputFeatureMap);
        } else if (inputChannel == group) {
            // Depthwise convolution: no BLAS variant, use the dedicated kernel.
            option.setX(0, getInputChannelAligned() / 4).setY(0, outputHeight * outputWidth);
            scriptConvolution.forEach_conv_dw4(option);
            return;
        } else {
            // General nxn: im2col into a temporary column buffer, then SGEMM.
            Type.Builder colType = new Type.Builder(renderScript, Element.F32(renderScript));
            colType.setX(kernelH * kernelW * inputChannelAligned)
                   .setY(outputHeight * outputWidth);
            Allocation colAllocation = Allocation.createTyped(renderScript, colType.create());
            scriptConvolution.set_ColData(colAllocation);
            option.setX(0, kernelH * kernelW).setY(0, outputHeight * outputWidth);
            scriptConvolution.forEach_conv_im2col2(option);
            scriptIntrinsicBLAS.SGEMM(ScriptIntrinsicBLAS.NO_TRANSPOSE,
                    ScriptIntrinsicBLAS.TRANSPOSE,
                    1.f, colAllocation, kernelAllocation, 0.f, outputFeatureMap);
            colAllocation.destroy();
        }
        // BLAS computed only the multiply; apply bias/ReLU epilogues here.
        if (nextRelu && biasTerm) {
            scriptConvolution.forEach_conv_bias_relu(outputFeatureMap, outputFeatureMap);
        } else if (biasTerm) {
            scriptConvolution.forEach_conv_bias(outputFeatureMap, outputFeatureMap);
        } else if (nextRelu) {
            scriptConvolution.forEach_conv_relu(outputFeatureMap, outputFeatureMap);
        }
    } else {
        if (kernelH == 1 && kernelW == 1) {
            option.setX(0, getOutputChannelAligned() / 4).setY(0, outputHeight * outputWidth);
            scriptConvolution.forEach_conv1x1(option);
        } else if (inputChannel == group) {
            option.setX(0, getInputChannelAligned() / 4).setY(0, outputHeight * outputWidth);
            scriptConvolution.forEach_conv_dw4(option);
        } else {
            // Pick the largest block size that divides the aligned channel count.
            int blockSize = 4;
            int[] blockSizeList = {256, 128, 96, 64, 48, 32, 16, 8};
            for (int blk : blockSizeList) {
                if (outputChannelAligned % blk == 0) {
                    blockSize = blk;
                    break;
                }
            }
            scriptConvolution.set_nblock(blockSize);
            option.setX(0, outputChannelAligned / blockSize).setY(0, outputHeight * outputWidth);
            scriptConvolution.forEach_conv4n(option);
        }
    }
}
Example 14
Source File: Healing.java From style-transfer with Apache License 2.0 | 4 votes |
/**
 * Iterative Poisson-style healing of the ROI. Converts the destination and
 * source patches to F32_3, computes the source Laplacian, then alternates
 * solve1/solve2 relaxation passes (step count ~ ROI diagonal) before
 * converting back, alpha-masking, and compositing into {@code output}.
 * Only assumes mPointsXY, mPasteOffX, mPasteOffY are set.
 *
 * @param healing the compiled healing RenderScript
 * @param rs      RenderScript context
 * @param image   source image
 * @param output  bitmap the healed result is composited into
 */
public void heal_orig(ScriptC_healing healing, RenderScript rs, Bitmap image, Bitmap output) {
    long time = System.nanoTime();
    // F32_3 working type sized to the ROI; used for both ping-pong buffers,
    // the float source, and the Laplacian.
    Type.Builder floatImage = new Type.Builder(rs, Element.F32_3(rs));
    floatImage.setX(mRoiBounds.width());
    floatImage.setY(mRoiBounds.height());
    Bitmap maskBitmap = buildMask(mRoiBounds, mPointsXY);
    // dest1/dest2 are the ping-pong buffers for the relaxation iterations.
    Allocation dest1 = Allocation.createTyped(rs, floatImage.create());
    Allocation dest2 = Allocation.createTyped(rs, floatImage.create());
    healing.set_dest1(dest1);
    healing.set_dest2(dest2);
    Bitmap destBitmap = createMutableBitmap(image, mRoiBounds.left, mRoiBounds.top,
            mRoiBounds.width(), mRoiBounds.height());
    Allocation dest_uc4 = Allocation.createFromBitmap(rs, destBitmap);
    healing.forEach_convert_to_f(dest_uc4, dest1);
    Bitmap src = createMutableBitmap(image, mCutOffsetX, mCutOffsetY,
            mRoiBounds.width(), mRoiBounds.height());
    Allocation src_f3 = Allocation.createTyped(rs, floatImage.create());
    Allocation src_uc4 = Allocation.createFromBitmap(rs, src);
    healing.forEach_convert_to_f(src_uc4, src_f3);
    healing.set_src(src_f3);
    Allocation mask = Allocation.createFromBitmap(rs, maskBitmap);
    healing.set_mask(mask);
    Allocation laplace_f3 = Allocation.createTyped(rs, floatImage.create());
    healing.set_laplace(laplace_f3);
    // Laplacian only over the interior (skip the 1-pixel border).
    Script.LaunchOptions options = new Script.LaunchOptions();
    options.setX(1, mRoiBounds.width() - 1);
    options.setY(1, mRoiBounds.height() - 1);
    healing.forEach_laplacian(laplace_f3, options);
    healing.forEach_copyMasked(mask, dest1);
    int steps = (int) Math.hypot(mRoiBounds.width(), mRoiBounds.height()); // match RS Single source
    Log.v(TAG, "Healing_orig :steps = " + steps);
    // Alternate relaxation passes between the two buffers.
    for (int i = 0; i < steps; i++) {
        healing.forEach_solve1(mask, dest2);
        healing.forEach_solve2(mask, dest1);
    }
    healing.forEach_convert_to_uc(dest1, dest_uc4);
    rs.finish();
    healing.forEach_alphaMask(dest_uc4, dest_uc4);
    rs.finish();
    dest_uc4.copyTo(destBitmap);
    rs.finish();
    destBitmap.setHasAlpha(true);
    rs.finish();
    // build the undo: snapshot the ROI of `output` before compositing over it.
    mUndoBitmap = Bitmap.createBitmap(mRoiBounds.width(), mRoiBounds.height(),
            Bitmap.Config.ARGB_8888);
    Canvas undoCanvas = new Canvas(mUndoBitmap);
    Rect undoRect = new Rect(0, 0, mRoiBounds.width(), mRoiBounds.height());
    undoCanvas.drawBitmap(output, mRoiBounds, undoRect, null);
    Canvas c = new Canvas(output);
    c.drawBitmap(image, 0, 0, null);
    c.drawBitmap(destBitmap, mRoiBounds.left, mRoiBounds.top, null);
    Log.v(TAG, " time to smart paste = " + (System.nanoTime() - time) / 1E6f + "ms");
}
Example 15
Source File: Filter.java From OnionCamera with MIT License | 4 votes |
public void reset(int width, int height) { if (mAllocationOut != null) { mAllocationOut.destroy(); } mWidth = width; mHeight = height; mSize = width * height; Type.Builder tb; tb = new Type.Builder(mRS, Element.U8(mRS)).setX(mWidth).setY(mHeight); mAllocationIn = Allocation.createTyped(mRS, tb.create(), Allocation.USAGE_SCRIPT); tb = new Type.Builder(mRS, Element.F32(mRS)).setX(mWidth).setY(mHeight); mAllocationBlurred = Allocation.createTyped(mRS, tb.create(), Allocation.USAGE_SCRIPT); mAllocationMagnitude = Allocation.createTyped(mRS, tb.create(), Allocation.USAGE_SCRIPT); tb = new Type.Builder(mRS, Element.I32(mRS)).setX(mWidth).setY(mHeight); mAllocationDirection = Allocation.createTyped(mRS, tb.create(), Allocation.USAGE_SCRIPT); mAllocationEdge = Allocation.createTyped(mRS, tb.create(), Allocation.USAGE_SCRIPT); tb = new Type.Builder(mRS, Element.I32(mRS)).setX(256); mAllocationHistogram = Allocation.createTyped(mRS, tb.create(), Allocation.USAGE_SCRIPT); tb = new Type.Builder(mRS, Element.RGBA_8888(mRS)).setX(mWidth).setY(mHeight); mAllocationOut = Allocation.createTyped(mRS, tb.create(), Allocation.USAGE_SCRIPT | Allocation.USAGE_IO_OUTPUT); setupSurface(); mHistogram.setOutput(mAllocationHistogram); mEffects.invoke_set_histogram(mAllocationHistogram); mEffects.invoke_set_blur_input(mAllocationIn); mEffects.invoke_set_compute_gradient_input(mAllocationBlurred); mEffects.invoke_set_suppress_input(mAllocationMagnitude, mAllocationDirection); mEffects.invoke_set_hysteresis_input(mAllocationEdge); mEffects.invoke_set_thresholds(0.2f, 0.6f); sc = new LaunchOptions(); sc.setX(2, mWidth - 3); sc.setY(2, mHeight - 3); histo = new int[256]; }