Java Code Examples for org.jcodec.common.model.Picture#create()
The following examples show how to use
org.jcodec.common.model.Picture#create().
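Before diving in, a minimal orientation sketch of what Picture.create() gives you (assuming jcodec 0.2.x, where planes are byte arrays; 0.1.x used int[] planes):

import org.jcodec.common.model.ColorSpace;
import org.jcodec.common.model.Picture;

// Picture.create(width, height, colorSpace) allocates one zeroed plane per color component.
// For YUV420 the luma plane holds width * height samples and each chroma plane a quarter of that.
Picture pic = Picture.create(640, 480, ColorSpace.YUV420);
byte[] luma = pic.getPlaneData(0); // 640 * 480 = 307200 bytes
byte[] cb = pic.getPlaneData(1);   // 320 * 240 = 76800 bytes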
Example 1
Source File: SequenceEncoderMp4.java from ImageToVideo with Apache License 2.0
public void encodeNativeFrame(Picture pic) throws IOException {
    if (toEncode == null) {
        toEncode = Picture.create(pic.getWidth(), pic.getHeight(), encoder.getSupportedColorSpaces()[0]);
    }

    // Perform conversion
    transform.transform(pic, toEncode);

    // Encode image into H.264 frame, the result is stored in '_out' buffer
    _out.clear();
    ByteBuffer result = encoder.encodeFrame(toEncode, _out);

    // Based on the frame above form correct MP4 packet
    spsList.clear();
    ppsList.clear();
    H264Utils.wipePS(result, spsList, ppsList);
    H264Utils.encodeMOVPacket(result);

    // Add packet to video track
    outTrack.addFrame(new MP4Packet(result, frameNo, timeScale, 1, frameNo, true, null, frameNo, 0));

    frameNo++;
}
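The method above depends on fields initialized elsewhere in SequenceEncoderMp4. A sketch of that setup, assuming jcodec 0.2.x (illustrative, not the project's verbatim code; the muxer/track creation call varies across jcodec versions and is omitted):

// Assumed field initialization:
H264Encoder encoder = H264Encoder.createH264Encoder();
Transform transform = ColorUtil.getTransform(ColorSpace.RGB, encoder.getSupportedColorSpaces()[0]);
ByteBuffer _out = ByteBuffer.allocate(width * height * 6); // generous upper bound for one encoded frame
List<ByteBuffer> spsList = new ArrayList<ByteBuffer>();    // parameter sets stripped from each frame
List<ByteBuffer> ppsList = new ArrayList<ByteBuffer>();
// outTrack is the MP4 muxer's video track; frameNo and timeScale drive the packet timestamps.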
Example 2
Source File: PictureUtil.java from cineast with MIT License
public static int[] toColorArray(Picture src) {
    // Convert to RGB first if the picture is in another color space
    if (src.getColor() != ColorSpace.RGB) {
        Transform transform = ColorUtil.getTransform(src.getColor(), ColorSpace.RGB);
        Picture rgb = Picture.create(src.getWidth(), src.getHeight(), ColorSpace.RGB, src.getCrop());
        transform.transform(src, rgb);
        src = rgb;
    }

    // Pack the interleaved RGB plane into one int per cropped pixel
    int[] _return = new int[src.getCroppedWidth() * src.getCroppedHeight()];
    int[] data = src.getPlaneData(0);
    for (int i = 0; i < _return.length; ++i) {
        _return[i] = ReadableRGBContainer.toIntColor(data[3 * i + 2], data[3 * i + 1], data[3 * i]);
    }
    return _return;
}
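A usage sketch for toColorArray, assuming jcodec's FrameGrab API (names follow 0.2.x and are illustrative; the int[] planes above indicate an older jcodec, where FrameGrab is instantiated via its constructor instead):

// Grab a frame from a file and flatten it into packed int colors.
FrameGrab grab = FrameGrab.createFrameGrab(NIOUtils.readableChannel(new File("video.mp4")));
Picture frame = grab.getNativeFrame();          // decoded in the codec's native color space
int[] colors = PictureUtil.toColorArray(frame); // converted to RGB ints by the method above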
Example 3
Source File: H264FrameDecoder.java from amazon-kinesis-video-streams-parser-library with Apache License 2.0
public BufferedImage decodeH264Frame(final Frame frame, final MkvTrackMetadata trackMetadata) {
    final ByteBuffer frameBuffer = frame.getFrameData();
    final int pixelWidth = trackMetadata.getPixelWidth().get().intValue();
    final int pixelHeight = trackMetadata.getPixelHeight().get().intValue();
    codecPrivateData = trackMetadata.getCodecPrivateData().array();
    log.debug("Decoding frames ... ");

    // Read the bytes that appear to comprise the header
    // See: https://www.matroska.org/technical/specs/index.html#simpleblock_structure
    final Picture rgb = Picture.create(pixelWidth, pixelHeight, ColorSpace.RGB);
    final BufferedImage bufferedImage = new BufferedImage(pixelWidth, pixelHeight, BufferedImage.TYPE_3BYTE_BGR);
    final AvcCBox avcC = AvcCBox.parseAvcCBox(ByteBuffer.wrap(codecPrivateData));

    decoder.addSps(avcC.getSpsList());
    decoder.addPps(avcC.getPpsList());

    final Picture buf = Picture.create(pixelWidth + ((16 - (pixelWidth % 16)) % 16),
            pixelHeight + ((16 - (pixelHeight % 16)) % 16), ColorSpace.YUV420J);
    final List<ByteBuffer> byteBuffers = splitMOVPacket(frameBuffer, avcC);
    final Picture pic = decoder.decodeFrameFromNals(byteBuffers, buf.getData());

    if (pic != null) {
        // Work around for color issues in JCodec
        // https://github.com/jcodec/jcodec/issues/59
        // https://github.com/jcodec/jcodec/issues/192
        final byte[][] dataTemp = new byte[3][pic.getData().length];
        dataTemp[0] = pic.getPlaneData(0);
        dataTemp[1] = pic.getPlaneData(2);
        dataTemp[2] = pic.getPlaneData(1);

        final Picture tmpBuf = Picture.createPicture(pixelWidth, pixelHeight, dataTemp, ColorSpace.YUV420J);
        transform.transform(tmpBuf, rgb);
        AWTUtil.toBufferedImage(rgb, bufferedImage);
        frameCount++;
    }
    return bufferedImage;
}
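The second Picture.create call pads both dimensions up to a multiple of 16 because H.264 decodes whole 16x16 macroblocks; the picture is later rebuilt at the true display size. The padding arithmetic, pulled into a hypothetical helper:

// Rounds a dimension up to the next multiple of 16, e.g. 720 -> 720, 700 -> 704.
static int roundUpToMacroblock(int n) {
    return n + ((16 - (n % 16)) % 16);
}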
Example 4
Source File: ImageToH264MP4Encoder.java from CameraV with GNU General Public License v3.0
public void addFrame(Picture pic) throws IOException {
    if (toEncode == null) {
        toEncode = Picture.create(pic.getWidth(), pic.getHeight(), encoder.getSupportedColorSpaces()[0]);
    }
    if (_out == null) {
        // Allocate a buffer big enough to hold output frames
        _out = ByteBuffer.allocate(pic.getWidth() * pic.getHeight() * 6);
    }

    // Perform conversion
    transform.transform(pic, toEncode);

    // Encode image into H.264 frame, the result is stored in '_out' buffer
    _out.clear();
    ByteBuffer result = encoder.encodeFrame(toEncode, _out);

    // Based on the frame above form correct MP4 packet
    spsList.clear();
    ppsList.clear();
    H264Utils.wipePS(result, spsList, ppsList);
    H264Utils.encodeMOVPacket(result);

    // Add packet to video track
    outTrack.addFrame(new MP4Packet(result, frameNo, 25, 1, frameNo, true, null, frameNo, 0));

    frameNo++;
}
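A caller sketch for addFrame, assuming the jcodec-javase AWTUtil helpers (fromBufferedImageRGB is the 0.2.x name; illustrative, not CameraV's actual wiring):

// Convert a decoded image to an RGB Picture and hand it to the method above.
BufferedImage img = ImageIO.read(new File("frame-0001.png"));
Picture pic = AWTUtil.fromBufferedImageRGB(img); // RGB Picture matching the image dimensions
addFrame(pic); // converted to the encoder's color space, encoded, and muxed as shown above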
Example 5
Source File: FrameRendererVisitor.java from amazon-kinesis-video-streams-parser-library with Apache License 2.0
@Override
public void visit(final MkvDataElement dataElement) throws MkvElementVisitException {
    log.info("Got data element: {}", dataElement.getElementMetaData().getTypeInfo().getName());
    final String dataElementName = dataElement.getElementMetaData().getTypeInfo().getName();

    if ("SimpleBlock".equals(dataElementName)) {
        final MkvValue<Frame> frame = dataElement.getValueCopy();
        final ByteBuffer frameBuffer = frame.getVal().getFrameData();
        final MkvTrackMetadata trackMetadata = fragmentMetadataVisitor.getMkvTrackMetadata(
                frame.getVal().getTrackNumber());
        final int pixelWidth = trackMetadata.getPixelWidth().get().intValue();
        final int pixelHeight = trackMetadata.getPixelHeight().get().intValue();
        codecPrivateData = trackMetadata.getCodecPrivateData().array();
        log.debug("Decoding frames ... ");

        // Read the bytes that appear to comprise the header
        // See: https://www.matroska.org/technical/specs/index.html#simpleblock_structure
        final Picture rgb = Picture.create(pixelWidth, pixelHeight, ColorSpace.RGB);
        final BufferedImage renderImage = new BufferedImage(pixelWidth, pixelHeight, BufferedImage.TYPE_3BYTE_BGR);
        final AvcCBox avcC = AvcCBox.parseAvcCBox(ByteBuffer.wrap(codecPrivateData));

        decoder.addSps(avcC.getSpsList());
        decoder.addPps(avcC.getPpsList());

        final Picture buf = Picture.create(pixelWidth + ((16 - (pixelWidth % 16)) % 16),
                pixelHeight + ((16 - (pixelHeight % 16)) % 16), ColorSpace.YUV420J);
        final List<ByteBuffer> byteBuffers = splitMOVPacket(frameBuffer, avcC);
        final Picture pic = decoder.decodeFrameFromNals(byteBuffers, buf.getData());

        if (pic != null) {
            // Work around for color issues in JCodec
            // https://github.com/jcodec/jcodec/issues/59
            // https://github.com/jcodec/jcodec/issues/192
            final byte[][] dataTemp = new byte[3][pic.getData().length];
            dataTemp[0] = pic.getPlaneData(0);
            dataTemp[1] = pic.getPlaneData(2);
            dataTemp[2] = pic.getPlaneData(1);

            final Picture tmpBuf = Picture.createPicture(pixelWidth, pixelHeight, dataTemp, ColorSpace.YUV420J);
            transform.transform(tmpBuf, rgb);
            AWTUtil.toBufferedImage(rgb, renderImage);
            kinesisVideoFrameViewer.update(renderImage);
            frameCount++;
        }
    }
}
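Examples 3 and 5 share the same chroma workaround; extracted into a hypothetical helper to make it explicit:

// Works around JCodec's swapped chroma planes (issues #59 and #192) by
// rebuilding the picture with the U and V planes exchanged before the RGB transform.
static Picture swapChroma(Picture pic, int width, int height) {
    byte[][] planes = new byte[][] {
        pic.getPlaneData(0), // Y unchanged
        pic.getPlaneData(2), // V into the U slot
        pic.getPlaneData(1)  // U into the V slot
    };
    return Picture.createPicture(width, height, planes, ColorSpace.YUV420J);
}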
Example 6
Source File: YUVtoWebmMuxer.java from CameraV with GNU General Public License v3.0
public void encodeNativeFrame(ByteBuffer data, int width, int height, int frameIdx) throws IOException {
    // Allocate a blank YUV 4:2:0 picture; 'data' appears to serve as the encoder's output buffer here
    Picture yuv = Picture.create(width, height, ColorSpace.YUV420);
    ByteBuffer ff = encoder.encodeFrame(yuv, data);

    // Wrap the encoded frame in a packet and hand it to the muxer
    Packet packet = new Packet(ff, frameIdx, 1, 1, frameIdx, true, null);
    muxer.addFrame(packet);
}
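Note that encodeFrame's second argument is the output buffer, so the method above appears to encode a blank picture. If the intent is to encode the caller's YUV bytes, the planes must be filled first; a sketch assuming a planar I420 input layout and jcodec 0.2.x byte planes:

// Copy planar YUV 4:2:0 bytes from the input buffer into the picture's planes.
Picture yuv = Picture.create(width, height, ColorSpace.YUV420);
ByteBuffer src = data.duplicate();
src.get(yuv.getPlaneData(0), 0, width * height);             // Y plane
src.get(yuv.getPlaneData(1), 0, (width / 2) * (height / 2)); // U plane
src.get(yuv.getPlaneData(2), 0, (width / 2) * (height / 2)); // V plane
ByteBuffer out = ByteBuffer.allocate(width * height * 3);    // scratch output buffer
ByteBuffer ff = encoder.encodeFrame(yuv, out);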