Java Code Examples for org.bytedeco.javacpp.avcodec.AVPacket#stream_index()
The following examples show how to use
org.bytedeco.javacpp.avcodec.AVPacket#stream_index() .
You can vote up the ones you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example 1
Source File: FFmpegFrameRecorderPlus.java From easyCV with Apache License 2.0 | 6 votes |
/**
 * Remuxes one already-demuxed packet from the input context into this recorder's output.
 * The packet's stream index, duration and dts are rewritten from the input stream's
 * time base to the matching output stream's time base before writing.
 *
 * @param pkt packet obtained from the input format context; released here in all cases
 * @return false if {@code pkt} is null; otherwise the result of {@code writePacket},
 *         or true when the packet belongs to a stream type that is not forwarded
 * @throws Exception propagated from {@code writePacket}
 */
public boolean recordPacket(AVPacket pkt) throws Exception {
    if (pkt == null) {
        return false;
    }
    // Input stream the packet was demuxed from; its time base drives the rescaling below.
    AVStream in_stream = ifmt_ctx.streams(pkt.stream_index());
    // pkt.dts(AV_NOPTS_VALUE);
    // Clear pts/pos so the muxer regenerates them; only dts is rescaled below.
    pkt.pts(AV_NOPTS_VALUE);
    pkt.pos(-1);
    try {
        if (in_stream.codec().codec_type() == AVMEDIA_TYPE_VIDEO && video_st != null) {
            // Retarget the packet at the output video stream.
            pkt.stream_index(video_st.index());
            pkt.duration((int) av_rescale_q(pkt.duration(), in_stream.codec().time_base(), video_st.codec().time_base()));
            // Rounded rescale; PASS_MINMAX keeps AV_NOPTS_VALUE intact through the conversion.
            pkt.dts(av_rescale_q_rnd(pkt.dts(), in_stream.time_base(), video_st.time_base(), (AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX)));
            return writePacket(AVMEDIA_TYPE_VIDEO, pkt);
        } else if (in_stream.codec().codec_type() == AVMEDIA_TYPE_AUDIO && audio_st != null && (audioChannels > 0)) {
            // Same rewrite for audio packets.
            pkt.stream_index(audio_st.index());
            pkt.duration((int) av_rescale_q(pkt.duration(), in_stream.codec().time_base(), audio_st.codec().time_base()));
            pkt.dts(av_rescale_q_rnd(pkt.dts(), in_stream.time_base(), audio_st.time_base(), (AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX)));
            return writePacket(AVMEDIA_TYPE_AUDIO, pkt);
        }
    } finally {
        // Always release the packet's buffer, including on the early returns above.
        av_packet_unref(pkt);
    }
    // Packet was neither video nor forwardable audio (e.g. subtitles): silently skipped.
    return true;
}
Example 2
Source File: Muxer.java From JavaAV with GNU General Public License v2.0 | 6 votes |
/**
 * Writes one encoded video packet to the output, rescaling its timestamps from the
 * video encoder's time base to the video stream's time base first.
 *
 * @param avPacket encoded packet produced by the video encoder
 * @throws JavaAVException if the underlying write call fails
 */
private void writeVideoPacket(AVPacket avPacket) throws JavaAVException {
    AVRational encoderBase = videoEncoder.getCodec().getContext().time_base();
    AVRational streamBase = videoStream.time_base();

    // Rescale only timestamps that are actually set.
    long pts = avPacket.pts();
    if (pts != AV_NOPTS_VALUE) {
        avPacket.pts(av_rescale_q(pts, encoderBase, streamBase));
    }
    long dts = avPacket.dts();
    if (dts != AV_NOPTS_VALUE) {
        avPacket.dts(av_rescale_q(dts, encoderBase, streamBase));
    }

    avPacket.stream_index(videoStream.index());

    // Serialize writers on the shared format context.
    synchronized (formatContext) {
        // Interleaving is only meaningful when an audio stream is also muxed.
        boolean useInterleaved = interleave && audioStream != null;
        int result = useInterleaved
                ? av_interleaved_write_frame(formatContext, avPacket)
                : av_write_frame(formatContext, avPacket);
        if (result < 0) {
            throw new JavaAVException(useInterleaved
                    ? "Could not write interleaved video frame."
                    : "Could not write video frame.");
        }
    }
}
Example 3
Source File: testRecorder.java From easyCV with Apache License 2.0 | 5 votes |
public testRecorder grabtoPush() throws Exception { AVPacket pkt = new AVPacket(); for (int err_index = 0; av_read_frame(in_fc, pkt) >= 0;) { if(pkt==null) {//连续读到一定数量空包说明网络故障 err_index++; if(err_index>1000) { break; } continue; } err_index=0; AVStream in_stream = in_fc.streams(pkt.stream_index()); pkt.pts(AV_NOPTS_VALUE); pkt.pos(-1); if (in_stream.codec().codec_type() == AVMEDIA_TYPE_VIDEO && out_videoStream != null) { pkt.stream_index(out_videoStream.index()); pkt.duration((int) av_rescale_q(pkt.duration(), in_stream.codec().time_base(), out_videoStream.codec().time_base())); pkt.dts(av_rescale_q_rnd(pkt.dts(), in_stream.time_base(), out_videoStream.time_base(),(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX))); writePacket(AVMEDIA_TYPE_VIDEO, pkt); } else if (in_stream.codec().codec_type() == AVMEDIA_TYPE_AUDIO && out_audioStream != null ) { pkt.stream_index(out_audioStream.index()); pkt.duration((int) av_rescale_q(pkt.duration(), in_stream.codec().time_base(), out_audioStream.codec().time_base())); pkt.dts(av_rescale_q_rnd(pkt.dts(), in_stream.time_base(), out_audioStream.time_base(),(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX))); writePacket(AVMEDIA_TYPE_AUDIO, pkt); } } return this; }
Example 4
Source File: Muxer.java From JavaAV with GNU General Public License v2.0 | 5 votes |
/**
 * Writes one encoded audio packet to the output, rescaling its timestamps and
 * duration from the audio encoder's time base to the audio stream's time base
 * and flagging the packet as a key frame.
 *
 * @param avPacket encoded packet produced by the audio encoder
 * @throws JavaAVException if the underlying write call fails
 */
private void writeAudioPacket(AVPacket avPacket) throws JavaAVException {
    AVRational encoderBase = audioEncoder.getCodec().getContext().time_base();
    AVRational streamBase = audioStream.time_base();

    // Rescale only values that are actually set.
    long pts = avPacket.pts();
    if (pts != AV_NOPTS_VALUE) {
        avPacket.pts(av_rescale_q(pts, encoderBase, streamBase));
    }
    long dts = avPacket.dts();
    if (dts != AV_NOPTS_VALUE) {
        avPacket.dts(av_rescale_q(dts, encoderBase, streamBase));
    }
    long duration = avPacket.duration();
    if (duration > 0) {
        avPacket.duration((int) av_rescale_q(duration, encoderBase, audioStream.time_base()));
    }

    // Every audio packet is marked as a key frame before muxing.
    avPacket.flags(avPacket.flags() | AV_PKT_FLAG_KEY);
    avPacket.stream_index(audioStream.index());

    // Serialize writers on the shared format context.
    synchronized (formatContext) {
        // Interleaving is only meaningful when a video stream is also muxed.
        boolean useInterleaved = interleave && videoStream != null;
        int result = useInterleaved
                ? av_interleaved_write_frame(formatContext, avPacket)
                : av_write_frame(formatContext, avPacket);
        if (result < 0) {
            throw new JavaAVException(useInterleaved
                    ? "Could not write interleaved audio frame."
                    : "Could not write audio frame.");
        }
    }
}
Example 5
Source File: GrabberTmplate.java From easyCV with Apache License 2.0 | 4 votes |
/**
 * Grabs video frames from the given URL, skipping audio frames and empty frames,
 * converting each decoded frame to the requested pixel format and handing it to
 * {@code saveFrame}.
 *
 * @param url media location to open
 * @param fmt target pixel format, e.g. AV_PIX_FMT_BGR24
 * @return always null in the visible code path; the converted data is passed to
 *         saveFrame — presumably stored in a field or buffer elsewhere (TODO confirm)
 * @throws IOException propagated from the open/decode helpers
 */
public ByteBuffer grabVideoFrame(String url,int fmt) throws IOException {
    // Open video file
    AVFormatContext pFormatCtx=openInput(url);
    // Retrieve stream information
    pFormatCtx=findStreamInfo(pFormatCtx);
    // Dump information about file onto standard error
    //av_dump_format(pFormatCtx, 0, url, 0);
    // Find a video stream and open its decoder.
    int videoStream=findVideoStreamIndex(pFormatCtx);
    AVCodecContext pCodecCtx =findVideoStream(pFormatCtx,videoStream);
    // Find the decoder for the video stream
    pCodecCtx= findAndOpenCodec(pCodecCtx);
    // Allocate the decode (native-format) frame and the converted-output frame.
    AVFrame pFrame = av_frame_alloc();
    AVFrame pFrameRGB = av_frame_alloc();
    width = pCodecCtx.width();
    height = pCodecCtx.height();
    pFrameRGB.width(width);
    pFrameRGB.height(height);
    pFrameRGB.format(fmt);
    // Determine required buffer size and allocate buffer
    int numBytes = avpicture_get_size(fmt, width, height);
    SwsContext sws_ctx = sws_getContext(width, height, pCodecCtx.pix_fmt(), width, height,fmt, SWS_BILINEAR, null, null, (DoublePointer) null);
    BytePointer buffer = new BytePointer(av_malloc(numBytes));
    // Point pFrameRGB's data planes into `buffer` (AVFrame is a superset of AVPicture).
    avpicture_fill(new AVPicture(pFrameRGB), buffer, fmt, width, height);
    AVPacket packet = new AVPacket();
    int[] frameFinished = new int[1];
    try {
        while (av_read_frame(pFormatCtx, packet) >= 0) {
            // Is this a packet from the video stream?
            if (packet.stream_index() == videoStream) {
                // Decode video frame
                avcodec_decode_video2(pCodecCtx, pFrame, frameFinished, packet);
                // NOTE(review): frameFinished is allocated just above, so the
                // `frameFinished != null` half of this test is always true.
                if (frameFinished != null&&frameFinished[0] > 0) {
                    // Convert the image from its native format to the target format.
                    sws_scale(sws_ctx, pFrame.data(), pFrame.linesize(), 0, height, pFrameRGB.data(),pFrameRGB.linesize());
                    // Hand the converted frame off; its semantics are defined elsewhere.
                    saveFrame(pFrameRGB, width, height);
                }
            }
            // Free the packet that was allocated by av_read_frame
            av_free_packet(packet);
        }
        return null;
    }finally {
        // NOTE(review): `buffer` is deliberately NOT freed here — presumably because
        // saveFrame keeps a view over that native memory; confirm against saveFrame,
        // otherwise this leaks numBytes per call.
        // av_free(buffer);
        av_free(pFrameRGB);// Free the converted-frame struct (not its external buffer)
        av_free(pFrame);// Free the YUV frame
        sws_freeContext(sws_ctx);//Free SwsContext
        avcodec_close(pCodecCtx);// Close the codec
        avformat_close_input(pFormatCtx);// Close the video file
    }
}
Example 6
Source File: FFmpegRecorder.java From easyCV with Apache License 2.0 | 4 votes |
/** * 抓取视频帧(默认跳过音频帧和空帧) * @param url * @param fmt - 像素格式,比如AV_PIX_FMT_BGR24 * @return * @throws IOException */ public ByteBuffer grabVideoFrame(String url,int fmt) throws IOException { // Open video file AVFormatContext pFormatCtx=openInput(url); // if(url.indexOf("rtmp")>=0) { //解决rtmp检索时间过长问题 //限制最大读取缓存 pFormatCtx.probesize(PROBESIZE);//设置500k能保证高清视频也能读取到关键帧 //限制avformat_find_stream_info最大持续时长,设置成3秒 pFormatCtx.max_analyze_duration(MAX_ANALYZE_DURATION); // } // Retrieve stream information pFormatCtx=findStreamInfo(pFormatCtx); // Dump information about file onto standard error //av_dump_format(pFormatCtx, 0, url, 0); //Find a video stream int videoStream=findVideoStreamIndex(pFormatCtx); AVCodecContext pCodecCtx =findVideoStream(pFormatCtx,videoStream); // Find the decoder for the video stream pCodecCtx= findAndOpenCodec(pCodecCtx); // Allocate video frame AVFrame pFrame = av_frame_alloc(); AVPacket packet = new AVPacket(); int[] frameFinished = new int[1]; // Read frames and save first five frames to disk while (av_read_frame(pFormatCtx, packet) >= 0) { // Is this a packet from the video stream? if (packet.stream_index() == videoStream) { // Decode video frame avcodec_decode_video2(pCodecCtx, pFrame, frameFinished, packet); // Did we get a video frame? if (frameFinished != null&&frameFinished[0] != 0) { } } // Free the packet that was allocated by av_read_frame av_free_packet(packet); } //ge av_free(pFrame);// Free the YUV frame avcodec_close(pCodecCtx);// Close the codec avformat_close_input(pFormatCtx);// Close the video file return null; }