Java Code Examples for org.bytedeco.javacpp.avcodec.AVPacket#pts()
The following examples show how to use
org.bytedeco.javacpp.avcodec.AVPacket#pts() .
You can vote up the examples you like or vote down the ones you don't like, and you can go to the original project or source file by following the links above each example. You may also check out the related API usage links on the sidebar.
Example 1
Source File: FFmpegFrameRecorderPlus.java From easyCV with Apache License 2.0 | 6 votes |
/**
 * Remuxes one demuxed packet into this recorder's output: rescales its timing
 * from the input stream's time base to the matching output stream's time base,
 * then hands it to writePacket().
 *
 * @param pkt packet read from the input format context; may be null.
 * @return the result of writePacket() for video/audio packets, false for a null
 *         packet, true when the packet matches neither configured stream.
 * @throws Exception propagated from writePacket().
 */
public boolean recordPacket(AVPacket pkt) throws Exception {
    if (pkt == null) {
        return false;
    }
    // Input stream the packet came from, looked up by its stream index.
    AVStream in_stream = ifmt_ctx.streams(pkt.stream_index());
    // pkt.dts(AV_NOPTS_VALUE);
    pkt.pts(AV_NOPTS_VALUE); // drop pts — presumably so the muxer regenerates it; only dts is rescaled below
    pkt.pos(-1); // unknown byte position; let the muxer fill it in
    try {
        if (in_stream.codec().codec_type() == AVMEDIA_TYPE_VIDEO && video_st != null) {
            // Retarget the packet at the output video stream and convert its timing.
            pkt.stream_index(video_st.index());
            pkt.duration((int) av_rescale_q(pkt.duration(), in_stream.codec().time_base(), video_st.codec().time_base()));
            pkt.dts(av_rescale_q_rnd(pkt.dts(), in_stream.time_base(), video_st.time_base(), (AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX)));
            return writePacket(AVMEDIA_TYPE_VIDEO, pkt);
        } else if (in_stream.codec().codec_type() == AVMEDIA_TYPE_AUDIO && audio_st != null && (audioChannels > 0)) {
            // Same retargeting for audio, guarded by a configured channel count.
            pkt.stream_index(audio_st.index());
            pkt.duration((int) av_rescale_q(pkt.duration(), in_stream.codec().time_base(), audio_st.codec().time_base()));
            pkt.dts(av_rescale_q_rnd(pkt.dts(), in_stream.time_base(), audio_st.time_base(), (AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX)));
            return writePacket(AVMEDIA_TYPE_AUDIO, pkt);
        }
    } finally {
        // Always release the packet's buffers, even if writePacket() throws.
        av_packet_unref(pkt);
    }
    return true;
}
Example 2
Source File: Muxer.java From JavaAV with GNU General Public License v2.0 | 6 votes |
/**
 * Writes one encoded video packet to the output container, rescaling its
 * pts/dts from the encoder's time base to the stream's time base first.
 * Writing is serialized on the format context; when interleaving with an
 * audio stream, the interleaved writer is used.
 *
 * @param avPacket encoded packet produced by the video encoder.
 * @throws JavaAVException if the frame cannot be written.
 */
private void writeVideoPacket(AVPacket avPacket) throws JavaAVException {
    AVRational srcBase = videoEncoder.getCodec().getContext().time_base();
    AVRational dstBase = videoStream.time_base();

    long pts = avPacket.pts();
    if (pts != AV_NOPTS_VALUE) {
        avPacket.pts(av_rescale_q(pts, srcBase, dstBase));
    }
    long dts = avPacket.dts();
    if (dts != AV_NOPTS_VALUE) {
        avPacket.dts(av_rescale_q(dts, srcBase, dstBase));
    }

    avPacket.stream_index(videoStream.index());

    // Write the compressed frame into the media file under the context lock.
    synchronized (formatContext) {
        boolean interleaved = interleave && audioStream != null;
        int ret = interleaved
                ? av_interleaved_write_frame(formatContext, avPacket)
                : av_write_frame(formatContext, avPacket);
        if (ret < 0) {
            throw new JavaAVException(interleaved
                    ? "Could not write interleaved video frame."
                    : "Could not write video frame.");
        }
    }
}
Example 3
Source File: testRecorder.java From easyCV with Apache License 2.0 | 5 votes |
public testRecorder grabtoPush() throws Exception { AVPacket pkt = new AVPacket(); for (int err_index = 0; av_read_frame(in_fc, pkt) >= 0;) { if(pkt==null) {//连续读到一定数量空包说明网络故障 err_index++; if(err_index>1000) { break; } continue; } err_index=0; AVStream in_stream = in_fc.streams(pkt.stream_index()); pkt.pts(AV_NOPTS_VALUE); pkt.pos(-1); if (in_stream.codec().codec_type() == AVMEDIA_TYPE_VIDEO && out_videoStream != null) { pkt.stream_index(out_videoStream.index()); pkt.duration((int) av_rescale_q(pkt.duration(), in_stream.codec().time_base(), out_videoStream.codec().time_base())); pkt.dts(av_rescale_q_rnd(pkt.dts(), in_stream.time_base(), out_videoStream.time_base(),(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX))); writePacket(AVMEDIA_TYPE_VIDEO, pkt); } else if (in_stream.codec().codec_type() == AVMEDIA_TYPE_AUDIO && out_audioStream != null ) { pkt.stream_index(out_audioStream.index()); pkt.duration((int) av_rescale_q(pkt.duration(), in_stream.codec().time_base(), out_audioStream.codec().time_base())); pkt.dts(av_rescale_q_rnd(pkt.dts(), in_stream.time_base(), out_audioStream.time_base(),(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX))); writePacket(AVMEDIA_TYPE_AUDIO, pkt); } } return this; }
Example 4
Source File: Muxer.java From JavaAV with GNU General Public License v2.0 | 5 votes |
/**
 * Writes one encoded audio packet to the output container. Timing (pts, dts,
 * duration) is rescaled from the encoder's time base to the audio stream's
 * time base, every packet is flagged as a key frame (mirroring the original
 * behavior), and the write is serialized on the format context.
 *
 * @param avPacket encoded packet produced by the audio encoder.
 * @throws JavaAVException if the frame cannot be written.
 */
private void writeAudioPacket(AVPacket avPacket) throws JavaAVException {
    AVRational encoderBase = audioEncoder.getCodec().getContext().time_base();
    AVRational streamBase = audioStream.time_base();

    long pts = avPacket.pts();
    if (pts != AV_NOPTS_VALUE) {
        avPacket.pts(av_rescale_q(pts, encoderBase, streamBase));
    }
    long dts = avPacket.dts();
    if (dts != AV_NOPTS_VALUE) {
        avPacket.dts(av_rescale_q(dts, encoderBase, streamBase));
    }
    if (avPacket.duration() > 0) {
        avPacket.duration((int) av_rescale_q(avPacket.duration(), encoderBase, audioStream.time_base()));
    }

    avPacket.flags(avPacket.flags() | AV_PKT_FLAG_KEY);
    avPacket.stream_index(audioStream.index());

    // Write the compressed frame into the media file under the context lock.
    synchronized (formatContext) {
        if (interleave && videoStream != null) {
            if (av_interleaved_write_frame(formatContext, avPacket) < 0) {
                throw new JavaAVException("Could not write interleaved audio frame.");
            }
        } else if (av_write_frame(formatContext, avPacket) < 0) {
            throw new JavaAVException("Could not write audio frame.");
        }
    }
}