org.bytedeco.javacpp.avcodec.AVPacket Java Examples

The following examples show how to use org.bytedeco.javacpp.avcodec.AVPacket. You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example #1
Source File: FFmpegFrameRecorderPlus.java    From easyCV with Apache License 2.0 6 votes vote down vote up
private boolean writePacket(int mediaType, AVPacket avPacket) throws Exception {

        // Writes one encoded packet to the output context, choosing between
        // interleaved and plain writing. Returns false on any muxer error.
        //
        // FIX: the original ternary mapped AVMEDIA_TYPE_VIDEO to audio_st and
        // AVMEDIA_TYPE_AUDIO to video_st — the streams were swapped, so the
        // interleaved branch was selected based on the wrong stream's
        // presence. Also removed the unused 'mediaTypeStr' local.
        AVStream avStream = (mediaType == AVMEDIA_TYPE_VIDEO) ? video_st
                : (mediaType == AVMEDIA_TYPE_AUDIO) ? audio_st : null;

        // Interleaved writing lets the muxer reorder packets by dts; fall
        // back to a plain write when interleaving is disabled or the media
        // type has no open output stream.
        if (interleaved && avStream != null) {
            if (av_interleaved_write_frame(oc, avPacket) < 0) {
                return false;
            }
        } else {
            if (av_write_frame(oc, avPacket) < 0) {
                return false;
            }
        }
        return true;
    }
 
Example #2
Source File: FFmpegFrameRecorderPlus.java    From easyCV with Apache License 2.0 6 votes vote down vote up
/**
 * Remuxes one packet read from the input context into the output: retags it
 * with the matching output stream index, rescales duration and dts into the
 * output stream's time bases, and writes it via writePacket().
 * The packet's payload is always released before returning.
 *
 * @param pkt packet read from the input format context; may be null.
 * @return false if pkt is null or writePacket() reported a failure;
 *         true otherwise (including when the codec type matched no branch).
 * @throws Exception propagated from writePacket().
 */
public boolean recordPacket(AVPacket pkt) throws Exception {
        if (pkt == null) {
            return false;
        }
        AVStream in_stream = ifmt_ctx.streams(pkt.stream_index());
        // pts is cleared so the muxer regenerates it; pos -1 = "unknown".
//      pkt.dts(AV_NOPTS_VALUE);
        pkt.pts(AV_NOPTS_VALUE);
        pkt.pos(-1);
        try {
	        if (in_stream.codec().codec_type() == AVMEDIA_TYPE_VIDEO && video_st != null) {
	            pkt.stream_index(video_st.index());
	            // duration uses the codec time bases; dts uses the stream time
	            // bases with rounding that passes AV_NOPTS_VALUE through.
	            pkt.duration((int) av_rescale_q(pkt.duration(), in_stream.codec().time_base(), video_st.codec().time_base()));
	            pkt.dts(av_rescale_q_rnd(pkt.dts(), in_stream.time_base(), video_st.time_base(),(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX)));
	            return writePacket(AVMEDIA_TYPE_VIDEO, pkt);
	
	        } else if (in_stream.codec().codec_type() == AVMEDIA_TYPE_AUDIO && audio_st != null && (audioChannels > 0)) {
	            pkt.stream_index(audio_st.index());
	            pkt.duration((int) av_rescale_q(pkt.duration(), in_stream.codec().time_base(), audio_st.codec().time_base()));
	            pkt.dts(av_rescale_q_rnd(pkt.dts(), in_stream.time_base(), audio_st.time_base(),(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX)));
	            return writePacket(AVMEDIA_TYPE_AUDIO, pkt);
	        }
        }finally {
        	// Always release the packet's buffer, even when writePacket throws.
        	av_packet_unref(pkt);
        }
        return true;
    }
 
Example #3
Source File: Muxer.java    From JavaAV with GNU General Public License v2.0 6 votes vote down vote up
private void writeVideoPacket(AVPacket avPacket) throws JavaAVException {
	// Rescale the packet's timestamps from the encoder's time base into the
	// output stream's time base before handing it to the muxer.
	AVRational encoderTb = videoEncoder.getCodec().getContext().time_base();
	AVRational streamTb = videoStream.time_base();

	long pts = avPacket.pts();
	if (pts != AV_NOPTS_VALUE)
		avPacket.pts(av_rescale_q(pts, encoderTb, streamTb));

	long dts = avPacket.dts();
	if (dts != AV_NOPTS_VALUE)
		avPacket.dts(av_rescale_q(dts, encoderTb, streamTb));

	avPacket.stream_index(videoStream.index());

	// Serialize writes on the shared muxer context.
	synchronized (formatContext) {
		/* write the compressed frame in the media file */
		boolean useInterleave = interleave && audioStream != null;
		int result = useInterleave
				? av_interleaved_write_frame(formatContext, avPacket)
				: av_write_frame(formatContext, avPacket);
		if (result < 0) {
			String kind = useInterleave ? "interleaved video" : "video";
			throw new JavaAVException("Could not write " + kind + " frame.");
		}
	}
}
 
Example #4
Source File: Muxer.java    From JavaAV with GNU General Public License v2.0 6 votes vote down vote up
public MediaPacket[] addSamples(AudioFrame frame) throws JavaAVException {
	// Encoding one audio frame may emit zero or more packets.
	MediaPacket[] packets = audioEncoder.encodeAudio(frame);

	// Write every non-empty packet; the encoder may hand back null slots.
	for (MediaPacket packet : packets) {
		if (packet == null)
			continue;

		AVPacket nativePacket = packet.getAVPacket();
		if (nativePacket != null)
			writeAudioPacket(nativePacket);
	}

	return packets;
}
 
Example #5
Source File: FFmpegFrameRecorderPlus.java    From easyCV with Apache License 2.0 6 votes vote down vote up
public FFmpegFrameRecorderPlus(String filename, int imageWidth, int imageHeight, int audioChannels) {
    // Output target and picture geometry.
    this.filename = filename;
    this.imageWidth = imageWidth;
    this.imageHeight = imageHeight;
    this.audioChannels = audioChannels;

    // Video defaults: codec and pixel format resolved later; 400 kbit/s @ 30 fps.
    this.pixelFormat = AV_PIX_FMT_NONE;
    this.videoCodec = AV_CODEC_ID_NONE;
    this.videoBitrate = 400000;
    this.frameRate = 30;

    // Audio defaults: 64 kbit/s at 44.1 kHz.
    this.sampleFormat = AV_SAMPLE_FMT_NONE;
    this.audioCodec = AV_CODEC_ID_NONE;
    this.audioBitrate = 64000;
    this.sampleRate = 44100;

    // Interleave audio/video packets on write by default.
    this.interleaved = true;

    // Reusable packet holders for the two elementary streams.
    this.video_pkt = new AVPacket();
    this.audio_pkt = new AVPacket();
}
 
Example #6
Source File: Muxer.java    From JavaAV with GNU General Public License v2.0 5 votes vote down vote up
public MediaPacket addImage(VideoFrame frame) throws JavaAVException {
	// Encode the video frame; the encoder may buffer and return null.
	MediaPacket packet = videoEncoder.encodeVideo(frame);
	if (packet == null)
		return null;

	writeVideoPacket(packet.getAVPacket());
	return packet;
}
 
Example #7
Source File: Muxer.java    From JavaAV with GNU General Public License v2.0 5 votes vote down vote up
/**
 * Rescales the packet's pts/dts/duration from the audio encoder's time base
 * into the output stream's time base, marks it as a key frame, and writes it
 * to the muxer (interleaved when a video stream is also present).
 *
 * @param avPacket encoded audio packet to write.
 * @throws JavaAVException if the muxer reports a write failure.
 */
private void writeAudioPacket(AVPacket avPacket) throws JavaAVException {
	AVRational timeBase = audioEncoder.getCodec().getContext().time_base();
	AVRational streamTimeBase = audioStream.time_base();

	if (avPacket.pts() != AV_NOPTS_VALUE)
		avPacket.pts(av_rescale_q(avPacket.pts(), timeBase, streamTimeBase));

	if (avPacket.dts() != AV_NOPTS_VALUE)
		avPacket.dts(av_rescale_q(avPacket.dts(), timeBase, streamTimeBase));

	// FIX: reuse the cached streamTimeBase instead of re-fetching
	// audioStream.time_base() (same value; drops a redundant native call and
	// matches how pts/dts are rescaled above).
	if (avPacket.duration() > 0)
		avPacket.duration((int) av_rescale_q(avPacket.duration(), timeBase, streamTimeBase));

	// Every encoded audio packet is flagged as a key frame.
	avPacket.flags(avPacket.flags() | AV_PKT_FLAG_KEY);
	avPacket.stream_index(audioStream.index());

	/* write the compressed frame in the media file */
	synchronized (formatContext) {
		if (interleave && videoStream != null) {
			if (av_interleaved_write_frame(formatContext, avPacket) < 0)
				throw new JavaAVException("Could not write interleaved audio frame.");
		}
		else {
			if (av_write_frame(formatContext, avPacket) < 0)
				throw new JavaAVException("Could not write audio frame.");
		}
	}
}
 
Example #8
Source File: Muxer.java    From JavaAV with GNU General Public License v2.0 5 votes vote down vote up
private boolean flushAudio() throws JavaAVException {
	// Drain samples still buffered inside the audio encoder.
	MediaPacket flushed = audioEncoder.flushAudio();

	if (flushed != null) {
		// write flushed audio
		writeAudioPacket(flushed.getAVPacket());
		return true;
	}

	return false;
}
 
Example #9
Source File: Muxer.java    From JavaAV with GNU General Public License v2.0 5 votes vote down vote up
private boolean flushVideo() throws JavaAVException {
	// Drain frames still buffered inside the video encoder.
	MediaPacket flushed = videoEncoder.flushVideo();

	if (flushed == null)
		return false;

	// write flushed video
	writeVideoPacket(flushed.getAVPacket());

	return flushed.isKeyFrame();
}
 
Example #10
Source File: testRecorder.java    From easyCV with Apache License 2.0 5 votes vote down vote up
/**
 * Reads packets from the input context and pushes each one to the output,
 * retagging the stream index and rescaling duration/dts from the input
 * stream's time bases into the output stream's.
 *
 * @return this, for chaining.
 * @throws Exception propagated from writePacket().
 */
public testRecorder grabtoPush() throws Exception {
	AVPacket pkt = new AVPacket();

	// FIX: the original checked 'pkt == null' inside the loop (guarding an
	// err_index "network failure" counter), but pkt is allocated once above
	// and never reassigned, so that branch was dead code and was removed.
	while (av_read_frame(in_fc, pkt) >= 0) {
		try {
			AVStream in_stream = in_fc.streams(pkt.stream_index());
			// Clear pts so the muxer regenerates it; position is unknown.
			pkt.pts(AV_NOPTS_VALUE);
			pkt.pos(-1);
			if (in_stream.codec().codec_type() == AVMEDIA_TYPE_VIDEO && out_videoStream != null) {
				pkt.stream_index(out_videoStream.index());
				pkt.duration((int) av_rescale_q(pkt.duration(), in_stream.codec().time_base(), out_videoStream.codec().time_base()));
				pkt.dts(av_rescale_q_rnd(pkt.dts(), in_stream.time_base(), out_videoStream.time_base(), (AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX)));
				writePacket(AVMEDIA_TYPE_VIDEO, pkt);
			} else if (in_stream.codec().codec_type() == AVMEDIA_TYPE_AUDIO && out_audioStream != null) {
				pkt.stream_index(out_audioStream.index());
				pkt.duration((int) av_rescale_q(pkt.duration(), in_stream.codec().time_base(), out_audioStream.codec().time_base()));
				pkt.dts(av_rescale_q_rnd(pkt.dts(), in_stream.time_base(), out_audioStream.time_base(), (AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX)));
				writePacket(AVMEDIA_TYPE_AUDIO, pkt);
			}
		} finally {
			// FIX: release the packet's buffer every iteration; the original
			// never unref'd and leaked each packet read by av_read_frame
			// (FFmpegFrameRecorderPlus.recordPacket uses the same pattern).
			av_packet_unref(pkt);
		}
	}
	return this;
}
 
Example #11
Source File: TestFFmpeg.java    From easyCV with Apache License 2.0 4 votes vote down vote up
/**
	 * Encodes a YUVJ420P frame and saves it as an image file.
	 * NOTE(review): the original (Chinese) javadoc said "jpg", but the muxer is
	 * guessed with av_guess_format("PNG", ...) — confirm the intended format.
	 * @param pFrame - decoded frame data (YUVJ420P)
	 * @param index - sequence number (unused in the body)
	 * @return 0 on success, -1 on any failure.
	 */
	private static int saveImg(AVFrame pFrame, int index,String out_file) {
		int width= pFrame.width(), height= pFrame.height();
		// Allocate the output AVFormatContext.
		AVFormatContext pFormatCtx = avformat_alloc_context();
		// Set the output container format.
		pFormatCtx.oformat(av_guess_format("PNG", null, null));
		if (pFormatCtx.oformat() == null) {
			// NOTE(review): early returns below this point leak pFormatCtx
			// (and pb once opened) — cleanup only runs on the success path.
			return -1;
		}
		// Create and initialize an AVIOContext for the output url.
		AVIOContext pb = new AVIOContext();
		if (avio_open(pb, out_file, AVIO_FLAG_READ_WRITE) < 0) {
			System.err.println("Couldn't open output file.");
			return -1;
		}
		pFormatCtx.pb(pb);
		// Create a new output stream.
		AVCodec codec = null;
		AVStream pAVStream = avformat_new_stream(pFormatCtx, codec);
		if (pAVStream == null) {
			return -1;
		}
		// Configure the stream's codec parameters from the source frame.
		AVCodecContext pCodecCtx = pAVStream.codec();
		pCodecCtx.codec_id(pFormatCtx.oformat().video_codec());
		pCodecCtx.codec_type(AVMEDIA_TYPE_VIDEO);
		pCodecCtx.pix_fmt(pFrame.format());
		pCodecCtx.width(width);
		pCodecCtx.height(height);
		// Nominal 1/25 time base for the single-image stream.
		pCodecCtx.time_base().num(1);
		pCodecCtx.time_base().den(25);
		// Begin Output some information
		av_dump_format(pFormatCtx, 0, out_file, 1);
		// End Output some information
		// Find the encoder for the configured codec id.
		AVCodec pCodec = avcodec_find_encoder(pCodecCtx.codec_id());
		if (pCodec == null) {
			System.err.println("Codec not found.");
			return -1;
		}
		// Open the codec context with the encoder.
		if (avcodec_open2(pCodecCtx, pCodec, (PointerPointer) null) < 0) {
			System.err.println("Could not open codec.");
			return -1;
		}

		// Write Header
		avformat_write_header(pFormatCtx, (PointerPointer) null);

		int y_size = width * height;

		// Allocate a packet buffer large enough for the encoded frame
		// (3 bytes per pixel is a generous upper bound here).
		AVPacket pkt = new AVPacket();
		av_new_packet(pkt, y_size * 3);
		// got_picture_arr[0] != 0 after encoding means a packet was produced.
		int[] got_picture_arr = { 0 };
//		IntPointer got_picture = new IntPointer(got_picture_arr);
		int ret = avcodec_encode_video2(pCodecCtx, pkt, pFrame, got_picture_arr);
		if (ret < 0) {
			System.err.println("Encode Error.\n");
			return -1;
		}
		if (pkt != null && !pkt.isNull()) {
			// pkt.stream_index = pAVStream->index;
			ret = av_write_frame(pFormatCtx, pkt);
		}
		// Write Trailer
		if (av_write_trailer(pFormatCtx) >= 0) {
			System.err.println("Encode Successful.");
		}

		av_free_packet(pkt);

		if (pAVStream != null) {
			avcodec_close(pAVStream.codec());
		}

		if (pFormatCtx != null) {
			avio_close(pFormatCtx.pb());
			avformat_free_context(pFormatCtx);
		}

		return 0;
	}
 
Example #12
Source File: GrabberTemplate4.java    From easyCV with Apache License 2.0 4 votes vote down vote up
/**
 * One-time initialization before transcoding starts: opens the input, locates
 * the video stream, opens its decoder, creates the sws scaler, and allocates
 * the packet and frame buffers used by the grab loop.
 * @param url media source to open.
 * @param fmt target pixel format for the scaled output frame.
 * @return always true — NOTE(review): failure is presumably signalled by
 *         exceptions from the helper methods; confirm against their contracts.
 */
private boolean initGrabber(String url,int fmt) {

	// Open video file
	pFormatCtx=openInput(url);
	
	// Find video info
	findStreamInfo(pFormatCtx,null);
	
	// Find a video stream
	videoStreamIndex=findVideoStreamIndex(pFormatCtx);
	
	// Find the decoder for the video stream
	pCodecCtx= findAndOpenCodec(pFormatCtx,videoStreamIndex);
	
	//set image size
	srcWidth = pCodecCtx.width();
	srcHeight = pCodecCtx.height();
	
	//if width/height is null,use the width/height of video source as the default
	if(width==null||height==null) {
		width=srcWidth;
		height=srcHeight;
	}
	
	//scaling/conversion operations by using sws_scale().
	DoublePointer param=null;
	sws_ctx = sws_getContext(srcWidth, srcHeight, pCodecCtx.pix_fmt(), width, height,fmt, SWS_FAST_BILINEAR, null, null, param);
	
	// Reusable packet for av_read_frame.
	packet = new AVPacket();
	
	// Allocate video frame
	pFrame = av_frame_alloc();
	
	// Allocate an AVFrame structure for the scaled output.
	outFrameRGB = av_frame_alloc();
	outFrameRGB.width(width);
	outFrameRGB.height(height);
	outFrameRGB.format(fmt);
	
	return true;
}
 
Example #13
Source File: FFmpegRecorder.java    From easyCV with Apache License 2.0 4 votes vote down vote up
/**
	 * Grab video frames from the given source (audio frames and empty frames
	 * are skipped by default).
	 * @param url media source to read from.
	 * @param fmt - pixel format, e.g. AV_PIX_FMT_BGR24 (unused in this example loop).
	 * @return always null in this example; the decode loop body is a placeholder.
	 * @throws IOException
	 */
	public ByteBuffer grabVideoFrame(String url,int fmt) throws IOException {
		
		// Open video file
		AVFormatContext pFormatCtx=openInput(url);
//		if(url.indexOf("rtmp")>=0) {
			// Keep rtmp probing fast:
		    // cap the probe buffer — 500k is enough to reach a key frame even
		    // for HD streams.
		    pFormatCtx.probesize(PROBESIZE);
		    // Cap avformat_find_stream_info's analysis duration (3 seconds).
		    pFormatCtx.max_analyze_duration(MAX_ANALYZE_DURATION);
//		}
		// Retrieve stream information
		pFormatCtx=findStreamInfo(pFormatCtx);
		// Dump information about file onto standard error
		//av_dump_format(pFormatCtx, 0, url, 0);

		//Find a video stream
		int videoStream=findVideoStreamIndex(pFormatCtx);
		AVCodecContext pCodecCtx =findVideoStream(pFormatCtx,videoStream);
		
		// Find the decoder for the video stream
		pCodecCtx= findAndOpenCodec(pCodecCtx);
		// Allocate video frame
		AVFrame pFrame = av_frame_alloc();
		AVPacket packet = new AVPacket();
		int[] frameFinished = new int[1];
		
		// FIX: cleanup now runs in a finally block so the frame, codec and
		// format context are released even if decoding throws — the original
		// leaked all three on any exception. This mirrors the sibling
		// GrabberTmplate.grabVideoFrame implementation. Also simplified the
		// always-true 'frameFinished != null' test (arrays are never null).
		try {
			// Read frames until EOF / error.
			while (av_read_frame(pFormatCtx, packet) >= 0) {
				// Is this a packet from the video stream?
				if (packet.stream_index() == videoStream) {
					// Decode video frame
					avcodec_decode_video2(pCodecCtx, pFrame, frameFinished, packet);
					// Did we get a video frame?
					if (frameFinished[0] != 0) {
						// placeholder: the original example does nothing here
					}
				}
				// Free the packet that was allocated by av_read_frame
				av_free_packet(packet);
			}
			return null;
		} finally {
			av_free(pFrame);// Free the YUV frame
			avcodec_close(pCodecCtx);// Close the codec
			avformat_close_input(pFormatCtx);// Close the video file
		}
	}
 
Example #14
Source File: GrabberTmplate.java    From easyCV with Apache License 2.0 4 votes vote down vote up
/**
	 * Grab video frames from the given source (audio frames and empty frames
	 * are skipped by default), convert each decoded frame to the requested
	 * pixel format and hand it to saveFrame().
	 * @param url media source to read from.
	 * @param fmt - pixel format, e.g. AV_PIX_FMT_BGR24.
	 * @return always null in this example implementation.
	 * @throws IOException
	 */
	public ByteBuffer grabVideoFrame(String url,int fmt) throws IOException {
		// Open video file
		AVFormatContext pFormatCtx=openInput(url);

		// Retrieve stream information
		pFormatCtx=findStreamInfo(pFormatCtx);

		// Dump information about file onto standard error
		//av_dump_format(pFormatCtx, 0, url, 0);

		//Find a video stream
		int videoStream=findVideoStreamIndex(pFormatCtx);
		AVCodecContext pCodecCtx =findVideoStream(pFormatCtx,videoStream);
		
		// Find the decoder for the video stream
		pCodecCtx= findAndOpenCodec(pCodecCtx);

		// Allocate video frame
		AVFrame pFrame = av_frame_alloc();
		//Allocate an AVFrame structure for the converted output
		AVFrame pFrameRGB = av_frame_alloc();

		width = pCodecCtx.width();
		height = pCodecCtx.height();
		pFrameRGB.width(width);
		pFrameRGB.height(height);
		pFrameRGB.format(fmt);

		// Determine required buffer size and allocate buffer
		int numBytes = avpicture_get_size(fmt, width, height);

		// Scaler converting from the source pixel format to fmt (same size).
		SwsContext sws_ctx = sws_getContext(width, height, pCodecCtx.pix_fmt(), width, height,fmt, SWS_BILINEAR, null, null, (DoublePointer) null);

		BytePointer buffer = new BytePointer(av_malloc(numBytes));
		// Assign appropriate parts of buffer to image planes in pFrameRGB
		// Note that pFrameRGB is an AVFrame, but AVFrame is a superset
		// of AVPicture
		avpicture_fill(new AVPicture(pFrameRGB), buffer, fmt, width, height);
		AVPacket packet = new AVPacket();
		// frameFinished[0] != 0 after decoding means a full frame is ready.
		int[] frameFinished = new int[1];
		try {
			// Read packets until EOF / error.
			while (av_read_frame(pFormatCtx, packet) >= 0) {
				// Is this a packet from the video stream?
				if (packet.stream_index() == videoStream) {
					// Decode video frame
					avcodec_decode_video2(pCodecCtx, pFrame, frameFinished, packet);
					// Did we get a video frame?
					if (frameFinished != null&&frameFinished[0] > 0) {
						// Convert the image from its native format to BGR
						sws_scale(sws_ctx, pFrame.data(), pFrame.linesize(), 0, height, pFrameRGB.data(),pFrameRGB.linesize());
						//Convert BGR to ByteBuffer
						saveFrame(pFrameRGB, width, height);
					}
				}
				// Free the packet that was allocated by av_read_frame
				av_free_packet(packet);
			}
			return null;
		}finally {
			//Don't free buffer — its memory is owned by pFrameRGB's planes.
//			av_free(buffer);
			av_free(pFrameRGB);// Free the RGB image
			av_free(pFrame);// Free the YUV frame
			sws_freeContext(sws_ctx);//Free SwsContext
			avcodec_close(pCodecCtx);// Close the codec
			avformat_close_input(pFormatCtx);// Close the video file
		}
	}
 
Example #15
Source File: testRecorder.java    From easyCV with Apache License 2.0 4 votes vote down vote up
/**
 * Writes one packet to the output context via interleaved writing.
 *
 * @param mediaType AVMEDIA_TYPE_* of the packet; kept for interface
 *                  compatibility with callers (not consulted here — every
 *                  packet goes through the interleaved path).
 * @param avPacket  packet to write.
 * @throws Exception if av_interleaved_write_frame reports an error.
 */
private void writePacket(int mediaType, AVPacket avPacket) throws Exception {
    // FIX: removed the dead 'avStream' local — it mapped video to
    // out_audioStream and audio to out_videoStream (swapped) and was never
    // read, so it only misled readers.
    if ((ret = av_interleaved_write_frame(fc, avPacket)) < 0) {
           throw new Exception("av_interleaved_write_frame() error " + ret + " while writing interleaved packet.");
       }
}
 
Example #16
Source File: MediaPacket.java    From JavaAV with GNU General Public License v2.0 4 votes vote down vote up
/**
 * Wraps the given native FFmpeg packet.
 *
 * @param avPacket the native packet this MediaPacket represents.
 */
MediaPacket(AVPacket avPacket) {
	this.avPacket = avPacket;
}
 
Example #17
Source File: MediaPacket.java    From JavaAV with GNU General Public License v2.0 4 votes vote down vote up
/**
 * Returns the underlying native FFmpeg packet.
 *
 * @return the wrapped AVPacket; callers null-check the result.
 */
AVPacket getAVPacket() {
	return avPacket;
}
 
Example #18
Source File: Coder.java    From JavaAV with GNU General Public License v2.0 4 votes vote down vote up
/**
 * Initializes the {@code Coder} with codec options that may contain specific
 * codec parameters.
 *
 * <p>Allocates the codec context if needed, applies all configured
 * parameters (pixel/sample format, dimensions, rates, bitrate, profile,
 * quality, flags), opens the codec with the merged option dictionary, and
 * allocates the reusable frame and packet. Idempotent: a second call on an
 * opened coder logs a warning and returns.
 *
 * @param options codec options.
 *
 * @throws JavaAVException if {@code Coder} could not be opened.
 */
public void open(Map<String, String> options) throws JavaAVException {
	if (state == State.Opened) {
		logger.warn("Trying to open an already opened Coder. Aborted.");
		return;
	}
	if (codec == null)
		throw new JavaAVException("Codec is null. Aborted.");

	// Allocate a codec context lazily if none was injected beforehand.
	if (avContext == null)
		avContext = avcodec_alloc_context3(codec.getCodec());

	if (avContext == null)
		throw new JavaAVException("No codec context available for codec " + codec.getName());

	// set configuration parameters
	if (pixelFormat != null) {
		avContext.pix_fmt(pixelFormat.value());
	}
	if (sampleFormat != null) {
		// bits per raw sample is derived from the sample format's byte size.
		int sampleBitSize = av_get_bytes_per_sample(sampleFormat.value()) * 8;
		avContext.sample_fmt(sampleFormat.value());
		avContext.bits_per_raw_sample(sampleBitSize);
	}
	if (imageWidth > 0) {
		avContext.width(imageWidth);
	}
	if (imageHeight > 0) {
		avContext.height(imageHeight);
	}
	if (gopSize > 0) {
		avContext.gop_size(gopSize);
	}
	if (audioChannels > 0) {
		avContext.channels(audioChannels);
		avContext.channel_layout(av_get_default_channel_layout(audioChannels));
	}
	if (sampleRate > 0) {
		// Audio time base: one tick per sample.
		avContext.sample_rate(sampleRate);
		avContext.time_base().num(1).den(sampleRate);
	}
	if (frameRate > 0) {
		// Video time base is the inverse of the frame rate; av_d2q converts
		// the double to a bounded rational first.
		avContext.time_base(av_inv_q(av_d2q(frameRate, 1001000)));
	}
	if (bitrate > 0) {
		avContext.bit_rate(bitrate);
	}
	if (profile > 0) {
		avContext.profile(profile);
	}
	if (quality > -10) {
		// Fixed-quality mode: quality is expressed in FF lambda units.
		avContext.flags(avContext.flags() | avcodec.CODEC_FLAG_QSCALE);
		avContext.global_quality((int) Math.round(FF_QP2LAMBDA * quality));
	}
	for (CodecFlag flag : flags)
		avContext.flags(avContext.flags() | flag.value());

	// Merge quality ("crf") and caller-supplied options into one dictionary
	// passed to the codec open call.
	AVDictionary avDictionary = new AVDictionary(null);

	if (getQuality() >= 0)
		av_dict_set(avDictionary, "crf", getQuality() + "", 0);

	if (options != null) {
		for (Entry<String, String> e : options.entrySet()) {
			av_dict_set(avDictionary, e.getKey(), e.getValue(), 0);
		}
	}

	if (codec.open(avDictionary, avContext) < 0)
		throw new JavaAVException("Could not open codec.");

	av_dict_free(avDictionary);

	// Reusable frame and packet for subsequent encode/decode calls.
	avFrame = avcodec_alloc_frame();

	if (avFrame == null)
		throw new JavaAVException("Could not allocate frame.");

	avPacket = new AVPacket();

	state = State.Opened;
}