Java Code Examples for android.media.MediaExtractor#getSampleTrackIndex()
The following examples show how to use android.media.MediaExtractor#getSampleTrackIndex(). You can go to the original project or source file by following the link above each example.
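getSampleTrackIndex() returns the track index of the current sample, or -1 once the extractor has no more samples, which is why the examples below use it both to confirm that a sample belongs to the selected track and to detect end of stream. The following is a minimal sketch of that pattern; the helper name, path argument, and buffer size are illustrative placeholders and are not taken from any of the projects listed here.

import android.media.MediaExtractor;
import android.media.MediaFormat;
import java.io.IOException;
import java.nio.ByteBuffer;

// Illustrative helper (not from any of the projects below): walk the first video
// track and read each sample's presentation time.
static void dumpVideoSampleTimes(String path) throws IOException {
    MediaExtractor extractor = new MediaExtractor();
    try {
        extractor.setDataSource(path);
        int videoTrack = -1;
        for (int i = 0; i < extractor.getTrackCount(); i++) {
            String mime = extractor.getTrackFormat(i).getString(MediaFormat.KEY_MIME);
            if (mime != null && mime.startsWith("video/")) {
                videoTrack = i;
                break;
            }
        }
        if (videoTrack < 0) {
            return;  // no video track found
        }
        extractor.selectTrack(videoTrack);
        ByteBuffer buffer = ByteBuffer.allocate(1 << 20);  // placeholder sample buffer size
        // getSampleTrackIndex() keeps returning the selected track's index while
        // samples from it are available, and -1 at end of stream.
        while (extractor.getSampleTrackIndex() == videoTrack) {
            int size = extractor.readSampleData(buffer, 0);
            if (size < 0) {
                break;
            }
            long ptsUs = extractor.getSampleTime();
            // ... feed the sample to a MediaCodec decoder or a muxer here ...
            extractor.advance();
        }
    } finally {
        extractor.release();
    }
}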
Example 1
Source File: VideoUtil.java, from the VideoProcessor project (Apache License 2.0)

public static void seekToLastFrame(MediaExtractor extractor, int trackIndex, int durationMs) {
    int seekToDuration = durationMs * 1000;
    if (extractor.getSampleTrackIndex() != trackIndex) {
        extractor.selectTrack(trackIndex);
    }
    extractor.seekTo(seekToDuration, MediaExtractor.SEEK_TO_PREVIOUS_SYNC);
    while (seekToDuration > 0 && extractor.getSampleTrackIndex() != trackIndex) {
        seekToDuration -= 10000;
        extractor.seekTo(seekToDuration, MediaExtractor.SEEK_TO_PREVIOUS_SYNC);
    }
}
Example 2
Source File: VideoController.java, from the VideoCompressor project (Apache License 2.0)

@TargetApi(16)
private long readAndWriteTrack(MediaExtractor extractor, MP4Builder mediaMuxer, MediaCodec.BufferInfo info,
                               long start, long end, File file, boolean isAudio) throws Exception {
    int trackIndex = selectTrack(extractor, isAudio);
    if (trackIndex >= 0) {
        extractor.selectTrack(trackIndex);
        MediaFormat trackFormat = extractor.getTrackFormat(trackIndex);
        int muxerTrackIndex = mediaMuxer.addTrack(trackFormat, isAudio);
        int maxBufferSize = trackFormat.getInteger(MediaFormat.KEY_MAX_INPUT_SIZE);
        boolean inputDone = false;
        if (start > 0) {
            extractor.seekTo(start, MediaExtractor.SEEK_TO_PREVIOUS_SYNC);
        } else {
            extractor.seekTo(0, MediaExtractor.SEEK_TO_PREVIOUS_SYNC);
        }
        ByteBuffer buffer = ByteBuffer.allocateDirect(maxBufferSize);
        long startTime = -1;
        while (!inputDone) {
            boolean eof = false;
            int index = extractor.getSampleTrackIndex();
            if (index == trackIndex) {
                info.size = extractor.readSampleData(buffer, 0);
                if (info.size < 0) {
                    info.size = 0;
                    eof = true;
                } else {
                    info.presentationTimeUs = extractor.getSampleTime();
                    if (start > 0 && startTime == -1) {
                        startTime = info.presentationTimeUs;
                    }
                    if (end < 0 || info.presentationTimeUs < end) {
                        info.offset = 0;
                        info.flags = extractor.getSampleFlags();
                        if (mediaMuxer.writeSampleData(muxerTrackIndex, buffer, info, isAudio)) {
                            // didWriteData(messageObject, file, false, false);
                        }
                        extractor.advance();
                    } else {
                        eof = true;
                    }
                }
            } else if (index == -1) {
                eof = true;
            }
            if (eof) {
                inputDone = true;
            }
        }
        extractor.unselectTrack(trackIndex);
        return startTime;
    }
    return -1;
}
Example 3
Source File: MediaController.java, from the react-native-video-helper project (MIT License)

@TargetApi(16)
private long readAndWriteTrack(MediaExtractor extractor, MP4Builder mediaMuxer, MediaCodec.BufferInfo info,
                               long start, long end, File file, boolean isAudio) throws Exception {
    int trackIndex = selectTrack(extractor, isAudio);
    if (trackIndex >= 0) {
        extractor.selectTrack(trackIndex);
        MediaFormat trackFormat = extractor.getTrackFormat(trackIndex);
        int muxerTrackIndex = mediaMuxer.addTrack(trackFormat, isAudio);
        int maxBufferSize = trackFormat.getInteger(MediaFormat.KEY_MAX_INPUT_SIZE);
        boolean inputDone = false;
        if (start > 0) {
            extractor.seekTo(start, MediaExtractor.SEEK_TO_PREVIOUS_SYNC);
        } else {
            extractor.seekTo(0, MediaExtractor.SEEK_TO_PREVIOUS_SYNC);
        }
        ByteBuffer buffer = ByteBuffer.allocateDirect(maxBufferSize);
        long startTime = -1;
        while (!inputDone) {
            boolean eof = false;
            int index = extractor.getSampleTrackIndex();
            if (index == trackIndex) {
                info.size = extractor.readSampleData(buffer, 0);
                if (info.size < 0) {
                    info.size = 0;
                    eof = true;
                } else {
                    info.presentationTimeUs = extractor.getSampleTime();
                    if (start > 0 && startTime == -1) {
                        startTime = info.presentationTimeUs;
                    }
                    if (end < 0 || info.presentationTimeUs < end) {
                        info.offset = 0;
                        info.flags = extractor.getSampleFlags();
                        if (mediaMuxer.writeSampleData(muxerTrackIndex, buffer, info, isAudio)) {
                            // didWriteData(messageObject, file, false, false);
                        }
                        extractor.advance();
                    } else {
                        eof = true;
                    }
                }
            } else if (index == -1) {
                eof = true;
            }
            if (eof) {
                inputDone = true;
            }
        }
        extractor.unselectTrack(trackIndex);
        return startTime;
    }
    return -1;
}
Example 4
Source File: MediaController.java, from the talk-android project (MIT License)

@TargetApi(16)
private long readAndWriteTrack(MediaExtractor extractor, MP4Builder mediaMuxer, MediaCodec.BufferInfo info,
                               long start, long end, File file, boolean isAudio) throws Exception {
    int trackIndex = selectTrack(extractor, isAudio);
    if (trackIndex >= 0) {
        extractor.selectTrack(trackIndex);
        MediaFormat trackFormat = extractor.getTrackFormat(trackIndex);
        int muxerTrackIndex = mediaMuxer.addTrack(trackFormat, isAudio);
        int maxBufferSize = trackFormat.getInteger(MediaFormat.KEY_MAX_INPUT_SIZE);
        boolean inputDone = false;
        if (start > 0) {
            extractor.seekTo(start, MediaExtractor.SEEK_TO_PREVIOUS_SYNC);
        } else {
            extractor.seekTo(0, MediaExtractor.SEEK_TO_PREVIOUS_SYNC);
        }
        ByteBuffer buffer = ByteBuffer.allocateDirect(maxBufferSize);
        long startTime = -1;
        while (!inputDone) {
            boolean eof = false;
            int index = extractor.getSampleTrackIndex();
            if (index == trackIndex) {
                info.size = extractor.readSampleData(buffer, 0);
                if (info.size < 0) {
                    info.size = 0;
                    eof = true;
                } else {
                    info.presentationTimeUs = extractor.getSampleTime();
                    if (start > 0 && startTime == -1) {
                        startTime = info.presentationTimeUs;
                    }
                    if (end < 0 || info.presentationTimeUs < end) {
                        info.offset = 0;
                        info.flags = extractor.getSampleFlags();
                        if (mediaMuxer.writeSampleData(muxerTrackIndex, buffer, info, isAudio)) {
                            // didWriteData(messageObject, file, false, false);
                        }
                        extractor.advance();
                    } else {
                        eof = true;
                    }
                }
            } else if (index == -1) {
                eof = true;
            }
            if (eof) {
                inputDone = true;
            }
        }
        extractor.unselectTrack(trackIndex);
        return startTime;
    }
    return -1;
}
Example 5
Source File: MediaController.java, from the SiliCompressor project (Apache License 2.0)

@TargetApi(16)
private long readAndWriteTrack(MediaExtractor extractor, MP4Builder mediaMuxer, MediaCodec.BufferInfo info,
                               long start, long end, File file, boolean isAudio) throws Exception {
    int trackIndex = selectTrack(extractor, isAudio);
    if (trackIndex >= 0) {
        extractor.selectTrack(trackIndex);
        MediaFormat trackFormat = extractor.getTrackFormat(trackIndex);
        int muxerTrackIndex = mediaMuxer.addTrack(trackFormat, isAudio);
        int maxBufferSize = trackFormat.getInteger(MediaFormat.KEY_MAX_INPUT_SIZE);
        boolean inputDone = false;
        if (start > 0) {
            extractor.seekTo(start, MediaExtractor.SEEK_TO_PREVIOUS_SYNC);
        } else {
            extractor.seekTo(0, MediaExtractor.SEEK_TO_PREVIOUS_SYNC);
        }
        ByteBuffer buffer = ByteBuffer.allocateDirect(maxBufferSize);
        long startTime = -1;
        while (!inputDone) {
            boolean eof = false;
            int index = extractor.getSampleTrackIndex();
            if (index == trackIndex) {
                info.size = extractor.readSampleData(buffer, 0);
                if (info.size < 0) {
                    info.size = 0;
                    eof = true;
                } else {
                    info.presentationTimeUs = extractor.getSampleTime();
                    if (start > 0 && startTime == -1) {
                        startTime = info.presentationTimeUs;
                    }
                    if (end < 0 || info.presentationTimeUs < end) {
                        info.offset = 0;
                        info.flags = extractor.getSampleFlags();
                        if (mediaMuxer.writeSampleData(muxerTrackIndex, buffer, info, isAudio)) {
                            // didWriteData(messageObject, file, false, false);
                        }
                        extractor.advance();
                    } else {
                        eof = true;
                    }
                }
            } else if (index == -1) {
                eof = true;
            }
            if (eof) {
                inputDone = true;
            }
        }
        extractor.unselectTrack(trackIndex);
        return startTime;
    }
    return -1;
}
Example 6
Source File: ExtractMpegFramesTest.java, from the Android-MediaCodec-Examples project (Apache License 2.0)

/**
 * Work loop.
 */
static void doExtract(MediaExtractor extractor, int trackIndex, MediaCodec decoder,
                      CodecOutputSurface outputSurface) throws IOException {
    final int TIMEOUT_USEC = 10000;
    ByteBuffer[] decoderInputBuffers = decoder.getInputBuffers();
    MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
    int inputChunk = 0;
    int decodeCount = 0;
    long frameSaveTime = 0;

    boolean outputDone = false;
    boolean inputDone = false;
    while (!outputDone) {
        if (VERBOSE) Log.d(TAG, "loop");

        // Feed more data to the decoder.
        if (!inputDone) {
            int inputBufIndex = decoder.dequeueInputBuffer(TIMEOUT_USEC);
            if (inputBufIndex >= 0) {
                ByteBuffer inputBuf = decoderInputBuffers[inputBufIndex];
                // Read the sample data into the ByteBuffer. This neither respects nor
                // updates inputBuf's position, limit, etc.
                int chunkSize = extractor.readSampleData(inputBuf, 0);
                if (chunkSize < 0) {
                    // End of stream -- send empty frame with EOS flag set.
                    decoder.queueInputBuffer(inputBufIndex, 0, 0, 0L,
                            MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                    inputDone = true;
                    if (VERBOSE) Log.d(TAG, "sent input EOS");
                } else {
                    if (extractor.getSampleTrackIndex() != trackIndex) {
                        Log.w(TAG, "WEIRD: got sample from track " +
                                extractor.getSampleTrackIndex() + ", expected " + trackIndex);
                    }
                    long presentationTimeUs = extractor.getSampleTime();
                    decoder.queueInputBuffer(inputBufIndex, 0, chunkSize,
                            presentationTimeUs, 0 /*flags*/);
                    if (VERBOSE) {
                        Log.d(TAG, "submitted frame " + inputChunk + " to dec, size=" + chunkSize);
                    }
                    inputChunk++;
                    extractor.advance();
                }
            } else {
                if (VERBOSE) Log.d(TAG, "input buffer not available");
            }
        }

        if (!outputDone) {
            int decoderStatus = decoder.dequeueOutputBuffer(info, TIMEOUT_USEC);
            if (decoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
                // no output available yet
                if (VERBOSE) Log.d(TAG, "no output from decoder available");
            } else if (decoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                // not important for us, since we're using Surface
                if (VERBOSE) Log.d(TAG, "decoder output buffers changed");
            } else if (decoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                MediaFormat newFormat = decoder.getOutputFormat();
                if (VERBOSE) Log.d(TAG, "decoder output format changed: " + newFormat);
            } else if (decoderStatus < 0) {
                fail("unexpected result from decoder.dequeueOutputBuffer: " + decoderStatus);
            } else {  // decoderStatus >= 0
                if (VERBOSE) Log.d(TAG, "surface decoder given buffer " + decoderStatus +
                        " (size=" + info.size + ")");
                if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                    if (VERBOSE) Log.d(TAG, "output EOS");
                    outputDone = true;
                }

                boolean doRender = (info.size != 0);

                // As soon as we call releaseOutputBuffer, the buffer will be forwarded
                // to SurfaceTexture to convert to a texture. The API doesn't guarantee
                // that the texture will be available before the call returns, so we
                // need to wait for the onFrameAvailable callback to fire.
                decoder.releaseOutputBuffer(decoderStatus, doRender);
                if (doRender) {
                    if (VERBOSE) Log.d(TAG, "awaiting decode of frame " + decodeCount);
                    outputSurface.awaitNewImage();
                    outputSurface.drawImage(true);

                    if (decodeCount < MAX_FRAMES) {
                        File outputFile = new File(FILES_DIR,
                                String.format("frame-%02d.png", decodeCount));
                        long startWhen = System.nanoTime();
                        outputSurface.saveFrame(outputFile.toString());
                        frameSaveTime += System.nanoTime() - startWhen;
                    }
                    decodeCount++;
                }
            }
        }
    }

    int numSaved = (MAX_FRAMES < decodeCount) ? MAX_FRAMES : decodeCount;
    Log.d(TAG, "Saving " + numSaved + " frames took " +
            (frameSaveTime / numSaved / 1000) + " us per frame");
}
Example 7
Source File: ExtractMpegFramesTest_egl14.java, from the Android-MediaCodec-Examples project (Apache License 2.0)

/**
 * Work loop.
 */
static void doExtract(MediaExtractor extractor, int trackIndex, MediaCodec decoder,
                      CodecOutputSurface outputSurface) throws IOException {
    final int TIMEOUT_USEC = 10000;
    ByteBuffer[] decoderInputBuffers = decoder.getInputBuffers();
    MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
    int inputChunk = 0;
    int decodeCount = 0;
    long frameSaveTime = 0;

    boolean outputDone = false;
    boolean inputDone = false;
    while (!outputDone) {
        if (VERBOSE) Log.d(TAG, "loop");

        // Feed more data to the decoder.
        if (!inputDone) {
            int inputBufIndex = decoder.dequeueInputBuffer(TIMEOUT_USEC);
            if (inputBufIndex >= 0) {
                ByteBuffer inputBuf = decoderInputBuffers[inputBufIndex];
                // Read the sample data into the ByteBuffer. This neither respects nor
                // updates inputBuf's position, limit, etc.
                int chunkSize = extractor.readSampleData(inputBuf, 0);
                if (chunkSize < 0) {
                    // End of stream -- send empty frame with EOS flag set.
                    decoder.queueInputBuffer(inputBufIndex, 0, 0, 0L,
                            MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                    inputDone = true;
                    if (VERBOSE) Log.d(TAG, "sent input EOS");
                } else {
                    if (extractor.getSampleTrackIndex() != trackIndex) {
                        Log.w(TAG, "WEIRD: got sample from track " +
                                extractor.getSampleTrackIndex() + ", expected " + trackIndex);
                    }
                    long presentationTimeUs = extractor.getSampleTime();
                    decoder.queueInputBuffer(inputBufIndex, 0, chunkSize,
                            presentationTimeUs, 0 /*flags*/);
                    if (VERBOSE) {
                        Log.d(TAG, "submitted frame " + inputChunk + " to dec, size=" + chunkSize);
                    }
                    inputChunk++;
                    extractor.advance();
                }
            } else {
                if (VERBOSE) Log.d(TAG, "input buffer not available");
            }
        }

        if (!outputDone) {
            int decoderStatus = decoder.dequeueOutputBuffer(info, TIMEOUT_USEC);
            if (decoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
                // no output available yet
                if (VERBOSE) Log.d(TAG, "no output from decoder available");
            } else if (decoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                // not important for us, since we're using Surface
                if (VERBOSE) Log.d(TAG, "decoder output buffers changed");
            } else if (decoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                MediaFormat newFormat = decoder.getOutputFormat();
                if (VERBOSE) Log.d(TAG, "decoder output format changed: " + newFormat);
            } else if (decoderStatus < 0) {
                fail("unexpected result from decoder.dequeueOutputBuffer: " + decoderStatus);
            } else {  // decoderStatus >= 0
                if (VERBOSE) Log.d(TAG, "surface decoder given buffer " + decoderStatus +
                        " (size=" + info.size + ")");
                if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                    if (VERBOSE) Log.d(TAG, "output EOS");
                    outputDone = true;
                }

                boolean doRender = (info.size != 0);

                // As soon as we call releaseOutputBuffer, the buffer will be forwarded
                // to SurfaceTexture to convert to a texture. The API doesn't guarantee
                // that the texture will be available before the call returns, so we
                // need to wait for the onFrameAvailable callback to fire.
                decoder.releaseOutputBuffer(decoderStatus, doRender);
                if (doRender) {
                    if (VERBOSE) Log.d(TAG, "awaiting decode of frame " + decodeCount);
                    outputSurface.awaitNewImage();
                    outputSurface.drawImage(true);

                    if (decodeCount < MAX_FRAMES) {
                        File outputFile = new File(FILES_DIR,
                                String.format("frame-%02d.png", decodeCount));
                        long startWhen = System.nanoTime();
                        outputSurface.saveFrame(outputFile.toString());
                        frameSaveTime += System.nanoTime() - startWhen;
                    }
                    decodeCount++;
                }
            }
        }
    }

    int numSaved = (MAX_FRAMES < decodeCount) ? MAX_FRAMES : decodeCount;
    Log.d(TAG, "Saving " + numSaved + " frames took " +
            (frameSaveTime / numSaved / 1000) + " us per frame");
}
Example 8
Source File: VideoRecoder.java, from the deltachat-android project (GNU General Public License v3.0)

@TargetApi(16)
private long readAndWriteTrack(MediaExtractor extractor, MP4Builder mediaMuxer, MediaCodec.BufferInfo info,
                               long start, long end, File file, boolean isAudio) throws Exception {
    int trackIndex = selectTrack(extractor, isAudio);
    if (trackIndex >= 0) {
        extractor.selectTrack(trackIndex);
        MediaFormat trackFormat = extractor.getTrackFormat(trackIndex);
        int muxerTrackIndex = mediaMuxer.addTrack(trackFormat, isAudio);
        int maxBufferSize = trackFormat.getInteger(MediaFormat.KEY_MAX_INPUT_SIZE);
        boolean inputDone = false;
        if (start > 0) {
            extractor.seekTo(start, MediaExtractor.SEEK_TO_PREVIOUS_SYNC);
        } else {
            extractor.seekTo(0, MediaExtractor.SEEK_TO_PREVIOUS_SYNC);
        }
        ByteBuffer buffer = ByteBuffer.allocateDirect(maxBufferSize);
        long startTime = -1;

        checkConversionCanceled();
        long lastTimestamp = -100;

        while (!inputDone) {
            checkConversionCanceled();

            boolean eof = false;
            int index = extractor.getSampleTrackIndex();
            if (index == trackIndex) {
                info.size = extractor.readSampleData(buffer, 0);

                if (info.size >= 0) {
                    info.presentationTimeUs = extractor.getSampleTime();
                } else {
                    info.size = 0;
                    eof = true;
                }

                if (info.size > 0 && !eof) {
                    if (start > 0 && startTime == -1) {
                        startTime = info.presentationTimeUs;
                    }
                    if (end < 0 || info.presentationTimeUs < end) {
                        if (info.presentationTimeUs > lastTimestamp) {
                            info.offset = 0;
                            info.flags = extractor.getSampleFlags();
                            if (mediaMuxer.writeSampleData(muxerTrackIndex, buffer, info, isAudio)) {
                                //didWriteData(messageObject, file, false, false);
                            }
                        }
                        lastTimestamp = info.presentationTimeUs;
                    } else {
                        eof = true;
                    }
                }
                if (!eof) {
                    extractor.advance();
                }
            } else if (index == -1) {
                eof = true;
            } else {
                extractor.advance();
            }
            if (eof) {
                inputDone = true;
            }
        }

        extractor.unselectTrack(trackIndex);
        return startTime;
    }
    return -1;
}