Java Code Examples for android.media.AudioFormat#ENCODING_PCM_FLOAT
The following examples show how to use android.media.AudioFormat#ENCODING_PCM_FLOAT.
Each example notes the project and license of its original source file.
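Most of the snippets below configure an AudioTrack through the (streamType, sampleRate, channelConfig, audioFormat, bufferSize, mode) constructor, which has since been deprecated in favor of AudioTrack.Builder. For orientation, here is a minimal sketch of the equivalent float-PCM setup using the Builder API (available since API 23); the sample rate and the 2x buffer factor are illustrative assumptions, not values taken from the examples below.

// Minimal sketch (assumes API 23+); sampleRate and the 2x buffer factor are illustrative.
int sampleRate = 48000;
int minBufferSizeBytes = AudioTrack.getMinBufferSize(sampleRate,
        AudioFormat.CHANNEL_OUT_STEREO, AudioFormat.ENCODING_PCM_FLOAT);

AudioTrack track = new AudioTrack.Builder()
        .setAudioAttributes(new AudioAttributes.Builder()
                .setUsage(AudioAttributes.USAGE_MEDIA)
                .setContentType(AudioAttributes.CONTENT_TYPE_MUSIC)
                .build())
        .setAudioFormat(new AudioFormat.Builder()
                .setEncoding(AudioFormat.ENCODING_PCM_FLOAT)
                .setSampleRate(sampleRate)
                .setChannelMask(AudioFormat.CHANNEL_OUT_STEREO)
                .build())
        .setBufferSizeInBytes(2 * minBufferSizeBytes)
        .setTransferMode(AudioTrack.MODE_STREAM)
        .build();
track.play();

// With ENCODING_PCM_FLOAT, samples are floats in [-1.0, 1.0] written directly:
float[] buffer = new float[1024]; // fill with audio data
track.write(buffer, 0, buffer.length, AudioTrack.WRITE_BLOCKING);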
Example 1
Source File: SimpleAudioOutput.java From media-samples with Apache License 2.0
public AudioTrack createAudioTrack(int frameRate) {
    int minBufferSizeBytes = AudioTrack.getMinBufferSize(frameRate,
            AudioFormat.CHANNEL_OUT_STEREO, AudioFormat.ENCODING_PCM_FLOAT);
    Log.i(TAG, "AudioTrack.minBufferSize = " + minBufferSizeBytes
            + " bytes = " + (minBufferSizeBytes / BYTES_PER_FRAME)
            + " frames");
    int bufferSize = 8 * minBufferSizeBytes / 8;
    int outputBufferSizeFrames = bufferSize / BYTES_PER_FRAME;
    Log.i(TAG, "actual bufferSize = " + bufferSize + " bytes = "
            + outputBufferSizeFrames + " frames");

    AudioTrack player = new AudioTrack(AudioManager.STREAM_MUSIC,
            mFrameRate,
            AudioFormat.CHANNEL_OUT_STEREO,
            AudioFormat.ENCODING_PCM_FLOAT,
            bufferSize,
            AudioTrack.MODE_STREAM);
    Log.i(TAG, "created AudioTrack");
    return player;
}
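Two details worth noting in this method (which recurs verbatim in Examples 4 and 5 below): 8 * minBufferSizeBytes / 8 simplifies back to minBufferSizeBytes, so the track is created with the minimum buffer size; the multiplier appears to be a leftover knob for latency experiments. The AudioTrack is also constructed with the mFrameRate field rather than the frameRate parameter used for the getMinBufferSize() query, so the two are presumably kept in sync elsewhere in the class.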
Example 2
Source File: Microphone.java From ssj with GNU General Public License v3.0
public static int audioFormatSampleBytes(int f) {
    switch (f) {
        case AudioFormat.ENCODING_PCM_8BIT:
            return 1;
        case AudioFormat.ENCODING_PCM_16BIT:
        case AudioFormat.ENCODING_DEFAULT:
            return 2;
        case AudioFormat.ENCODING_PCM_FLOAT:
            return 4;
        case AudioFormat.ENCODING_INVALID:
        default:
            return 0;
    }
}
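An ENCODING_PCM_FLOAT sample is a 4-byte IEEE 754 single-precision float, so a stereo frame in this format occupies 8 bytes. Note that this helper treats ENCODING_DEFAULT as 16-bit PCM, which matches the usual platform default.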
Example 3
Source File: Microphone.java From ssj with GNU General Public License v3.0
public static Cons.Type audioFormatSampleType(int f) {
    switch (f) {
        case AudioFormat.ENCODING_PCM_8BIT:
            return Cons.Type.CHAR;
        case AudioFormat.ENCODING_PCM_16BIT:
        case AudioFormat.ENCODING_DEFAULT:
            return Cons.Type.SHORT;
        case AudioFormat.ENCODING_PCM_FLOAT:
            return Cons.Type.FLOAT;
        case AudioFormat.ENCODING_INVALID:
        default:
            return Cons.Type.UNDEF;
    }
}
Example 4
Source File: SimpleAudioOutput.java From android-MidiSynth with Apache License 2.0
The createAudioTrack() method in this sample is byte-for-byte identical to the one shown in Example 1.
Example 5
Source File: SimpleAudioOutput.java From android-MidiScope with Apache License 2.0
Likewise identical to the createAudioTrack() method in Example 1.
Example 6
Source File: WebRtcAudioRecord.java From webrtc_android with MIT License
private static int getBytesPerSample(int audioFormat) {
    switch (audioFormat) {
        case AudioFormat.ENCODING_PCM_8BIT:
            return 1;
        case AudioFormat.ENCODING_PCM_16BIT:
        case AudioFormat.ENCODING_IEC61937:
        case AudioFormat.ENCODING_DEFAULT:
            return 2;
        case AudioFormat.ENCODING_PCM_FLOAT:
            return 4;
        case AudioFormat.ENCODING_INVALID:
        default:
            throw new IllegalArgumentException("Bad audio format " + audioFormat);
    }
}
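ENCODING_IEC61937 is compressed audio wrapped for transport inside a 16-bit PCM stream, which is why it reports the same 2 bytes per sample as ENCODING_PCM_16BIT here, while ENCODING_PCM_FLOAT again yields 4.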
Example 7
Source File: AudioSaveHelper.java From Android-AudioRecorder-App with Apache License 2.0
/**
 * Writes the proper 44-byte RIFF/WAVE header to/for the given stream.
 * Two size fields are left empty/null since we do not yet know the final stream size.
 *
 * @param out The stream to write the header to
 * @param channelMask An AudioFormat.CHANNEL_* mask
 * @param sampleRate The sample rate in hertz
 * @param encoding An AudioFormat.ENCODING_PCM_* value
 * @throws IOException
 */
private void writeWavHeader(OutputStream out, int channelMask, int sampleRate, int encoding)
        throws IOException {
    short channels;
    switch (channelMask) {
        case AudioFormat.CHANNEL_IN_MONO:
            channels = 1;
            break;
        case AudioFormat.CHANNEL_IN_STEREO:
            channels = 2;
            break;
        default:
            throw new IllegalArgumentException("Unacceptable channel mask");
    }

    short bitDepth;
    switch (encoding) {
        case AudioFormat.ENCODING_PCM_8BIT:
            bitDepth = 8;
            break;
        case AudioFormat.ENCODING_PCM_16BIT:
            bitDepth = 16;
            break;
        case AudioFormat.ENCODING_PCM_FLOAT:
            bitDepth = 32;
            break;
        default:
            throw new IllegalArgumentException("Unacceptable encoding");
    }

    writeWavHeader(out, channels, sampleRate, bitDepth);
}
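The four-argument writeWavHeader() overload this method delegates to is not included in the snippet. Below is a hypothetical sketch of what such a header writer could look like, based on the standard RIFF/WAVE layout rather than on the project's code. One caveat the example glosses over: a 32-bit float WAV technically requires format tag 3 (WAVE_FORMAT_IEEE_FLOAT) in the fmt chunk rather than tag 1 (integer PCM).

// Hypothetical sketch of the missing overload (uses java.nio.ByteBuffer,
// java.nio.ByteOrder and java.nio.charset.StandardCharsets). The two size
// fields are written as zero and patched once the final length is known.
private static void writeWavHeader(OutputStream out, short channels,
        int sampleRate, short bitDepth) throws IOException {
    short formatTag = (bitDepth == 32) ? (short) 3 : (short) 1; // 3 = IEEE float, 1 = integer PCM
    byte[] header = ByteBuffer.allocate(44)
            .order(ByteOrder.LITTLE_ENDIAN)
            .put("RIFF".getBytes(StandardCharsets.US_ASCII))
            .putInt(0) // overall RIFF chunk size, patched later
            .put("WAVE".getBytes(StandardCharsets.US_ASCII))
            .put("fmt ".getBytes(StandardCharsets.US_ASCII))
            .putInt(16) // fmt chunk size
            .putShort(formatTag)
            .putShort(channels)
            .putInt(sampleRate)
            .putInt(sampleRate * channels * (bitDepth / 8)) // byte rate
            .putShort((short) (channels * (bitDepth / 8))) // block align
            .putShort(bitDepth)
            .put("data".getBytes(StandardCharsets.US_ASCII))
            .putInt(0) // data chunk size, patched later
            .array();
    out.write(header);
}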
Example 8
Source File: WavFileHelper.java From video-quickstart-android with MIT License
Apart from being declared static, this writeWavHeader() is identical to the one in Example 7 above.
Example 9
Source File: AndroidAudioForJSyn.java From jsyn with Apache License 2.0
public void start() {
    Process.setThreadPriority(-5);
    minBufferSize = AudioTrack.getMinBufferSize(frameRate,
            AudioFormat.CHANNEL_OUT_STEREO, AudioFormat.ENCODING_PCM_FLOAT);
    System.out.println("Audio minBufferSize = " + minBufferSize);
    bufferSize = (3 * (minBufferSize / 2)) & ~3;
    System.out.println("Audio bufferSize = " + bufferSize);

    audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, frameRate,
            AudioFormat.CHANNEL_OUT_STEREO, AudioFormat.ENCODING_PCM_FLOAT,
            bufferSize, AudioTrack.MODE_STREAM);
    audioTrack.play();
}
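The expression (3 * (minBufferSize / 2)) & ~3 sizes the buffer at roughly 1.5 times the reported minimum, rounded down to a multiple of 4 bytes so the buffer always holds a whole number of 4-byte float samples.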
Example 10
Source File: BaseAudioDecoder.java From sdl_java_suite with BSD 3-Clause "New" or "Revised" License
protected void onOutputFormatChanged(@NonNull MediaFormat mediaFormat) {
    if (mediaFormat.containsKey(MediaFormat.KEY_CHANNEL_COUNT)) {
        outputChannelCount = mediaFormat.getInteger(MediaFormat.KEY_CHANNEL_COUNT);
    }

    if (mediaFormat.containsKey(MediaFormat.KEY_SAMPLE_RATE)) {
        outputSampleRate = mediaFormat.getInteger(MediaFormat.KEY_SAMPLE_RATE);
    }

    if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.N
            && mediaFormat.containsKey(MediaFormat.KEY_PCM_ENCODING)) {
        int key = mediaFormat.getInteger(MediaFormat.KEY_PCM_ENCODING);
        switch (key) {
            case AudioFormat.ENCODING_PCM_8BIT:
                outputSampleType = SampleType.UNSIGNED_8_BIT;
                break;
            case AudioFormat.ENCODING_PCM_FLOAT:
                outputSampleType = SampleType.FLOAT;
                break;
            case AudioFormat.ENCODING_PCM_16BIT:
            default:
                // by default we fall back to signed 16-bit samples
                outputSampleType = SampleType.SIGNED_16_BIT;
                break;
        }
    } else {
        outputSampleType = SampleType.SIGNED_16_BIT;
    }
}
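The SDK_INT guard is needed because MediaFormat.KEY_PCM_ENCODING was only added in API 24 (Build.VERSION_CODES.N); on older releases the decoder output is assumed to be signed 16-bit PCM.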
Example 11
Source File: FileSynthesisCallback.java From android_9.0.0_r45 with Apache License 2.0
@Override
public int start(int sampleRateInHz, int audioFormat, int channelCount) {
    if (DBG) {
        Log.d(TAG, "FileSynthesisRequest.start(" + sampleRateInHz + ","
                + audioFormat + "," + channelCount + ")");
    }
    if (audioFormat != AudioFormat.ENCODING_PCM_8BIT
            && audioFormat != AudioFormat.ENCODING_PCM_16BIT
            && audioFormat != AudioFormat.ENCODING_PCM_FLOAT) {
        Log.e(TAG, "Audio format encoding " + audioFormat + " not supported. Please use one "
                + "of AudioFormat.ENCODING_PCM_8BIT, AudioFormat.ENCODING_PCM_16BIT or "
                + "AudioFormat.ENCODING_PCM_FLOAT");
    }
    mDispatcher.dispatchOnBeginSynthesis(sampleRateInHz, audioFormat, channelCount);

    FileChannel fileChannel = null;
    synchronized (mStateLock) {
        if (mStatusCode == TextToSpeech.STOPPED) {
            if (DBG) Log.d(TAG, "Request has been aborted.");
            return errorCodeOnStop();
        }
        if (mStatusCode != TextToSpeech.SUCCESS) {
            if (DBG) Log.d(TAG, "Error was raised");
            return TextToSpeech.ERROR;
        }
        if (mStarted) {
            Log.e(TAG, "Start called twice");
            return TextToSpeech.ERROR;
        }
        mStarted = true;
        mSampleRateInHz = sampleRateInHz;
        mAudioFormat = audioFormat;
        mChannelCount = channelCount;

        mDispatcher.dispatchOnStart();
        fileChannel = mFileChannel;
    }

    try {
        fileChannel.write(ByteBuffer.allocate(WAV_HEADER_LENGTH));
        return TextToSpeech.SUCCESS;
    } catch (IOException ex) {
        Log.e(TAG, "Failed to write wav header to output file descriptor", ex);
        synchronized (mStateLock) {
            cleanUp();
            mStatusCode = TextToSpeech.ERROR_OUTPUT;
        }
        return TextToSpeech.ERROR;
    }
}
Example 12
Source File: PlaybackSynthesisCallback.java From android_9.0.0_r45 with Apache License 2.0
@Override
public int start(int sampleRateInHz, int audioFormat, int channelCount) {
    if (DBG) Log.d(TAG, "start(" + sampleRateInHz + "," + audioFormat + ","
            + channelCount + ")");
    if (audioFormat != AudioFormat.ENCODING_PCM_8BIT
            && audioFormat != AudioFormat.ENCODING_PCM_16BIT
            && audioFormat != AudioFormat.ENCODING_PCM_FLOAT) {
        Log.w(TAG, "Audio format encoding " + audioFormat + " not supported. Please use one "
                + "of AudioFormat.ENCODING_PCM_8BIT, AudioFormat.ENCODING_PCM_16BIT or "
                + "AudioFormat.ENCODING_PCM_FLOAT");
    }
    mDispatcher.dispatchOnBeginSynthesis(sampleRateInHz, audioFormat, channelCount);

    int channelConfig = BlockingAudioTrack.getChannelConfig(channelCount);

    synchronized (mStateLock) {
        if (channelConfig == 0) {
            Log.e(TAG, "Unsupported number of channels :" + channelCount);
            mStatusCode = TextToSpeech.ERROR_OUTPUT;
            return TextToSpeech.ERROR;
        }
        if (mStatusCode == TextToSpeech.STOPPED) {
            if (DBG) Log.d(TAG, "stop() called before start(), returning.");
            return errorCodeOnStop();
        }
        if (mStatusCode != TextToSpeech.SUCCESS) {
            if (DBG) Log.d(TAG, "Error was raised");
            return TextToSpeech.ERROR;
        }
        if (mItem != null) {
            Log.e(TAG, "Start called twice");
            return TextToSpeech.ERROR;
        }
        SynthesisPlaybackQueueItem item = new SynthesisPlaybackQueueItem(
                mAudioParams, sampleRateInHz, audioFormat, channelCount,
                mDispatcher, mCallerIdentity, mLogger);
        mAudioTrackHandler.enqueue(item);
        mItem = item;
    }

    return TextToSpeech.SUCCESS;
}
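Note that both of these AOSP TextToSpeech callbacks merely log when the engine reports an encoding other than ENCODING_PCM_8BIT, ENCODING_PCM_16BIT or ENCODING_PCM_FLOAT; they do not reject the request.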
Example 13
Source File: AudioStreamManagerTest.java From sdl_java_suite with BSD 3-Clause "New" or "Revised" License
public void testOutputFormatChanged() {
    BaseAudioDecoder mockDecoder = mock(BaseAudioDecoder.class, Mockito.CALLS_REAL_METHODS);

    try {
        Field outputChannelCountField = BaseAudioDecoder.class.getDeclaredField("outputChannelCount");
        Field outputSampleRateField = BaseAudioDecoder.class.getDeclaredField("outputSampleRate");
        Field outputSampleTypeField = BaseAudioDecoder.class.getDeclaredField("outputSampleType");
        outputChannelCountField.setAccessible(true);
        outputSampleRateField.setAccessible(true);
        outputSampleTypeField.setAccessible(true);

        // channel count, sample rate, sample type
        int key_channel_count = 0, key_sample_rate = 1, key_sample_type = 2, key_sample_type_result = 3;
        int[][] tests = new int[][]{
                {47, 42000, AudioFormat.ENCODING_PCM_8BIT, SampleType.UNSIGNED_8_BIT},
                {2, 16000, AudioFormat.ENCODING_PCM_16BIT, SampleType.SIGNED_16_BIT},
                {1, 22050, AudioFormat.ENCODING_PCM_FLOAT, SampleType.FLOAT},
                {3, 48000, AudioFormat.ENCODING_INVALID, SampleType.SIGNED_16_BIT},
        };

        for (int[] test : tests) {
            int channel_count = test[key_channel_count];
            int sample_rate = test[key_sample_rate];
            int sample_type = test[key_sample_type];
            int sample_type_result = test[key_sample_type_result];

            MediaFormat format = new MediaFormat();
            format.setInteger(MediaFormat.KEY_CHANNEL_COUNT, channel_count);
            format.setInteger(MediaFormat.KEY_SAMPLE_RATE, sample_rate);
            format.setInteger(MediaFormat.KEY_PCM_ENCODING, sample_type);

            // in case the phone version is old the method does not take sample type into account but
            // always expects 16 bit. See https://developer.android.com/reference/android/media/MediaFormat.html#KEY_PCM_ENCODING
            if (android.os.Build.VERSION.SDK_INT < android.os.Build.VERSION_CODES.N) {
                sample_type_result = SampleType.SIGNED_16_BIT;
            }

            mockDecoder.onOutputFormatChanged(format);

            int output_channel_count = outputChannelCountField.getInt(mockDecoder);
            int output_sample_rate = outputSampleRateField.getInt(mockDecoder);
            int output_sample_type = outputSampleTypeField.getInt(mockDecoder);

            // changing from assertEquals to if and fail so travis gives better results
            if (channel_count != output_channel_count) {
                fail("AssertEqualsFailed: channel_count == output_channel_count ("
                        + channel_count + " == " + output_channel_count + ")");
            }
            if (sample_rate != output_sample_rate) {
                fail("AssertEqualsFailed: sample_rate == output_sample_rate ("
                        + sample_rate + " == " + output_sample_rate + ")");
            }
            if (sample_type_result != output_sample_type) {
                fail("Assert: sample_type_result == output_sample_type ("
                        + sample_type_result + " == " + output_sample_type + ")");
            }
        }
    } catch (Exception e) {
        e.printStackTrace();
        fail();
    }
}