Java Code Examples for android.media.AudioFormat#CHANNEL_IN_STEREO
The following examples show how to use android.media.AudioFormat#CHANNEL_IN_STEREO.
Each example notes the project, source file, and license it was taken from.
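As a quick orientation before the project snippets, the recurring pattern below is to map a channel count to an input channel mask and pass it to AudioRecord.getMinBufferSize() and the AudioRecord constructor. Here is a minimal sketch of that pattern for a 44.1 kHz, 16-bit stereo capture; the class name, constants, and error handling are illustrative assumptions, not code from any of the projects listed, and the RECORD_AUDIO runtime permission is assumed to be granted already.

import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder;

public final class StereoRecorderSketch {

    // Minimal sketch: create an AudioRecord configured for stereo input.
    // Assumes the RECORD_AUDIO runtime permission has already been granted.
    public static AudioRecord createStereoRecorder() {
        int sampleRate = 44100;                             // widely supported sample rate
        int channelConfig = AudioFormat.CHANNEL_IN_STEREO;  // two input channels
        int encoding = AudioFormat.ENCODING_PCM_16BIT;      // 16-bit PCM samples

        int minBufferSize = AudioRecord.getMinBufferSize(sampleRate, channelConfig, encoding);
        if (minBufferSize == AudioRecord.ERROR_BAD_VALUE || minBufferSize == AudioRecord.ERROR) {
            return null; // this combination is not supported on the device
        }

        AudioRecord recorder = new AudioRecord(MediaRecorder.AudioSource.MIC,
                sampleRate, channelConfig, encoding, minBufferSize * 2);
        if (recorder.getState() != AudioRecord.STATE_INITIALIZED) {
            recorder.release();
            return null;
        }
        return recorder;
    }
}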
Example 1
Source File: MicrophoneSource.java From media-for-mobile with Apache License 2.0

public synchronized void configure(int sampleRate, int channels) {
    this.sampleRate = sampleRate;
    recordChannels = channels;
    switch (recordChannels) {
        case 1: {
            androidChannels = AudioFormat.CHANNEL_IN_MONO;
        }
        break;
        case 2: {
            androidChannels = AudioFormat.CHANNEL_IN_STEREO;
        }
        break;
    }
    minBufferSize = AudioRecord.getMinBufferSize(sampleRate, androidChannels, audioEncoding);
    if (minBufferSize < 0) {
        this.sampleRate = 8000;
        minBufferSize = AudioRecord.getMinBufferSize(sampleRate, androidChannels, audioEncoding);
    }
}
Example 2
Source File: AndroidRecorder.java From LingoRecorder with Apache License 2.0

public AndroidRecorder(final RecorderProperty recorderProperty) {
    this.recorderProperty = recorderProperty;
    if (this.recorderProperty.getBitsPerSample() == 16) {
        audioFormat = AudioFormat.ENCODING_PCM_16BIT;
    } else if (this.recorderProperty.getBitsPerSample() == 8) {
        audioFormat = AudioFormat.ENCODING_PCM_8BIT;
    } else {
        throw new RecorderException(
                "unsupported bitsPerSample: " + this.recorderProperty.getBitsPerSample());
    }
    if (this.recorderProperty.getChannels() == 1) {
        this.channels = AudioFormat.CHANNEL_IN_MONO;
    } else if (this.recorderProperty.getChannels() == 2) {
        this.channels = AudioFormat.CHANNEL_IN_STEREO;
    } else {
        throw new RecorderException(
                "unsupported channel: " + this.recorderProperty.getChannels());
    }
}
Example 3
Source File: AudioUtil.java From VideoProcessor with Apache License 2.0

/**
 * Adjusts the volume of an AAC audio track.
 *
 * @param volume [0,100]
 * @throws IOException
 */
public static void adjustAacVolume(Context context, VideoProcessor.MediaSource aacSource, String outPath,
                                   int volume, @Nullable VideoProgressListener listener) throws IOException {
    String name = "temp_aac_" + System.currentTimeMillis();
    File pcmFile = new File(VideoUtil.getVideoCacheDir(context), name + ".pcm");
    File pcmFile2 = new File(VideoUtil.getVideoCacheDir(context), name + "_2.pcm");
    File wavFile = new File(VideoUtil.getVideoCacheDir(context), name + ".wav");
    AudioUtil.decodeToPCM(aacSource, pcmFile.getAbsolutePath(), null, null);
    AudioUtil.adjustPcmVolume(pcmFile.getAbsolutePath(), pcmFile2.getAbsolutePath(), volume);

    MediaExtractor extractor = new MediaExtractor();
    aacSource.setDataSource(extractor);
    int trackIndex = VideoUtil.selectTrack(extractor, true);
    MediaFormat aacFormat = extractor.getTrackFormat(trackIndex);
    int sampleRate = aacFormat.getInteger(MediaFormat.KEY_SAMPLE_RATE);
    int oriChannelCount = aacFormat.getInteger(MediaFormat.KEY_CHANNEL_COUNT);
    int channelConfig = AudioFormat.CHANNEL_IN_MONO;
    if (oriChannelCount == 2) {
        channelConfig = AudioFormat.CHANNEL_IN_STEREO;
    }
    new PcmToWavUtil(sampleRate, channelConfig, oriChannelCount, AudioFormat.ENCODING_PCM_16BIT)
            .pcmToWav(pcmFile2.getAbsolutePath(), wavFile.getAbsolutePath());
    AudioUtil.encodeWAVToAAC(wavFile.getPath(), outPath, aacFormat, listener);
}
Example 4
Source File: AudioRecorder.java From react-native-google-nearby-connection with MIT License

public AudioRecord findAudioRecord() {
    for (int rate : AudioBuffer.POSSIBLE_SAMPLE_RATES) {
        for (short audioFormat : new short[] { AudioFormat.ENCODING_PCM_8BIT, AudioFormat.ENCODING_PCM_16BIT }) {
            for (short channelConfig : new short[] { AudioFormat.CHANNEL_IN_MONO, AudioFormat.CHANNEL_IN_STEREO }) {
                try {
                    Log.d(TAG, "Attempting rate " + rate + "Hz, bits: " + audioFormat + ", channel: " + channelConfig);
                    int bufferSize = AudioRecord.getMinBufferSize(rate, channelConfig, audioFormat);

                    if (bufferSize != AudioRecord.ERROR_BAD_VALUE) {
                        // check if we can instantiate and have a success
                        AudioRecord recorder = new AudioRecord(AudioSource.DEFAULT, rate,
                                channelConfig, audioFormat, bufferSize);

                        if (recorder.getState() == AudioRecord.STATE_INITIALIZED) {
                            return recorder;
                        }
                    }
                } catch (Exception e) {
                    Log.e(TAG, rate + "Exception, keep trying.", e);
                }
            }
        }
    }
    return null;
}
Example 5
Source File: RecordAudioTest.java From AndPermission with Apache License 2.0

public static int[] findAudioParameters() {
    for (int rate : RATES) {
        for (int channel : new int[]{AudioFormat.CHANNEL_IN_MONO, AudioFormat.CHANNEL_IN_STEREO}) {
            for (int format : new int[]{AudioFormat.ENCODING_PCM_8BIT, AudioFormat.ENCODING_PCM_16BIT}) {
                int buffer = AudioRecord.getMinBufferSize(rate, channel, format);
                if (buffer != AudioRecord.ERROR_BAD_VALUE) {
                    return new int[]{rate, channel, format, buffer};
                }
            }
        }
    }
    return null;
}
Example 6
Source File: WavFileHelper.java From video-quickstart-android with MIT License

/**
 * Writes the proper 44-byte RIFF/WAVE header to/for the given stream. Two size fields are left
 * empty/null since we do not yet know the final stream size.
 *
 * @param out The stream to write the header to
 * @param channelMask An AudioFormat.CHANNEL_* mask
 * @param sampleRate The sample rate in hertz
 * @param encoding An AudioFormat.ENCODING_PCM_* value
 * @throws IOException
 */
private static void writeWavHeader(
        OutputStream out, int channelMask, int sampleRate, int encoding) throws IOException {
    short channels;
    switch (channelMask) {
        case AudioFormat.CHANNEL_IN_MONO:
            channels = 1;
            break;
        case AudioFormat.CHANNEL_IN_STEREO:
            channels = 2;
            break;
        default:
            throw new IllegalArgumentException("Unacceptable channel mask");
    }

    short bitDepth;
    switch (encoding) {
        case AudioFormat.ENCODING_PCM_8BIT:
            bitDepth = 8;
            break;
        case AudioFormat.ENCODING_PCM_16BIT:
            bitDepth = 16;
            break;
        case AudioFormat.ENCODING_PCM_FLOAT:
            bitDepth = 32;
            break;
        default:
            throw new IllegalArgumentException("Unacceptable encoding");
    }

    writeWavHeader(out, channels, sampleRate, bitDepth);
}
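The three-argument overload above only maps the AudioFormat constants to a channel count and bit depth; the header bytes themselves are written by a four-argument writeWavHeader overload that is not part of the snippet. Below is a rough sketch of what such an overload could look like for PCM data, following the standard 44-byte RIFF/WAVE layout and leaving the two size fields as zeros to be patched once the stream length is known. The field layout and helper body are assumptions for illustration, not the project's verbatim code; it uses java.nio.ByteBuffer and java.nio.ByteOrder.

// Hypothetical sketch of the delegated-to overload; the real project may differ.
private static void writeWavHeader(OutputStream out, short channels, int sampleRate, short bitDepth)
        throws IOException {
    // All multi-byte fields in a WAV header are little-endian.
    byte[] littleBytes = ByteBuffer
            .allocate(14)
            .order(ByteOrder.LITTLE_ENDIAN)
            .putShort(channels)
            .putInt(sampleRate)
            .putInt(sampleRate * channels * (bitDepth / 8))  // byte rate
            .putShort((short) (channels * (bitDepth / 8)))   // block align
            .putShort(bitDepth)
            .array();

    out.write(new byte[]{
            'R', 'I', 'F', 'F',              // chunk ID
            0, 0, 0, 0,                      // chunk size (unknown yet, patched later)
            'W', 'A', 'V', 'E',              // format
            'f', 'm', 't', ' ',              // subchunk 1 ID
            16, 0, 0, 0,                     // subchunk 1 size (16 for PCM)
            1, 0,                            // audio format 1 = PCM
            littleBytes[0], littleBytes[1],  // number of channels
            littleBytes[2], littleBytes[3], littleBytes[4], littleBytes[5],  // sample rate
            littleBytes[6], littleBytes[7], littleBytes[8], littleBytes[9],  // byte rate
            littleBytes[10], littleBytes[11],  // block align
            littleBytes[12], littleBytes[13],  // bits per sample
            'd', 'a', 't', 'a',              // subchunk 2 ID
            0, 0, 0, 0                       // subchunk 2 size (unknown yet, patched later)
    });
}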
Example 7
Source File: MediaAudioEncoder.java From EZFilter with MIT License

private int getChannels(int channelConfig) {
    int channels;
    switch (channelConfig) {
        case AudioFormat.CHANNEL_IN_MONO:
            channels = 1;
            break;
        case AudioFormat.CHANNEL_IN_STEREO:
            channels = 2;
            break;
        default:
            channels = 2;
            break;
    }
    return channels;
}
Example 8
Source File: AudioManagerAndroid.java From android-chromium with BSD 2-Clause "Simplified" License

/**
 * Returns the minimum frame size required for audio input.
 *
 * @param sampleRate sampling rate
 * @param channels number of channels
 */
@CalledByNative
private static int getMinInputFrameSize(int sampleRate, int channels) {
    int channelConfig;
    if (channels == 1) {
        channelConfig = AudioFormat.CHANNEL_IN_MONO;
    } else if (channels == 2) {
        channelConfig = AudioFormat.CHANNEL_IN_STEREO;
    } else {
        return -1;
    }
    return AudioRecord.getMinBufferSize(
            sampleRate, channelConfig, AudioFormat.ENCODING_PCM_16BIT) / 2 / channels;
}
Example 9
Source File: RecordAudioTester.java From PermissionAgent with Apache License 2.0

private static AudioRecord findAudioRecord() {
    for (int rate : RATES) {
        for (short format : new short[] {AudioFormat.ENCODING_PCM_8BIT, AudioFormat.ENCODING_PCM_16BIT}) {
            for (short channel : new short[] {AudioFormat.CHANNEL_IN_MONO, AudioFormat.CHANNEL_IN_STEREO}) {
                int buffer = AudioRecord.getMinBufferSize(rate, channel, format);
                if (buffer != AudioRecord.ERROR_BAD_VALUE) {
                    AudioRecord recorder =
                            new AudioRecord(MediaRecorder.AudioSource.MIC, rate, channel, format, buffer);
                    if (recorder.getState() == AudioRecord.STATE_INITIALIZED) return recorder;
                }
            }
        }
    }
    return null;
}
Example 10
Source File: WebRtcAudioManager.java From webrtc_android with MIT License

private static int getMinInputFrameSize(int sampleRateInHz, int numChannels) {
    final int bytesPerFrame = numChannels * (BITS_PER_SAMPLE / 8);
    final int channelConfig =
            (numChannels == 1 ? AudioFormat.CHANNEL_IN_MONO : AudioFormat.CHANNEL_IN_STEREO);
    return AudioRecord.getMinBufferSize(
            sampleRateInHz, channelConfig, AudioFormat.ENCODING_PCM_16BIT) / bytesPerFrame;
}
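Both this helper and the Chromium one above convert the byte count returned by AudioRecord.getMinBufferSize() into a frame count by dividing by the number of bytes per frame. A short, purely illustrative sanity check of that arithmetic (the concrete numbers are assumptions, not values from either project):

// Illustrative only: for 48 kHz stereo 16-bit PCM, one frame is 2 channels * 2 bytes = 4 bytes,
// so a device reporting a 3840-byte minimum buffer yields 3840 / 4 = 960 frames (20 ms at 48 kHz).
int minBytes = AudioRecord.getMinBufferSize(
        48000, AudioFormat.CHANNEL_IN_STEREO, AudioFormat.ENCODING_PCM_16BIT);
int bytesPerFrame = 2 /* channels */ * 2 /* bytes per 16-bit sample */;
int minFrames = minBytes / bytesPerFrame;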
Example 11
Source File: WavFileHelper.java From video-quickstart-android with MIT License

private int getChannelMask(int channels) {
    switch (channels) {
        case 1:
            return AudioFormat.CHANNEL_IN_MONO;
        case 2:
            return AudioFormat.CHANNEL_IN_STEREO;
    }
    return AudioFormat.CHANNEL_IN_STEREO;
}
Example 12
Source File: RecordingActivity.java From Android-Audio-Recorder with Apache License 2.0

EncoderInfo getInfo() {
    final int channels = RawSamples.CHANNEL_CONFIG == AudioFormat.CHANNEL_IN_STEREO ? 2 : 1;
    final int bps = RawSamples.AUDIO_FORMAT == AudioFormat.ENCODING_PCM_16BIT ? 16 : 8;
    return new EncoderInfo(channels, sampleRate, bps);
}
Example 13
Source File: MicOpusRecorder.java From DeviceConnect-Android with MIT License

/**
 * Records audio and passes it to the MediaCodec.
 */
private void recordAudio() throws NativeInterfaceException {
    int samplingRate = mSamplingRate.getValue();
    int channels = mChannels == 1 ? AudioFormat.CHANNEL_IN_MONO : AudioFormat.CHANNEL_IN_STEREO;
    int audioFormat = AudioFormat.ENCODING_PCM_16BIT;
    int bufferSize = AudioRecord.getMinBufferSize(samplingRate, channels, audioFormat) * 4;
    int oneFrameDataCount = mSamplingRate.getValue() / mFrameSize.getFps();

    mAudioRecord = new AudioRecord(MediaRecorder.AudioSource.DEFAULT,
            samplingRate, channels, audioFormat, bufferSize);

    if (mAudioRecord.getState() != AudioRecord.STATE_INITIALIZED) {
        if (mAudioRecordCallback != null) {
            mAudioRecordCallback.onEncoderError();
        }
        return;
    }

    if (mUseAEC && AcousticEchoCanceler.isAvailable()) {
        // noise canceller
        mEchoCanceler = AcousticEchoCanceler.create(mAudioRecord.getAudioSessionId());
        if (mEchoCanceler != null) {
            int ret = mEchoCanceler.setEnabled(true);
            if (ret != AudioEffect.SUCCESS) {
                if (DEBUG) {
                    Log.w(TAG, "AcousticEchoCanceler is not supported.");
                }
            }
        }
    }

    OpusEncoder opusEncoder = null;
    try {
        opusEncoder = new OpusEncoder(mSamplingRate, mChannels, mFrameSize, mBitRate, mApplication);

        mAudioRecord.startRecording();

        short[] emptyBuffer = new short[oneFrameDataCount];
        short[] pcmBuffer = new short[oneFrameDataCount];
        byte[] opusFrameBuffer = opusEncoder.bufferAllocate();
        while (!mStopFlag) {
            int readSize = mAudioRecord.read(pcmBuffer, 0, oneFrameDataCount);
            if (readSize > 0) {
                int opusFrameBufferLength;
                if (isMute()) {
                    opusFrameBufferLength = opusEncoder.encode(emptyBuffer, readSize, opusFrameBuffer);
                } else {
                    opusFrameBufferLength = opusEncoder.encode(pcmBuffer, readSize, opusFrameBuffer);
                }

                if (opusFrameBufferLength > 0 && mAudioRecordCallback != null) {
                    mAudioRecordCallback.onPeriodicNotification(opusFrameBuffer, opusFrameBufferLength);
                }
            } else if (readSize == AudioRecord.ERROR_INVALID_OPERATION) {
                if (DEBUG) {
                    Log.e(TAG, "Invalid operation error.");
                }
                break;
            } else if (readSize == AudioRecord.ERROR_BAD_VALUE) {
                if (DEBUG) {
                    Log.e(TAG, "Bad value error.");
                }
                break;
            } else if (readSize == AudioRecord.ERROR) {
                if (DEBUG) {
                    Log.e(TAG, "Unknown error.");
                }
                break;
            }
        }
    } finally {
        if (mEchoCanceler != null) {
            mEchoCanceler.release();
            mEchoCanceler = null;
        }
        if (opusEncoder != null) {
            opusEncoder.release();
        }
    }
}
Example 14
Source File: AudioCaptureThread.java From VideoRecorder with Apache License 2.0

public AudioCaptureThread(AudioEncoder encoder, int sampleRate, int frames, int channelCount) {
    mAudioEncoder = encoder;
    mSampleRate = sampleRate;
    mFrames = frames;
    mChannelConfig = channelCount == 2 ? AudioFormat.CHANNEL_IN_STEREO : AudioFormat.CHANNEL_IN_MONO;
}
Example 15
Source File: RNAudioRecordModule.java From react-native-audio-record with MIT License

@ReactMethod
public void init(ReadableMap options) {
    sampleRateInHz = 44100;
    if (options.hasKey("sampleRate")) {
        sampleRateInHz = options.getInt("sampleRate");
    }

    channelConfig = AudioFormat.CHANNEL_IN_MONO;
    if (options.hasKey("channels")) {
        if (options.getInt("channels") == 2) {
            channelConfig = AudioFormat.CHANNEL_IN_STEREO;
        }
    }

    audioFormat = AudioFormat.ENCODING_PCM_16BIT;
    if (options.hasKey("bitsPerSample")) {
        if (options.getInt("bitsPerSample") == 8) {
            audioFormat = AudioFormat.ENCODING_PCM_8BIT;
        }
    }

    audioSource = AudioSource.VOICE_RECOGNITION;
    if (options.hasKey("audioSource")) {
        audioSource = options.getInt("audioSource");
    }

    String documentDirectoryPath = getReactApplicationContext().getFilesDir().getAbsolutePath();
    outFile = documentDirectoryPath + "/" + "audio.wav";
    tmpFile = documentDirectoryPath + "/" + "temp.pcm";
    if (options.hasKey("wavFile")) {
        String fileName = options.getString("wavFile");
        outFile = documentDirectoryPath + "/" + fileName;
    }

    isRecording = false;
    eventEmitter = reactContext.getJSModule(DeviceEventManagerModule.RCTDeviceEventEmitter.class);

    bufferSize = AudioRecord.getMinBufferSize(sampleRateInHz, channelConfig, audioFormat);
    int recordingBufferSize = bufferSize * 3;
    recorder = new AudioRecord(audioSource, sampleRateInHz, channelConfig, audioFormat, recordingBufferSize);
}
Example 16
Source File: StreamPublisher.java From AndroidInstantVideo with Apache License 2.0

public StreamPublisherParam() {
    this(640, 480, 2949120, 30, 5, 44100, 192000,
            MediaRecorder.AudioSource.MIC, AudioFormat.CHANNEL_IN_STEREO);
}
Example 17
Source File: WebRtcAudioRecord.java From webrtc_android with MIT License

private int channelCountToConfiguration(int channels) {
    return (channels == 1 ? AudioFormat.CHANNEL_IN_MONO : AudioFormat.CHANNEL_IN_STEREO);
}
Example 18
Source File: AudioQuality.java From DeviceConnect-Android with MIT License

/**
 * Sets the channel configuration.
 *
 * <p>
 * By default, {@link AudioFormat#CHANNEL_IN_MONO} is set.
 * </p>
 *
 * @param channel {@link AudioFormat#CHANNEL_IN_STEREO} or {@link AudioFormat#CHANNEL_IN_MONO}
 */
public void setChannel(int channel) {
    if (channel != AudioFormat.CHANNEL_IN_STEREO && channel != AudioFormat.CHANNEL_IN_MONO) {
        throw new IllegalArgumentException("Not supported a channel. channel=" + channel);
    }
    mChannel = channel;
}
Example 19
Source File: AudioQuality.java From DeviceConnect-Android with MIT License

/**
 * Gets the number of channels.
 *
 * @return the number of channels
 */
public int getChannelCount() {
    return mChannel == AudioFormat.CHANNEL_IN_STEREO ? 2 : 1;
}