Java Code Examples for android.media.AudioRecord#ERROR_BAD_VALUE
The following examples show how to use android.media.AudioRecord#ERROR_BAD_VALUE.
Each example is taken from an open-source project; the source file, originating project, and license are noted above each snippet.
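Before the project examples, here is a minimal, self-contained sketch of the two places ERROR_BAD_VALUE typically shows up: AudioRecord.getMinBufferSize() returns it when the requested sample rate, channel configuration, or encoding is unsupported, and AudioRecord.read() returns it when the read arguments are invalid. The class and method names (AudioRecordSketch, open, readOnce) are illustrative only and do not come from any of the projects listed below; the app must also hold the RECORD_AUDIO permission.

import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder;

// Hypothetical helper; shows the two common ERROR_BAD_VALUE checks.
final class AudioRecordSketch {

    // Returns a started AudioRecord, or null if the parameters are rejected.
    static AudioRecord open(int sampleRate) {
        int minBufferSize = AudioRecord.getMinBufferSize(sampleRate,
                AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
        if (minBufferSize == AudioRecord.ERROR_BAD_VALUE || minBufferSize == AudioRecord.ERROR) {
            return null; // unsupported rate/channel/encoding combination
        }
        AudioRecord record = new AudioRecord(MediaRecorder.AudioSource.MIC,
                sampleRate, AudioFormat.CHANNEL_IN_MONO,
                AudioFormat.ENCODING_PCM_16BIT, minBufferSize);
        if (record.getState() != AudioRecord.STATE_INITIALIZED) {
            record.release(); // device refused this configuration
            return null;
        }
        record.startRecording();
        return record;
    }

    // Reads one buffer, treating ERROR_BAD_VALUE / ERROR_INVALID_OPERATION as failures.
    static int readOnce(AudioRecord record, byte[] buffer) {
        int read = record.read(buffer, 0, buffer.length);
        if (read == AudioRecord.ERROR_BAD_VALUE || read == AudioRecord.ERROR_INVALID_OPERATION) {
            return -1; // invalid arguments, or the recorder was not started
        }
        return read;
    }
}

Most of the examples that follow combine exactly these two checks with a retry loop over candidate sample rates, channel configurations, and encodings.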
Example 1
Source File: VoiceRecorder.java From black-mirror with MIT License
/**
 * Creates a new {@link AudioRecord}.
 *
 * @return A newly created {@link AudioRecord}, or null if it cannot be created (missing
 * permissions?).
 */
private AudioRecord createAudioRecord() {
    for (int sampleRate : SAMPLE_RATE_CANDIDATES) {
        final int sizeInBytes = AudioRecord.getMinBufferSize(sampleRate, CHANNEL, ENCODING);
        if (sizeInBytes == AudioRecord.ERROR_BAD_VALUE) {
            continue;
        }
        final AudioRecord audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC,
                sampleRate, CHANNEL, ENCODING, sizeInBytes);
        if (audioRecord.getState() == AudioRecord.STATE_INITIALIZED) {
            mBuffer = new byte[sizeInBytes];
            return audioRecord;
        } else {
            audioRecord.release();
        }
    }
    return null;
}
Example 2
Source File: AudioCapturer.java From Android with Apache License 2.0
@Override
public void run() {
    while (!mIsLoopExit) {
        byte[] buffer = new byte[mMinBufferSize];
        int ret = mAudioRecord.read(buffer, 0, mMinBufferSize);
        if (ret == AudioRecord.ERROR_INVALID_OPERATION) {
            Log.e(TAG, "Error ERROR_INVALID_OPERATION");
        } else if (ret == AudioRecord.ERROR_BAD_VALUE) {
            Log.e(TAG, "Error ERROR_BAD_VALUE");
        } else {
            if (mAudioFrameCapturedListener != null) {
                mAudioFrameCapturedListener.onAudioFrameCaptured(buffer);
            }
            Log.d(TAG, "OK, Captured " + ret + " bytes !");
        }
        SystemClock.sleep(10);
    }
}
Example 3
Source File: ExtAudioCapture.java From PLDroidRTCStreaming with Apache License 2.0
@Override
public void run() {
    while (!mIsLoopExit) {
        byte[] buffer = new byte[SAMPLES_PER_FRAME * 2];
        int ret = mAudioRecord.read(buffer, 0, buffer.length);
        if (ret == AudioRecord.ERROR_INVALID_OPERATION) {
            Log.e(TAG, "Error ERROR_INVALID_OPERATION");
        } else if (ret == AudioRecord.ERROR_BAD_VALUE) {
            Log.e(TAG, "Error ERROR_BAD_VALUE");
        } else {
            if (mOnAudioFrameCapturedListener != null) {
                mOnAudioFrameCapturedListener.onAudioFrameCaptured(buffer, System.nanoTime());
            }
        }
    }
}
Example 4
Source File: ExtAudioCapture.java From PLDroidRTCStreaming with Apache License 2.0
public boolean startCapture(int audioSource, int sampleRateInHz, int channelConfig, int audioFormat) {
    if (mIsCaptureStarted) {
        Log.e(TAG, "Capture already started !");
        return false;
    }

    int minBufferSize = AudioRecord.getMinBufferSize(sampleRateInHz, channelConfig, audioFormat);
    if (minBufferSize == AudioRecord.ERROR_BAD_VALUE) {
        Log.e(TAG, "Invalid parameter !");
        return false;
    }

    mAudioRecord = new AudioRecord(audioSource, sampleRateInHz, channelConfig, audioFormat,
            minBufferSize * 4);
    if (mAudioRecord.getState() == AudioRecord.STATE_UNINITIALIZED) {
        Log.e(TAG, "AudioRecord initialize fail !");
        return false;
    }

    mAudioRecord.startRecording();

    mIsLoopExit = false;
    mCaptureThread = new Thread(new AudioCaptureRunnable());
    mCaptureThread.start();

    mIsCaptureStarted = true;

    Log.d(TAG, "Start audio capture success !");

    return true;
}
Example 5
Source File: SampleRateCalculator.java From voice-pitch-analyzer with GNU Affero General Public License v3.0
public static int getMaxSupportedSampleRate() {
    /*
     * Valid audio sample rates
     *
     * @see <a
     * href="http://en.wikipedia.org/wiki/Sampling_%28signal_processing%29"
     * >Wikipedia</a>
     */
    final int validSampleRates[] = new int[]{
            47250, 44100, 44056, 37800, 32000, 22050, 16000, 11025, 4800, 8000,};

    /*
     * Selecting the default audio input source for recording, since
     * AudioFormat.CHANNEL_CONFIGURATION_DEFAULT is deprecated, and selecting
     * the default encoding format.
     */
    for (int i = 0; i < validSampleRates.length; i++) {
        int result = AudioRecord.getMinBufferSize(validSampleRates[i],
                android.media.AudioFormat.CHANNEL_IN_MONO,
                android.media.AudioFormat.ENCODING_PCM_16BIT);
        if (result != AudioRecord.ERROR && result != AudioRecord.ERROR_BAD_VALUE && result > 0) {
            // return the minimum supported audio sample rate
            return validSampleRates[i];
        }
    }
    // If none of the sample rates are supported, return -1 and handle it in
    // the calling method
    return -1;
}
Example 6
Source File: AudioCapturer.java From Android with Apache License 2.0
public boolean startCapture(int audioSource, int sampleRateInHz, int channelConfig, int audioFormat) {
    if (mIsCaptureStarted) {
        Log.e(TAG, "Capture already started !");
        return false;
    }

    mMinBufferSize = AudioRecord.getMinBufferSize(sampleRateInHz, channelConfig, audioFormat);
    if (mMinBufferSize == AudioRecord.ERROR_BAD_VALUE) {
        Log.e(TAG, "Invalid parameter !");
        return false;
    }
    Log.d(TAG, "getMinBufferSize = " + mMinBufferSize + " bytes !");

    mAudioRecord = new AudioRecord(audioSource, sampleRateInHz, channelConfig, audioFormat,
            mMinBufferSize);
    if (mAudioRecord.getState() == AudioRecord.STATE_UNINITIALIZED) {
        Log.e(TAG, "AudioRecord initialize fail !");
        return false;
    }

    mAudioRecord.startRecording();

    mIsLoopExit = false;
    mCaptureThread = new Thread(new AudioCaptureRunnable());
    mCaptureThread.start();

    mIsCaptureStarted = true;

    Log.d(TAG, "Start audio capture success !");

    return true;
}
Example 7
Source File: PullTransport.java From OmRecorder with Apache License 2.0
@Override
void startPoolingAndWriting(AudioRecord audioRecord, int pullSizeInBytes,
        OutputStream outputStream) throws IOException {
    AudioChunk audioChunk = new AudioChunk.Bytes(new byte[pullSizeInBytes]);
    while (pullableSource.isEnableToBePulled()) {
        audioChunk.readCount(audioRecord.read(audioChunk.toBytes(), 0, pullSizeInBytes));
        if (AudioRecord.ERROR_INVALID_OPERATION != audioChunk.readCount()
                && AudioRecord.ERROR_BAD_VALUE != audioChunk.readCount()) {
            if (onAudioChunkPulledListener != null) {
                postPullEvent(audioChunk);
            }
            writeAction.execute(audioChunk, outputStream);
        }
    }
}
Example 8
Source File: MediaAudioEncoder.java From EZFilter with MIT License
/**
 * Finds a usable audio recorder.
 *
 * @return
 */
private AudioRecord findAudioRecord() {
    int[] samplingRates = new int[]{44100, 22050, 11025, 8000};
    int[] audioFormats = new int[]{
            AudioFormat.ENCODING_PCM_16BIT,
            AudioFormat.ENCODING_PCM_8BIT};
    int[] channelConfigs = new int[]{
            AudioFormat.CHANNEL_IN_STEREO,
            AudioFormat.CHANNEL_IN_MONO};

    for (int rate : samplingRates) {
        for (int format : audioFormats) {
            for (int config : channelConfigs) {
                try {
                    int bufferSize = AudioRecord.getMinBufferSize(rate, config, format);
                    if (bufferSize != AudioRecord.ERROR_BAD_VALUE) {
                        for (int source : AUDIO_SOURCES) {
                            AudioRecord recorder = new AudioRecord(source, rate, config, format,
                                    bufferSize * 4);
                            if (recorder.getState() == AudioRecord.STATE_INITIALIZED) {
                                mSamplingRate = rate;
                                return recorder;
                            }
                        }
                    }
                } catch (Exception e) {
                    Log.e(TAG, "Init AudioRecord Error." + Log.getStackTraceString(e));
                }
            }
        }
    }
    return null;
}
Example 9
Source File: AACEncoder.java From AndroidInstantVideo with Apache License 2.0
public void start() {
    mAudioRecord.startRecording();
    mMediaCodec.start();

    final long startWhen = System.nanoTime();
    final ByteBuffer[] inputBuffers = mMediaCodec.getInputBuffers();

    mThread = new Thread(new Runnable() {
        @Override
        public void run() {
            int len, bufferIndex;
            while (isStart && !Thread.interrupted()) {
                synchronized (mMediaCodec) {
                    if (!isStart) return;
                    bufferIndex = mMediaCodec.dequeueInputBuffer(10000);
                    if (bufferIndex >= 0) {
                        inputBuffers[bufferIndex].clear();
                        long presentationTimeNs = System.nanoTime();
                        len = mAudioRecord.read(inputBuffers[bufferIndex], bufferSize);
                        presentationTimeNs -= (len / samplingRate) / 1000000000;
                        Loggers.i(TAG, "Index: " + bufferIndex + " len: " + len
                                + " buffer_capacity: " + inputBuffers[bufferIndex].capacity());
                        long presentationTimeUs = (presentationTimeNs - startWhen) / 1000;
                        if (len == AudioRecord.ERROR_INVALID_OPERATION
                                || len == AudioRecord.ERROR_BAD_VALUE) {
                            Log.e(TAG, "An error occurred with the AudioRecord API !");
                        } else {
                            mMediaCodec.queueInputBuffer(bufferIndex, 0, len, presentationTimeUs, 0);
                            if (onDataComingCallback != null) {
                                onDataComingCallback.onComing();
                            }
                        }
                    }
                }
            }
        }
    });
    mThread.start();

    isStart = true;
}
Example 10
Source File: AudioProcess.java From NoiseCapture with GNU General Public License v3.0
/**
 * Constructor
 * @param recording Recording state
 * @param canceled Canceled state
 * @param customLeqProcessing Custom receiver of sound signals
 */
public AudioProcess(AtomicBoolean recording, AtomicBoolean canceled,
        ProcessingThread customLeqProcessing) {
    this.recording = recording;
    this.canceled = canceled;
    this.customLeqProcessing = customLeqProcessing;
    // A-weighting coefficients are based on a 44100 Hz sampling rate, so we do not
    // support other sample rates (22050, 16000, 11025, 8000)
    final int[] mSampleRates = new int[] {44100};
    final int[] encodings = new int[] {
            AudioFormat.ENCODING_PCM_16BIT, AudioFormat.ENCODING_PCM_8BIT };
    final short[] audioChannels = new short[] {
            AudioFormat.CHANNEL_IN_MONO, AudioFormat.CHANNEL_IN_STEREO };
    for (int tryRate : mSampleRates) {
        for (int tryEncoding : encodings) {
            for (short tryAudioChannel : audioChannels) {
                int tryBufferSize = AudioRecord.getMinBufferSize(tryRate, tryAudioChannel, tryEncoding);
                if (tryBufferSize != AudioRecord.ERROR_BAD_VALUE) {
                    // Take a higher buffer size in order to get a smooth recording under load,
                    // avoiding buffer overflow errors on the AudioRecord side.
                    bufferSize = Math.max(tryBufferSize,
                            (int) (AcousticIndicators.TIMEPERIOD_FAST * tryRate));
                    encoding = tryEncoding;
                    audioChannel = tryAudioChannel;
                    rate = tryRate;
                    this.fastLeqProcessing = new LeqProcessingThread(this,
                            AcousticIndicators.TIMEPERIOD_FAST, true,
                            hannWindowFast ? FFTSignalProcessing.WINDOW_TYPE.TUKEY
                                    : FFTSignalProcessing.WINDOW_TYPE.RECTANGULAR,
                            PROP_MOVING_SPECTRUM, true);
                    this.slowLeqProcessing = new LeqProcessingThread(this,
                            AcousticIndicators.TIMEPERIOD_SLOW, true,
                            hannWindowOneSecond ? FFTSignalProcessing.WINDOW_TYPE.TUKEY
                                    : FFTSignalProcessing.WINDOW_TYPE.RECTANGULAR,
                            PROP_DELAYED_STANDART_PROCESSING, false);
                    return;
                }
            }
        }
    }
    throw new IllegalStateException("This device is not compatible");
}
Example 11
Source File: AACHelper.java From CameraV with GNU General Public License v3.0
private int initAudioRecord(int rate) {
    try {
        Log.v("===========Attempting rate ", rate + "Hz, bits: " + audioFormat
                + ", channel: " + channelConfig);
        bufferSize = AudioRecord.getMinBufferSize(rate, channelConfig, audioFormat);

        if (bufferSize != AudioRecord.ERROR_BAD_VALUE) {
            // check if we can instantiate and have a success
            recorder = new AudioRecord(AudioSource.MIC, rate, channelConfig, audioFormat, bufferSize);

            if (recorder.getState() == AudioRecord.STATE_INITIALIZED) {
                Log.v("===========final rate ", rate + "Hz, bits: " + audioFormat
                        + ", channel: " + channelConfig);
                return rate;
            }
        }
    } catch (Exception e) {
        Log.v("error", "" + rate);
    }
    return -1;
}
Example 12
Source File: MicrophoneEncoder.java From cineio-broadcast-android with MIT License
private void sendAudioToEncoder(boolean endOfStream) {
    // send current frame data to encoder
    try {
        ByteBuffer[] inputBuffers = mMediaCodec.getInputBuffers();
        audioInputBufferIndex = mMediaCodec.dequeueInputBuffer(-1);
        if (audioInputBufferIndex >= 0) {
            ByteBuffer inputBuffer = inputBuffers[audioInputBufferIndex];
            inputBuffer.clear();
            audioInputLength = mAudioRecord.read(inputBuffer, SAMPLES_PER_FRAME * 2);
            audioAbsolutePtsUs = (System.nanoTime()) / 1000L;
            // We divide audioInputLength by 2 because audio samples are
            // 16bit.
            audioAbsolutePtsUs = getJitterFreePTS(audioAbsolutePtsUs, audioInputLength / 2);

            if (audioInputLength == AudioRecord.ERROR_INVALID_OPERATION)
                Log.e(TAG, "Audio read error: invalid operation");
            if (audioInputLength == AudioRecord.ERROR_BAD_VALUE)
                Log.e(TAG, "Audio read error: bad value");
            // if (VERBOSE)
            //     Log.i(TAG, "queueing " + audioInputLength + " audio bytes with pts " + audioAbsolutePtsUs);
            if (endOfStream) {
                if (VERBOSE) Log.i(TAG, "EOS received in sendAudioToEncoder");
                mMediaCodec.queueInputBuffer(audioInputBufferIndex, 0, audioInputLength,
                        audioAbsolutePtsUs, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
            } else {
                mMediaCodec.queueInputBuffer(audioInputBufferIndex, 0, audioInputLength,
                        audioAbsolutePtsUs, 0);
            }
        }
    } catch (Throwable t) {
        Log.e(TAG, "_offerAudioEncoder exception");
        t.printStackTrace();
    }
}
Example 13
Source File: RecordAudioTester.java From PermissionAgent with Apache License 2.0
private static AudioRecord findAudioRecord() {
    for (int rate : RATES) {
        for (short format : new short[] {AudioFormat.ENCODING_PCM_8BIT, AudioFormat.ENCODING_PCM_16BIT}) {
            for (short channel : new short[] {AudioFormat.CHANNEL_IN_MONO, AudioFormat.CHANNEL_IN_STEREO}) {
                int buffer = AudioRecord.getMinBufferSize(rate, channel, format);
                if (buffer != AudioRecord.ERROR_BAD_VALUE) {
                    AudioRecord recorder =
                            new AudioRecord(MediaRecorder.AudioSource.MIC, rate, channel, format, buffer);
                    if (recorder.getState() == AudioRecord.STATE_INITIALIZED) return recorder;
                }
            }
        }
    }
    return null;
}
Example 14
Source File: StreamAudioRecorder.java From RxAndroidAudio with MIT License
private void onError(int errorCode) {
    if (errorCode == AudioRecord.ERROR_INVALID_OPERATION) {
        Log.w(TAG, "record fail: ERROR_INVALID_OPERATION");
        mAudioDataCallback.onError();
    } else if (errorCode == AudioRecord.ERROR_BAD_VALUE) {
        Log.w(TAG, "record fail: ERROR_BAD_VALUE");
        mAudioDataCallback.onError();
    }
}
Example 15
Source File: ExtAudioCapture.java From PLDroidMediaStreaming with Apache License 2.0
@Override
public void run() {
    while (!mIsLoopExit) {
        int ret = mAudioRecord.read(mAudioSrcBuffer, 0, mAudioSrcBuffer.length);
        if (ret == AudioRecord.ERROR_INVALID_OPERATION) {
            Log.e(TAG, "Error ERROR_INVALID_OPERATION");
        } else if (ret == AudioRecord.ERROR_BAD_VALUE) {
            Log.e(TAG, "Error ERROR_BAD_VALUE");
        } else {
            if (mOnAudioFrameCapturedListener != null) {
                mOnAudioFrameCapturedListener.onAudioFrameCaptured(mAudioSrcBuffer);
            }
        }
    }
}
Example 16
Source File: AudioCapture.java From AudioVideoCodec with Apache License 2.0
public void start(int audioSource, int sampleRate, int channels, int audioFormat) {
    if (isStartRecord) {
        if (isDebug) Log.d(TAG, "Audio recording has already started");
        return;
    }
    // Implementations differ between vendors
    bufferSize = AudioRecord.getMinBufferSize(sampleRate, channels, audioFormat);
    if (bufferSize == AudioRecord.ERROR_BAD_VALUE) {
        if (isDebug) Log.d(TAG, "Invalid parameter");
        return;
    }
    if (isDebug) Log.d(TAG, "bufferSize = ".concat(String.valueOf(bufferSize)).concat("byte"));
    audioRecord = new AudioRecord(AudioCapture.AUDIO_SOURCE, sampleRate, channels, audioFormat, bufferSize);
    audioRecord.startRecording();

    isStopRecord = false;
    threadCapture = new Thread(new CaptureRunnable());
    threadCapture.start();
    isStartRecord = true;

    if (isDebug) {
        Log.d(TAG, "Audio recording started...");
    }
}
Example 17
Source File: AACStream.java From libstreaming with Apache License 2.0
@Override
@SuppressLint({ "InlinedApi", "NewApi" })
protected void encodeWithMediaCodec() throws IOException {
    final int bufferSize = AudioRecord.getMinBufferSize(mQuality.samplingRate,
            AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT) * 2;

    ((AACLATMPacketizer) mPacketizer).setSamplingRate(mQuality.samplingRate);

    mAudioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, mQuality.samplingRate,
            AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, bufferSize);
    mMediaCodec = MediaCodec.createEncoderByType("audio/mp4a-latm");
    MediaFormat format = new MediaFormat();
    format.setString(MediaFormat.KEY_MIME, "audio/mp4a-latm");
    format.setInteger(MediaFormat.KEY_BIT_RATE, mQuality.bitRate);
    format.setInteger(MediaFormat.KEY_CHANNEL_COUNT, 1);
    format.setInteger(MediaFormat.KEY_SAMPLE_RATE, mQuality.samplingRate);
    format.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
    format.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, bufferSize);
    mMediaCodec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
    mAudioRecord.startRecording();
    mMediaCodec.start();

    final MediaCodecInputStream inputStream = new MediaCodecInputStream(mMediaCodec);
    final ByteBuffer[] inputBuffers = mMediaCodec.getInputBuffers();

    mThread = new Thread(new Runnable() {
        @Override
        public void run() {
            int len = 0, bufferIndex = 0;
            try {
                while (!Thread.interrupted()) {
                    bufferIndex = mMediaCodec.dequeueInputBuffer(10000);
                    if (bufferIndex >= 0) {
                        inputBuffers[bufferIndex].clear();
                        len = mAudioRecord.read(inputBuffers[bufferIndex], bufferSize);
                        if (len == AudioRecord.ERROR_INVALID_OPERATION
                                || len == AudioRecord.ERROR_BAD_VALUE) {
                            Log.e(TAG, "An error occurred with the AudioRecord API !");
                        } else {
                            //Log.v(TAG, "Pushing raw audio to the decoder: len=" + len + " bs: " + inputBuffers[bufferIndex].capacity());
                            mMediaCodec.queueInputBuffer(bufferIndex, 0, len, System.nanoTime() / 1000, 0);
                        }
                    }
                }
            } catch (RuntimeException e) {
                e.printStackTrace();
            }
        }
    });

    mThread.start();

    // The packetizer encapsulates this stream in an RTP stream and sends it over the network
    mPacketizer.setInputStream(inputStream);
    mPacketizer.start();

    mStreaming = true;
}
Example 18
Source File: MicOpusRecorder.java From DeviceConnect-Android with MIT License
/**
 * Records audio and passes it to MediaCodec.
 */
private void recordAudio() throws NativeInterfaceException {
    int samplingRate = mSamplingRate.getValue();
    int channels = mChannels == 1 ? AudioFormat.CHANNEL_IN_MONO : AudioFormat.CHANNEL_IN_STEREO;
    int audioFormat = AudioFormat.ENCODING_PCM_16BIT;
    int bufferSize = AudioRecord.getMinBufferSize(samplingRate, channels, audioFormat) * 4;
    int oneFrameDataCount = mSamplingRate.getValue() / mFrameSize.getFps();

    mAudioRecord = new AudioRecord(MediaRecorder.AudioSource.DEFAULT,
            samplingRate, channels, audioFormat, bufferSize);

    if (mAudioRecord.getState() != AudioRecord.STATE_INITIALIZED) {
        if (mAudioRecordCallback != null) {
            mAudioRecordCallback.onEncoderError();
        }
        return;
    }

    if (mUseAEC && AcousticEchoCanceler.isAvailable()) {
        // Noise canceller
        mEchoCanceler = AcousticEchoCanceler.create(mAudioRecord.getAudioSessionId());
        if (mEchoCanceler != null) {
            int ret = mEchoCanceler.setEnabled(true);
            if (ret != AudioEffect.SUCCESS) {
                if (DEBUG) {
                    Log.w(TAG, "AcousticEchoCanceler is not supported.");
                }
            }
        }
    }

    OpusEncoder opusEncoder = null;
    try {
        opusEncoder = new OpusEncoder(mSamplingRate, mChannels, mFrameSize, mBitRate, mApplication);

        mAudioRecord.startRecording();

        short[] emptyBuffer = new short[oneFrameDataCount];
        short[] pcmBuffer = new short[oneFrameDataCount];
        byte[] opusFrameBuffer = opusEncoder.bufferAllocate();
        while (!mStopFlag) {
            int readSize = mAudioRecord.read(pcmBuffer, 0, oneFrameDataCount);
            if (readSize > 0) {
                int opusFrameBufferLength;
                if (isMute()) {
                    opusFrameBufferLength = opusEncoder.encode(emptyBuffer, readSize, opusFrameBuffer);
                } else {
                    opusFrameBufferLength = opusEncoder.encode(pcmBuffer, readSize, opusFrameBuffer);
                }

                if (opusFrameBufferLength > 0 && mAudioRecordCallback != null) {
                    mAudioRecordCallback.onPeriodicNotification(opusFrameBuffer, opusFrameBufferLength);
                }
            } else if (readSize == AudioRecord.ERROR_INVALID_OPERATION) {
                if (DEBUG) {
                    Log.e(TAG, "Invalid operation error.");
                }
                break;
            } else if (readSize == AudioRecord.ERROR_BAD_VALUE) {
                if (DEBUG) {
                    Log.e(TAG, "Bad value error.");
                }
                break;
            } else if (readSize == AudioRecord.ERROR) {
                if (DEBUG) {
                    Log.e(TAG, "Unknown error.");
                }
                break;
            }
        }
    } finally {
        if (mEchoCanceler != null) {
            mEchoCanceler.release();
            mEchoCanceler = null;
        }

        if (opusEncoder != null) {
            opusEncoder.release();
        }
    }
}
Example 19
Source File: AudioRecorder.java From connectivity-samples with Apache License 2.0
@Override
protected boolean validSize(int size) {
    return size != AudioRecord.ERROR && size != AudioRecord.ERROR_BAD_VALUE;
}
Example 20
Source File: AudioCapture.java From EvilsLive with MIT License
public boolean startCapture(int audioSource, int sampleRateInHz, int channelConfig, int audioFormat) {
    if (mIsCaptureStarted) {
        Log.e(TAG, "hujd Capture already started !");
        return false;
    }

    mMinBufferSize = AudioRecord.getMinBufferSize(sampleRateInHz, channelConfig, audioFormat);
    if (mMinBufferSize == AudioRecord.ERROR_BAD_VALUE) {
        Log.e(TAG, "hujd Invalid parameter !");
        return false;
    }
    Log.e(TAG, "hujd getMinBufferSize = " + mMinBufferSize + " bytes !");

    mAudioRecord = new AudioRecord(audioSource, sampleRateInHz, channelConfig, audioFormat,
            mMinBufferSize);
    if (mAudioRecord.getState() == AudioRecord.STATE_UNINITIALIZED) {
        Log.e(TAG, "hujd AudioRecord initialize fail !");
        return false;
    }

    mAudioRecord.startRecording();

    mIsLoopExit = false;
    mCaptureThread = new Thread(new AudioCaptureRunnable());
    mCaptureThread.start();

    mIsCaptureStarted = true;

    Log.e(TAG, "hujd Start audio capture success !");

    return true;
}