Java Code Examples for android.media.AudioRecord#getState()
The following examples show how to use android.media.AudioRecord#getState().
You can go to the original project or source file by following the links above each example.
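Most of the examples below follow the same basic pattern: compute a buffer size with AudioRecord.getMinBufferSize(), construct an AudioRecord, and call getState() to verify that the instance actually initialized before recording. The following is a minimal sketch of that pattern only; the method name createRecorder and the 44100 Hz mono 16-bit PCM parameters are illustrative placeholders, not taken from any of the projects listed here.

private AudioRecord createRecorder() {
    // Illustrative parameters; pick values your device and use case support.
    int sampleRate = 44100;
    int minBufferSize = AudioRecord.getMinBufferSize(sampleRate,
            AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
    if (minBufferSize == AudioRecord.ERROR_BAD_VALUE) {
        return null; // the device does not support this configuration
    }
    AudioRecord recorder = new AudioRecord(MediaRecorder.AudioSource.MIC, sampleRate,
            AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, minBufferSize * 2);
    if (recorder.getState() != AudioRecord.STATE_INITIALIZED) {
        // Bad arguments, a missing RECORD_AUDIO permission, or a microphone that is
        // already in use all leave the recorder uninitialized; release it and give up.
        recorder.release();
        return null;
    }
    return recorder;
}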
Example 1
Source File: SpeechRecognizer.java From pocketsphinx-android with BSD 2-Clause "Simplified" License | 6 votes |
/**
 * Creates speech recognizer. Recognizer holds the AudioRecord object, so you
 * need to call {@link release} in order to properly finalize it.
 *
 * @param config The configuration object
 * @throws IOException thrown if audio recorder can not be created for some reason.
 */
protected SpeechRecognizer(Config config) throws IOException {
    decoder = new Decoder(config);
    sampleRate = (int) decoder.getConfig().getFloat("-samprate");
    bufferSize = Math.round(sampleRate * BUFFER_SIZE_SECONDS);
    recorder = new AudioRecord(
            AudioSource.VOICE_RECOGNITION, sampleRate,
            AudioFormat.CHANNEL_IN_MONO,
            AudioFormat.ENCODING_PCM_16BIT, bufferSize * 2);

    if (recorder.getState() == AudioRecord.STATE_UNINITIALIZED) {
        recorder.release();
        throw new IOException(
                "Failed to initialize recorder. Microphone might be already in use.");
    }
}
Example 2
Source File: AudioRecorder.java From react-native-google-nearby-connection with MIT License | 6 votes |
public AudioRecord findAudioRecord() {
    for (int rate : AudioBuffer.POSSIBLE_SAMPLE_RATES) {
        for (short audioFormat : new short[] {
                AudioFormat.ENCODING_PCM_8BIT, AudioFormat.ENCODING_PCM_16BIT }) {
            for (short channelConfig : new short[] {
                    AudioFormat.CHANNEL_IN_MONO, AudioFormat.CHANNEL_IN_STEREO }) {
                try {
                    Log.d(TAG, "Attempting rate " + rate + "Hz, bits: " + audioFormat
                            + ", channel: " + channelConfig);
                    int bufferSize = AudioRecord.getMinBufferSize(rate, channelConfig, audioFormat);

                    if (bufferSize != AudioRecord.ERROR_BAD_VALUE) {
                        // check if we can instantiate and have a success
                        AudioRecord recorder = new AudioRecord(AudioSource.DEFAULT, rate,
                                channelConfig, audioFormat, bufferSize);

                        if (recorder.getState() == AudioRecord.STATE_INITIALIZED) {
                            return recorder;
                        }
                    }
                } catch (Exception e) {
                    Log.e(TAG, rate + "Exception, keep trying.", e);
                }
            }
        }
    }
    return null;
}
Example 3
Source File: MicRecorder.java From ScreenCapture with MIT License | 6 votes |
private static AudioRecord createAudioRecord(int sampleRateInHz, int channelConfig, int audioFormat) {
    int minBytes = AudioRecord.getMinBufferSize(sampleRateInHz, channelConfig, audioFormat);
    if (minBytes <= 0) {
        Log.e(TAG, String.format(Locale.US, "Bad arguments: getMinBufferSize(%d, %d, %d)",
                sampleRateInHz, channelConfig, audioFormat));
        return null;
    }
    AudioRecord record = new AudioRecord(MediaRecorder.AudioSource.MIC,
            sampleRateInHz,
            channelConfig,
            audioFormat,
            minBytes * 2);
    if (record.getState() == AudioRecord.STATE_UNINITIALIZED) {
        Log.e(TAG, String.format(Locale.US, "Bad arguments to new AudioRecord %d, %d, %d",
                sampleRateInHz, channelConfig, audioFormat));
        return null;
    }
    if (VERBOSE) {
        Log.i(TAG, "created AudioRecord " + record + ", MinBufferSize= " + minBytes);
        if (Build.VERSION.SDK_INT >= N) {
            Log.d(TAG, " size in frame " + record.getBufferSizeInFrames());
        }
    }
    return record;
}
Example 4
Source File: AndroidRecorder.java From LingoRecorder with Apache License 2.0 | 6 votes |
@Override
public void startRecording() throws Exception {
    int buffSize = getBufferSize();
    recorder = new AudioRecord(MediaRecorder.AudioSource.MIC,
            recorderProperty.getSampleRate(), channels, audioFormat, buffSize);
    if (recorder.getState() != AudioRecord.STATE_INITIALIZED) {
        throw new RecorderInitException();
    }
    payloadSize = 0;
    recorder.startRecording();
    if (recorder.getRecordingState() != AudioRecord.RECORDSTATE_RECORDING) {
        throw new RecorderStartException();
    }
}
Example 5
Source File: RapidRecognizer.java From RapidSphinx with MIT License | 6 votes |
public RecognizerThread(int timeout) {
    if (timeout != NO_TIMEOUT) {
        this.timeoutSamples = timeout * sampleRate / 1000;
    } else {
        this.timeoutSamples = NO_TIMEOUT;
    }
    this.remainingSamples = this.timeoutSamples;

    // 6 = AudioSource.VOICE_RECOGNITION, 16 = CHANNEL_IN_MONO, 2 = ENCODING_PCM_16BIT
    recorder = new AudioRecord(6, sampleRate, 16, 2, bufferSize * 2);

    if (recorder.getState() == AudioRecord.STATE_UNINITIALIZED) {
        recorder.release();
        try {
            throw new IOException(
                    "Failed to initialize recorder. Microphone might be already in use.");
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}
Example 6
Source File: UEntropyMic.java From bither-android with Apache License 2.0 | 6 votes |
@Override
public void run() {
    int minBufferSize = AudioRecord.getMinBufferSize(SamplePerSec,
            ChannelConfiguration, AudioEncoding);
    if (minBufferSize > MaxBufferSize) {
        bufferSizeBytes = minBufferSize;
    } else {
        bufferSizeBytes = (MaxBufferSize / minBufferSize) * minBufferSize;
    }
    audioRecord = new AudioRecord(android.media.MediaRecorder.AudioSource.MIC,
            SamplePerSec, ChannelConfiguration, AudioEncoding, bufferSizeBytes);
    if (audioRecord.getState() == AudioRecord.STATE_INITIALIZED) {
        audioRecord.startRecording();
        micHandler.post(readRunnable);
        visualizer.setVisibility(View.VISIBLE);
    } else {
        uncaughtException(Thread.currentThread(), new IllegalStateException(
                "startRecording() called on an " + "uninitialized AudioRecord."));
    }
}
Example 7
Source File: ExtAudioCapture.java From PLDroidMediaStreaming with Apache License 2.0 | 5 votes |
public boolean startCapture(int audioSource, int sampleRateInHz, int channelConfig, int audioFormat) {
    if (mIsCaptureStarted) {
        Log.e(TAG, "Capture already started !");
        return false;
    }

    int minBufferSize = AudioRecord.getMinBufferSize(sampleRateInHz, channelConfig, audioFormat);
    if (minBufferSize == AudioRecord.ERROR_BAD_VALUE) {
        Log.e(TAG, "Invalid parameter !");
        return false;
    }

    mAudioRecord = new AudioRecord(audioSource, sampleRateInHz, channelConfig, audioFormat,
            minBufferSize * 4);
    if (mAudioRecord.getState() == AudioRecord.STATE_UNINITIALIZED) {
        Log.e(TAG, "AudioRecord initialize fail !");
        return false;
    }

    mAudioRecord.startRecording();

    mIsLoopExit = false;
    mCaptureThread = new Thread(new AudioCaptureRunnable());
    mCaptureThread.start();

    mIsCaptureStarted = true;

    Log.d(TAG, "Start audio capture success !");

    return true;
}
Example 8
Source File: AACHelper.java From CameraV with GNU General Public License v3.0 | 5 votes |
private int initAudioRecord(int rate) {
    try {
        Log.v("===========Attempting rate ", rate + "Hz, bits: " + audioFormat
                + ", channel: " + channelConfig);
        bufferSize = AudioRecord.getMinBufferSize(rate, channelConfig, audioFormat);

        if (bufferSize != AudioRecord.ERROR_BAD_VALUE) {
            // check if we can instantiate and have a success
            recorder = new AudioRecord(AudioSource.MIC, rate, channelConfig, audioFormat, bufferSize);

            if (recorder.getState() == AudioRecord.STATE_INITIALIZED) {
                Log.v("===========final rate ", rate + "Hz, bits: " + audioFormat
                        + ", channel: " + channelConfig);
                return rate;
            }
        }
    } catch (Exception e) {
        Log.v("error", "" + rate);
    }
    return -1;
}
Example 9
Source File: AudioProcessor.java From guitar-tuner with Apache License 2.0 | 5 votes |
public void init() {
    int bufSize = 16384;
    int avalaibleSampleRates = SAMPLE_RATES.length;
    int i = 0;
    do {
        int sampleRate = SAMPLE_RATES[i];
        int minBufSize = AudioRecord.getMinBufferSize(sampleRate,
                AudioFormat.CHANNEL_IN_MONO,
                AudioFormat.ENCODING_PCM_16BIT);
        if (minBufSize != AudioRecord.ERROR_BAD_VALUE && minBufSize != AudioRecord.ERROR) {
            mAudioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC,
                    sampleRate,
                    AudioFormat.CHANNEL_IN_MONO,
                    AudioFormat.ENCODING_PCM_16BIT,
                    Math.max(bufSize, minBufSize * 4));
        }
        i++;
    } while (i < avalaibleSampleRates
            && (mAudioRecord == null || mAudioRecord.getState() != AudioRecord.STATE_INITIALIZED));
}
Example 10
Source File: AudioUtils.java From android-utils with Apache License 2.0 | 5 votes |
/**
 * Check correct buffer size for your AudioRecord instance
 *
 * @param audioSource          the audio source
 * @param fs                   the sampling rate in Hz
 * @param channelConfiguration the channel configuration
 * @param audioEncoding        the audio encoding
 * @return a buffer size that yields an initialized AudioRecord, or 0 if none was found
 */
public static int getValidBufferSize(int audioSource, int fs, int channelConfiguration,
        int audioEncoding) {
    for (int bufferSize : new int[]{ 256, 512, 1024, 2048, 4096 }) { // add the buffer sizes you wish to check against
        AudioRecord audioRecordTemp = new AudioRecord(audioSource, fs, channelConfiguration,
                audioEncoding, bufferSize);
        if (audioRecordTemp != null && audioRecordTemp.getState() == AudioRecord.STATE_INITIALIZED) {
            return bufferSize;
        }
    }
    return 0;
}
Example 11
Source File: AudioChannel.java From ssj with GNU General Public License v3.0 | 5 votes |
@Override
public void enter(Stream stream_out) throws SSJFatalException {
    //setup android audio middleware
    _recorder = new AudioRecord(MediaRecorder.AudioSource.MIC,
            options.sampleRate.get(),
            options.channelConfig.get().val,
            options.audioFormat.get().val,
            stream_out.tot * 10);

    // AudioRecord.STATE_INITIALIZED == 1
    int state = _recorder.getState();
    if (state != 1) {
        Log.w("unexpected AudioRecord state = " + state);
    }

    if (options.scale.get()) {
        if (options.audioFormat.get() != Cons.AudioFormat.ENCODING_PCM_8BIT
                && options.audioFormat.get() != Cons.AudioFormat.ENCODING_PCM_16BIT) {
            Log.e("unsupported audio format for normalization");
        }

        int numBytes = Microphone.audioFormatSampleBytes(options.audioFormat.get().val);
        _data = new byte[stream_out.num * stream_out.dim * numBytes];
    }

    //startRecording has to be called as close to the first read as possible.
    _recorder.startRecording();
    Log.i("Audio capturing started");
}
Example 12
Source File: AudioCapturer.java From Android with Apache License 2.0 | 5 votes |
public boolean startCapture(int audioSource, int sampleRateInHz, int channelConfig, int audioFormat) {
    if (mIsCaptureStarted) {
        Log.e(TAG, "Capture already started !");
        return false;
    }

    mMinBufferSize = AudioRecord.getMinBufferSize(sampleRateInHz, channelConfig, audioFormat);
    if (mMinBufferSize == AudioRecord.ERROR_BAD_VALUE) {
        Log.e(TAG, "Invalid parameter !");
        return false;
    }
    Log.d(TAG, "getMinBufferSize = " + mMinBufferSize + " bytes !");

    mAudioRecord = new AudioRecord(audioSource, sampleRateInHz, channelConfig, audioFormat,
            mMinBufferSize);
    if (mAudioRecord.getState() == AudioRecord.STATE_UNINITIALIZED) {
        Log.e(TAG, "AudioRecord initialize fail !");
        return false;
    }

    mAudioRecord.startRecording();

    mIsLoopExit = false;
    mCaptureThread = new Thread(new AudioCaptureRunnable());
    mCaptureThread.start();

    mIsCaptureStarted = true;

    Log.d(TAG, "Start audio capture success !");

    return true;
}
Example 13
Source File: SoundLevelDetector.java From sensey with Apache License 2.0 | 5 votes |
private int getValidBufferSize(int audioSource, int fs, int channelConfiguration, int audioEncoding) {
    for (int bufferSize : new int[]{ 256, 512, 1024, 2048, 4096 }) { // add the buffer sizes you wish to check against
        AudioRecord audioRecordTemp = new AudioRecord(audioSource, fs, channelConfiguration,
                audioEncoding, bufferSize);
        if (audioRecordTemp != null && audioRecordTemp.getState() == AudioRecord.STATE_INITIALIZED) {
            return bufferSize;
        }
    }
    return 0;
}
Example 14
Source File: MediaAudioEncoder.java From EZFilter with MIT License | 5 votes |
/**
 * Find a usable audio recorder.
 *
 * @return an initialized AudioRecord, or null if none could be created
 */
private AudioRecord findAudioRecord() {
    int[] samplingRates = new int[]{44100, 22050, 11025, 8000};
    int[] audioFormats = new int[]{
            AudioFormat.ENCODING_PCM_16BIT,
            AudioFormat.ENCODING_PCM_8BIT};
    int[] channelConfigs = new int[]{
            AudioFormat.CHANNEL_IN_STEREO,
            AudioFormat.CHANNEL_IN_MONO};

    for (int rate : samplingRates) {
        for (int format : audioFormats) {
            for (int config : channelConfigs) {
                try {
                    int bufferSize = AudioRecord.getMinBufferSize(rate, config, format);
                    if (bufferSize != AudioRecord.ERROR_BAD_VALUE) {
                        for (int source : AUDIO_SOURCES) {
                            AudioRecord recorder = new AudioRecord(source, rate, config, format,
                                    bufferSize * 4);
                            if (recorder.getState() == AudioRecord.STATE_INITIALIZED) {
                                mSamplingRate = rate;
                                return recorder;
                            }
                        }
                    }
                } catch (Exception e) {
                    Log.e(TAG, "Init AudioRecord Error." + Log.getStackTraceString(e));
                }
            }
        }
    }
    return null;
}
Example 15
Source File: ExtAudioCapture.java From PLDroidRTCStreaming with Apache License 2.0 | 5 votes |
public boolean startCapture(int audioSource, int sampleRateInHz, int channelConfig, int audioFormat) {
    if (mIsCaptureStarted) {
        Log.e(TAG, "Capture already started !");
        return false;
    }

    int minBufferSize = AudioRecord.getMinBufferSize(sampleRateInHz, channelConfig, audioFormat);
    if (minBufferSize == AudioRecord.ERROR_BAD_VALUE) {
        Log.e(TAG, "Invalid parameter !");
        return false;
    }

    mAudioRecord = new AudioRecord(audioSource, sampleRateInHz, channelConfig, audioFormat,
            minBufferSize * 4);
    if (mAudioRecord.getState() == AudioRecord.STATE_UNINITIALIZED) {
        Log.e(TAG, "AudioRecord initialize fail !");
        return false;
    }

    mAudioRecord.startRecording();

    mIsLoopExit = false;
    mCaptureThread = new Thread(new AudioCaptureRunnable());
    mCaptureThread.start();

    mIsCaptureStarted = true;

    Log.d(TAG, "Start audio capture success !");

    return true;
}
Example 16
Source File: RecordAudioTester.java From PermissionAgent with Apache License 2.0 | 5 votes |
private static AudioRecord findAudioRecord() {
    for (int rate : RATES) {
        for (short format : new short[] {AudioFormat.ENCODING_PCM_8BIT, AudioFormat.ENCODING_PCM_16BIT}) {
            for (short channel : new short[] {AudioFormat.CHANNEL_IN_MONO, AudioFormat.CHANNEL_IN_STEREO}) {
                int buffer = AudioRecord.getMinBufferSize(rate, channel, format);
                if (buffer != AudioRecord.ERROR_BAD_VALUE) {
                    AudioRecord recorder = new AudioRecord(MediaRecorder.AudioSource.MIC, rate,
                            channel, format, buffer);
                    if (recorder.getState() == AudioRecord.STATE_INITIALIZED) return recorder;
                }
            }
        }
    }
    return null;
}
Example 17
Source File: AudioSource.java From science-journal with Apache License 2.0 | 4 votes |
private void start() {
    // FYI: the current thread holds lockAudioReceivers.

    // Use VOICE_COMMUNICATION to filter out audio coming from the speakers
    final AudioRecord audioRecord = new AudioRecord(
            MediaRecorder.AudioSource.VOICE_COMMUNICATION,
            SAMPLE_RATE_IN_HZ,
            CHANNEL_CONFIG,
            AUDIO_FORMAT,
            minBufferSizeInBytes);
    if (audioRecord.getState() != AudioRecord.STATE_INITIALIZED) {
        audioRecord.release();
        return;
    }

    audioRecord.startRecording();

    // AudioRecord.startRecording() logs an error but it has no return value and
    // doesn't throw an exception when someone else is using the mic.
    if (audioRecord.getRecordingState() != AudioRecord.RECORDSTATE_RECORDING) {
        audioRecord.release();
        return;
    }

    running.set(true);
    future = executorService.submit(
        () -> {
            short[] buffer = new short[minBufferSizeInBytes / 2];
            int offset = 0;
            boolean goodDataRead = false;
            while (running.get()) {
                int readShorts = audioRecord.read(buffer, offset, buffer.length - offset);

                // On some devices (Moto E, for example) we get a bunch of zeros when we first
                // start reading. Ignore those zeros.
                if (!goodDataRead) {
                    int countLeadingZeros = 0;
                    while (countLeadingZeros < readShorts && buffer[countLeadingZeros] == 0) {
                        countLeadingZeros++;
                    }
                    if (countLeadingZeros > 0) {
                        if (readShorts > countLeadingZeros) {
                            System.arraycopy(
                                buffer, countLeadingZeros, buffer, 0, readShorts - countLeadingZeros);
                        }
                        readShorts -= countLeadingZeros;
                    }
                    goodDataRead = (readShorts > 0);
                }

                offset += readShorts;

                // If the buffer is full, call the Receivers.
                if (offset == buffer.length) {
                    synchronized (lockAudioReceivers) {
                        for (AudioReceiver audioReceiver : audioReceivers) {
                            audioReceiver.onReceiveAudio(buffer);
                        }
                    }
                    offset = 0;
                }
            }

            audioRecord.stop();
            audioRecord.release();
        });
}
Example 18
Source File: MainActivity.java From android-fskmodem with GNU General Public License v3.0 | 4 votes |
@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_main);

    /// INIT FSK CONFIG

    try {
        mConfig = new FSKConfig(FSKConfig.SAMPLE_RATE_44100, FSKConfig.PCM_16BIT,
                FSKConfig.CHANNELS_MONO, FSKConfig.SOFT_MODEM_MODE_4, FSKConfig.THRESHOLD_20P);
    } catch (IOException e1) {
        e1.printStackTrace();
    }

    /// INIT FSK DECODER

    mDecoder = new FSKDecoder(mConfig, new FSKDecoderCallback() {

        @Override
        public void decoded(byte[] newData) {

            final String text = new String(newData);

            runOnUiThread(new Runnable() {
                public void run() {
                    TextView view = ((TextView) findViewById(R.id.result));
                    view.setText(view.getText() + text);
                }
            });
        }
    });

    ///

    //make sure that the settings of the recorder match the settings of the decoder
    //most devices can't record anything but 44100 samples in 16bit PCM format...
    mBufferSize = AudioRecord.getMinBufferSize(FSKConfig.SAMPLE_RATE_44100,
            AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);

    //scale up the buffer... reading larger amounts of data
    //minimizes the chance of missing data because of thread priority
    mBufferSize *= 10;

    //again, make sure the recorder settings match the decoder settings
    mRecorder = new AudioRecord(AudioSource.MIC, FSKConfig.SAMPLE_RATE_44100,
            AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, mBufferSize);

    if (mRecorder.getState() == AudioRecord.STATE_INITIALIZED) {
        mRecorder.startRecording();

        //start a thread to read the audio data
        Thread thread = new Thread(mRecordFeed);
        thread.setPriority(Thread.MAX_PRIORITY);
        thread.start();
    } else {
        Log.i("FSKDecoder", "Please check the recorder settings, something is wrong!");
    }
}
Example 19
Source File: MicOpusRecorder.java From DeviceConnect-Android with MIT License | 4 votes |
/**
 * Records audio and passes it to MediaCodec.
 */
private void recordAudio() throws NativeInterfaceException {
    int samplingRate = mSamplingRate.getValue();
    int channels = mChannels == 1 ? AudioFormat.CHANNEL_IN_MONO : AudioFormat.CHANNEL_IN_STEREO;
    int audioFormat = AudioFormat.ENCODING_PCM_16BIT;
    int bufferSize = AudioRecord.getMinBufferSize(samplingRate, channels, audioFormat) * 4;
    int oneFrameDataCount = mSamplingRate.getValue() / mFrameSize.getFps();

    mAudioRecord = new AudioRecord(MediaRecorder.AudioSource.DEFAULT,
            samplingRate,
            channels,
            audioFormat,
            bufferSize);

    if (mAudioRecord.getState() != AudioRecord.STATE_INITIALIZED) {
        if (mAudioRecordCallback != null) {
            mAudioRecordCallback.onEncoderError();
        }
        return;
    }

    if (mUseAEC && AcousticEchoCanceler.isAvailable()) {
        // Noise canceller
        mEchoCanceler = AcousticEchoCanceler.create(mAudioRecord.getAudioSessionId());
        if (mEchoCanceler != null) {
            int ret = mEchoCanceler.setEnabled(true);
            if (ret != AudioEffect.SUCCESS) {
                if (DEBUG) {
                    Log.w(TAG, "AcousticEchoCanceler is not supported.");
                }
            }
        }
    }

    OpusEncoder opusEncoder = null;
    try {
        opusEncoder = new OpusEncoder(mSamplingRate, mChannels, mFrameSize, mBitRate, mApplication);

        mAudioRecord.startRecording();

        short[] emptyBuffer = new short[oneFrameDataCount];
        short[] pcmBuffer = new short[oneFrameDataCount];
        byte[] opusFrameBuffer = opusEncoder.bufferAllocate();
        while (!mStopFlag) {
            int readSize = mAudioRecord.read(pcmBuffer, 0, oneFrameDataCount);
            if (readSize > 0) {
                int opusFrameBufferLength;
                if (isMute()) {
                    opusFrameBufferLength = opusEncoder.encode(emptyBuffer, readSize, opusFrameBuffer);
                } else {
                    opusFrameBufferLength = opusEncoder.encode(pcmBuffer, readSize, opusFrameBuffer);
                }

                if (opusFrameBufferLength > 0 && mAudioRecordCallback != null) {
                    mAudioRecordCallback.onPeriodicNotification(opusFrameBuffer, opusFrameBufferLength);
                }
            } else if (readSize == AudioRecord.ERROR_INVALID_OPERATION) {
                if (DEBUG) {
                    Log.e(TAG, "Invalid operation error.");
                }
                break;
            } else if (readSize == AudioRecord.ERROR_BAD_VALUE) {
                if (DEBUG) {
                    Log.e(TAG, "Bad value error.");
                }
                break;
            } else if (readSize == AudioRecord.ERROR) {
                if (DEBUG) {
                    Log.e(TAG, "Unknown error.");
                }
                break;
            }
        }
    } finally {
        if (mEchoCanceler != null) {
            mEchoCanceler.release();
            mEchoCanceler = null;
        }

        if (opusEncoder != null) {
            opusEncoder.release();
        }
    }
}
Example 20
Source File: MicAACLATMEncoder.java From DeviceConnect-Android with MIT License | 4 votes |
/**
 * Starts the AudioRecord.
 */
private void startAudioRecord() {
    AudioQuality audioQuality = getAudioQuality();

    mBufferSize = AudioRecord.getMinBufferSize(audioQuality.getSamplingRate(),
            audioQuality.getChannel(), audioQuality.getFormat()) * 2;

    if (DEBUG) {
        Log.d(TAG, "AudioQuality: " + audioQuality);
    }

    mMuteBuffer = new byte[mBufferSize];

    mAudioRecord = new AudioRecord(MediaRecorder.AudioSource.DEFAULT,
            audioQuality.getSamplingRate(),
            audioQuality.getChannel(),
            audioQuality.getFormat(),
            mBufferSize);

    if (mAudioRecord.getState() != AudioRecord.STATE_INITIALIZED) {
        postOnError(new MediaEncoderException("AudioRecord could not be initialized."));
        return;
    }

    if (mAudioQuality.isUseAEC() && AcousticEchoCanceler.isAvailable()) {
        // Noise canceller
        mEchoCanceler = AcousticEchoCanceler.create(mAudioRecord.getAudioSessionId());
        if (mEchoCanceler != null) {
            int ret = mEchoCanceler.setEnabled(true);
            if (ret != AudioEffect.SUCCESS) {
                if (DEBUG) {
                    Log.w(TAG, "AcousticEchoCanceler is not supported.");
                }
            }
        }
    }

    mAudioRecord.startRecording();

    mAudioThread = new AudioRecordThread();
    mAudioThread.setName("MicAACLATMEncoder");
    mAudioThread.start();
}
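On Android 6.0 and later, a common reason for getState() to return STATE_UNINITIALIZED is a missing RECORD_AUDIO runtime permission rather than bad constructor arguments. The helper below is a minimal sketch of such a guard, assuming the androidx.core ContextCompat class is available; the name checkMicPermission is hypothetical and does not come from any of the projects above.

// Hypothetical guard before creating an AudioRecord.
// Uses android.Manifest, android.content.pm.PackageManager and
// androidx.core.content.ContextCompat.
private boolean checkMicPermission(Context context) {
    return ContextCompat.checkSelfPermission(context, Manifest.permission.RECORD_AUDIO)
            == PackageManager.PERMISSION_GRANTED;
}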