Java Code Examples for android.media.AudioRecord#release()
The following examples show how to use android.media.AudioRecord#release().
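All of the examples below follow the same lifecycle: create the AudioRecord, start it, read PCM data, then call stop() and release() when finished so the native recorder and the microphone are freed. The minimal sketch below (not taken from any single example) illustrates that pattern; the 44100 Hz mono/PCM-16 configuration, the MIC audio source, and the buffer sizing are illustrative assumptions, and error handling is kept to a minimum.

// Minimal sketch of the AudioRecord lifecycle used throughout the examples below.
// The sample rate, channel/format configuration, and buffer size are assumptions.
private static void recordOnce() {
    int sampleRate = 44100;
    int minBufferSize = AudioRecord.getMinBufferSize(
            sampleRate, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
    AudioRecord audioRecord = new AudioRecord(
            MediaRecorder.AudioSource.MIC, sampleRate,
            AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, minBufferSize);
    if (audioRecord.getState() != AudioRecord.STATE_INITIALIZED) {
        // Initialization can fail if the microphone is unavailable; release anyway.
        audioRecord.release();
        return;
    }
    try {
        audioRecord.startRecording();
        short[] buffer = new short[minBufferSize / 2];
        int read = audioRecord.read(buffer, 0, buffer.length);
        // ... process "read" samples from buffer ...
    } finally {
        if (audioRecord.getRecordingState() == AudioRecord.RECORDSTATE_RECORDING) {
            audioRecord.stop();
        }
        audioRecord.release();  // always free the native recorder and the microphone
    }
}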
Example 1
Source File: RecordAudioTester.java From PermissionAgent with Apache License 2.0

@Override
public boolean test() throws Throwable {
    AudioRecord audioRecord = findAudioRecord();
    try {
        if (audioRecord != null) {
            audioRecord.startRecording();
        } else {
            return !existMicrophone(mContext);
        }
    } catch (Throwable e) {
        return !existMicrophone(mContext);
    } finally {
        if (audioRecord != null) {
            audioRecord.stop();
            audioRecord.release();
        }
    }
    return true;
}
Example 2
Source File: SpeechRecognizer.java From pocketsphinx-android with BSD 2-Clause "Simplified" License

/**
 * Creates speech recognizer. Recognizer holds the AudioRecord object, so you
 * need to call {@link release} in order to properly finalize it.
 *
 * @param config The configuration object
 * @throws IOException thrown if audio recorder can not be created for some reason.
 */
protected SpeechRecognizer(Config config) throws IOException {
    decoder = new Decoder(config);
    sampleRate = (int) decoder.getConfig().getFloat("-samprate");
    bufferSize = Math.round(sampleRate * BUFFER_SIZE_SECONDS);
    recorder = new AudioRecord(
            AudioSource.VOICE_RECOGNITION, sampleRate,
            AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, bufferSize * 2);

    if (recorder.getState() == AudioRecord.STATE_UNINITIALIZED) {
        recorder.release();
        throw new IOException(
                "Failed to initialize recorder. Microphone might be already in use.");
    }
}
Example 3
Source File: RapidRecognizer.java From RapidSphinx with MIT License

public RecognizerThread(int timeout) {
    if (timeout != NO_TIMEOUT) {
        this.timeoutSamples = timeout * sampleRate / 1000;
    } else {
        this.timeoutSamples = NO_TIMEOUT;
    }
    this.remainingSamples = this.timeoutSamples;

    recorder = new AudioRecord(6, sampleRate, 16, 2, bufferSize * 2);

    if (recorder.getState() == AudioRecord.STATE_UNINITIALIZED) {
        recorder.release();
        try {
            throw new IOException(
                    "Failed to initialize recorder. Microphone might be already in use.");
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}
Example 4
Source File: SoundFile.java From MusicPlayer with GNU General Public License v3.0

private void RecordAudio() {
    if (mProgressListener == null) {
        // A progress listener is mandatory here, as it will let us know when to stop recording.
        return;
    }
    mInputFile = null;
    mFileType = "raw";
    mFileSize = 0;
    mSampleRate = 44100;
    mChannels = 1;  // record mono audio.
    short[] buffer = new short[1024];  // buffer contains 1 mono frame of 1024 16 bits samples
    int minBufferSize = AudioRecord.getMinBufferSize(
            mSampleRate, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
    // make sure minBufferSize can contain at least 1 second of audio (16 bits sample).
    if (minBufferSize < mSampleRate * 2) {
        minBufferSize = mSampleRate * 2;
    }
    AudioRecord audioRecord = new AudioRecord(
            MediaRecorder.AudioSource.DEFAULT,
            mSampleRate,
            AudioFormat.CHANNEL_IN_MONO,
            AudioFormat.ENCODING_PCM_16BIT,
            minBufferSize
    );

    // Allocate memory for 20 seconds first. Reallocate later if more is needed.
    mDecodedBytes = ByteBuffer.allocate(20 * mSampleRate * 2);
    mDecodedBytes.order(ByteOrder.LITTLE_ENDIAN);
    mDecodedSamples = mDecodedBytes.asShortBuffer();
    audioRecord.startRecording();
    while (true) {
        // check if mDecodedSamples can contain 1024 additional samples.
        if (mDecodedSamples.remaining() < 1024) {
            // Try to allocate memory for 10 additional seconds.
            int newCapacity = mDecodedBytes.capacity() + 10 * mSampleRate * 2;
            ByteBuffer newDecodedBytes = null;
            try {
                newDecodedBytes = ByteBuffer.allocate(newCapacity);
            } catch (OutOfMemoryError oome) {
                break;
            }
            int position = mDecodedSamples.position();
            mDecodedBytes.rewind();
            newDecodedBytes.put(mDecodedBytes);
            mDecodedBytes = newDecodedBytes;
            mDecodedBytes.order(ByteOrder.LITTLE_ENDIAN);
            mDecodedBytes.rewind();
            mDecodedSamples = mDecodedBytes.asShortBuffer();
            mDecodedSamples.position(position);
        }
        // TODO(nfaralli): maybe use the read method that takes a direct ByteBuffer argument.
        audioRecord.read(buffer, 0, buffer.length);
        mDecodedSamples.put(buffer);
        // Let the progress listener know how many seconds have been recorded.
        // The returned value tells us if we should keep recording or stop.
        if (!mProgressListener.reportProgress(
                (float) (mDecodedSamples.position()) / mSampleRate)) {
            break;
        }
    }
    audioRecord.stop();
    audioRecord.release();
    mNumSamples = mDecodedSamples.position();
    mDecodedSamples.rewind();
    mDecodedBytes.rewind();
    mAvgBitRate = mSampleRate * 16 / 1000;

    // Temporary hack to make it work with the old version.
    mNumFrames = mNumSamples / getSamplesPerFrame();
    if (mNumSamples % getSamplesPerFrame() != 0) {
        mNumFrames++;
    }
    mFrameGains = new int[mNumFrames];
    mFrameLens = null;     // not needed for recorded audio
    mFrameOffsets = null;  // not needed for recorded audio
    int i, j;
    int gain, value;
    for (i = 0; i < mNumFrames; i++) {
        gain = -1;
        for (j = 0; j < getSamplesPerFrame(); j++) {
            if (mDecodedSamples.remaining() > 0) {
                value = java.lang.Math.abs(mDecodedSamples.get());
            } else {
                value = 0;
            }
            if (gain < value) {
                gain = value;
            }
        }
        mFrameGains[i] = (int) Math.sqrt(gain);  // here gain = sqrt(max value of 1st channel)...
    }
    mDecodedSamples.rewind();
    // DumpSamples();  // Uncomment this line to dump the samples in a TSV file.
}
Example 5
Source File: AppRTCAudioManager.java From Conversations with GNU General Public License v3.0

private static void release(final AudioRecord audioRecord) {
    if (audioRecord == null) {
        return;
    }
    try {
        audioRecord.release();
    } catch (Exception e) {
        // ignore
    }
}
Example 6
Source File: MediaAudioEncoder.java From UVCCameraZxing with Apache License 2.0

@Override
public void run() {
    try {
        final int buf_sz = AudioRecord.getMinBufferSize(
                SAMPLE_RATE, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT) * 4;
        final AudioRecord audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC,
                SAMPLE_RATE, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, buf_sz);
        try {
            if (mIsCapturing) {
                if (DEBUG) Log.v(TAG, "AudioThread:start audio recording");
                final byte[] buf = new byte[buf_sz];
                int readBytes;
                audioRecord.startRecording();
                try {
                    while (mIsCapturing && !mRequestStop && !mIsEOS) {
                        // read audio data from internal mic
                        readBytes = audioRecord.read(buf, 0, buf_sz);
                        if (readBytes > 0) {
                            // set audio data to encoder
                            encode(buf, readBytes, getPTSUs());
                            frameAvailableSoon();
                        }
                    }
                    frameAvailableSoon();
                } finally {
                    audioRecord.stop();
                }
            }
        } finally {
            audioRecord.release();
        }
    } catch (final Exception e) {
        Log.e(TAG, "AudioThread#run", e);
    }
    if (DEBUG) Log.v(TAG, "AudioThread:finished");
}
Example 7
Source File: Recorder.java From VideoAndroid with Apache License 2.0

@Override
public void run() {
    android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);

    // Audio
    int bufferSize;
    ShortBuffer audioData;
    int bufferReadResult;

    bufferSize = AudioRecord.getMinBufferSize(sampleAudioRateInHz,
            AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
    mAudioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, sampleAudioRateInHz,
            AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, bufferSize);

    audioData = ShortBuffer.allocate(bufferSize);

    mAudioRecord.startRecording();

    /* ffmpeg_audio encoding loop */
    while (mRunAudioThread) {
        // read the audio data
        bufferReadResult = mAudioRecord.read(audioData.array(), 0, audioData.capacity());
        audioData.limit(bufferReadResult);
        if (bufferReadResult > 0) {
            if (mFFmpegFrameRecorder != null && mRecording) {
                try {
                    mFFmpegFrameRecorder.recordSamples(audioData);  // write the audio samples
                } catch (FFmpegFrameRecorder.Exception e) {
                    e.printStackTrace();
                }
            }
        }
    }

    /* encoding finished, release recorder */
    if (mAudioRecord != null) {
        mAudioRecord.stop();
        mAudioRecord.release();
    }
}
Example 8
Source File: AppRTCAudioManager.java From Pix-Art-Messenger with GNU General Public License v3.0

private static void release(final AudioRecord audioRecord) {
    if (audioRecord == null) {
        return;
    }
    try {
        audioRecord.release();
    } catch (Exception e) {
        // ignore
    }
}
Example 9
Source File: AudioRecorder.java From TikTok with Apache License 2.0

@Override
public void run() {
    try {
        // initialize the audio recorder
        int bufferSizeInBytes = AudioRecord.getMinBufferSize(audioSampleRate,
                AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
        final AudioRecord audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC,
                audioSampleRate, AudioFormat.CHANNEL_IN_MONO,
                AudioFormat.ENCODING_PCM_16BIT, bufferSizeInBytes);
        if (audioRecord == null) {
            mOnAudioRecorderListener.onNotPermission();
            return;
        }
        audioRecord.startRecording();

        // Use the recording state after startRecording() to decide whether the
        // RECORD_AUDIO permission was granted.
        if (audioRecord.getRecordingState() != AudioRecord.RECORDSTATE_RECORDING
                && audioRecord.getRecordingState() != AudioRecord.RECORDSTATE_STOPPED) {
            // AVLogUtils.e(TAG, "audioRecord.getRecordingState() != AudioRecord.RECORDSTATE_RECORDING : " + audioRecord.getRecordingState());
            isAudioPermission = false;
        }
        if (audioRecord.getRecordingState() == AudioRecord.RECORDSTATE_STOPPED) {
            // If checks are repeated too quickly, the previous AudioRecord may not be fully
            // released yet; the state then reads RECORDSTATE_STOPPED and a subsequent read()
            // returns a size of 0. Return true or false here depending on your needs.
            isAudioPermission = false;
        }

        if (!isAudioPermission) {
            mOnAudioRecorderListener.onNotPermission();
            return;
        }
        mOnAudioRecorderListener.onCanRecord(isAudioPermission);

        byte[] data = new byte[2048];
        while (isRecord) {
            if (audioRecord == null) {
                return;
            }
            int offset = 0;
            while (offset < 2048) {
                int readSize = audioRecord.read(data, offset, data.length - offset);
                offset += readSize;
            }
            if (isAudioRecordWrite) {  // write the buffer to the file
                HeyhouRecorder.getInstance().recordAudioNHW(data, audioSampleRate,
                        HeyhouRecorder.FORMAT_S16, 1024);
            }
        }
        audioRecord.stop();
        audioRecord.release();
    } catch (Exception e) {
        e.printStackTrace();
        mOnAudioRecorderListener.onRecordError("录音失败");  // "recording failed"
    }
}
Example 10
Source File: CheckPermissionUtil.java From TikTok with Apache License 2.0

/**
 * Checks whether the app has the audio-recording permission.
 */
public static boolean isHasAudioPermission(final Context context) {
    int bufferSizeInBytes = 0;
    bufferSizeInBytes = AudioRecord.getMinBufferSize(44100,
            AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
    AudioRecord audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, 44100,
            AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, bufferSizeInBytes);
    // start recording audio
    try {
        // guard against crashes on some devices (e.g. Lenovo)
        audioRecord.startRecording();
    } catch (IllegalStateException e) {
        e.printStackTrace();
        // AVLogUtils.e(TAG, Log.getStackTraceString(e));
    }

    // Use the recording state after startRecording() to decide whether the
    // permission was granted.
    if (audioRecord.getRecordingState() != AudioRecord.RECORDSTATE_RECORDING
            && audioRecord.getRecordingState() != AudioRecord.RECORDSTATE_STOPPED) {
        // AVLogUtils.e(TAG, "audioRecord.getRecordingState() != AudioRecord.RECORDSTATE_RECORDING : " + audioRecord.getRecordingState());
        return false;
    }
    if (audioRecord.getRecordingState() == AudioRecord.RECORDSTATE_STOPPED) {
        // If checks are repeated too quickly, the previous AudioRecord may not be fully
        // released yet; the state then reads RECORDSTATE_STOPPED and a subsequent read()
        // returns a size of 0. Return true or false here depending on your needs.
        return false;
    }

    byte[] bytes = new byte[1024];
    int readSize = audioRecord.read(bytes, 0, 1024);
    if (readSize == AudioRecord.ERROR_INVALID_OPERATION || readSize <= 0) {
        // AVLogUtils.e(TAG, "readSize illegal : " + readSize);
        return false;
    }
    audioRecord.stop();
    audioRecord.release();
    audioRecord = null;

    return true;
}
Example 11
Source File: PermissionUtil.java From AlbumCameraRecorder with MIT License

/**
 * Checks whether the app has the audio-recording permission.
 *
 * @return the resulting state (whether the permission is granted)
 */
public static int getRecordState() {
    int minBuffer = AudioRecord.getMinBufferSize(44100, AudioFormat.CHANNEL_IN_MONO,
            AudioFormat.ENCODING_PCM_16BIT);
    AudioRecord audioRecord = new AudioRecord(MediaRecorder.AudioSource.DEFAULT, 44100,
            AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, (minBuffer * 100));
    short[] point = new short[minBuffer];
    int readSize;
    try {
        audioRecord.startRecording();  // check whether recording can be started at all
    } catch (Exception e) {
        if (audioRecord != null) {
            audioRecord.release();
            audioRecord = null;
        }
        return STATE_NO_PERMISSION;
    }
    if (audioRecord.getRecordingState() != AudioRecord.RECORDSTATE_RECORDING) {
        // Devices below Android 6.0 always return this state, so check the Build version
        // when relying on it. Check whether recording is actually in progress.
        if (audioRecord != null) {
            audioRecord.stop();
            audioRecord.release();
            Log.d("CheckAudioPermission", "录音机被占用");  // "the recorder is already in use"
        }
        return STATE_RECORDING;
    } else {
        // check whether recorded data can actually be read
        readSize = audioRecord.read(point, 0, point.length);
        if (readSize <= 0) {
            if (audioRecord != null) {
                audioRecord.stop();
                audioRecord.release();
            }
            Log.d("CheckAudioPermission", "录音的结果为空");  // "the recording result is empty"
            return STATE_NO_PERMISSION;
        } else {
            if (audioRecord != null) {
                audioRecord.stop();
                audioRecord.release();
            }
            return STATE_SUCCESS;
        }
    }
}
Example 12
Source File: CheckPermission.java From EasyPhotos with Apache License 2.0

/**
 * Checks whether the app has the audio-recording permission.
 *
 * @return the resulting state
 */
public static int getRecordState() {
    int minBuffer = AudioRecord.getMinBufferSize(44100, AudioFormat.CHANNEL_IN_MONO,
            AudioFormat.ENCODING_PCM_16BIT);
    AudioRecord audioRecord = new AudioRecord(MediaRecorder.AudioSource.DEFAULT, 44100,
            AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, (minBuffer * 100));
    short[] point = new short[minBuffer];
    int readSize = 0;
    try {
        audioRecord.startRecording();  // check whether recording can be started at all
    } catch (Exception e) {
        if (audioRecord != null) {
            audioRecord.release();
            audioRecord = null;
        }
        return STATE_NO_PERMISSION;
    }
    if (audioRecord.getRecordingState() != AudioRecord.RECORDSTATE_RECORDING) {
        // Devices below Android 6.0 always return this state, so check the Build version
        // when relying on it. Check whether recording is actually in progress.
        if (audioRecord != null) {
            audioRecord.stop();
            audioRecord.release();
            audioRecord = null;
            LogUtil.d("录音机被占用");  // "the recorder is already in use"
        }
        return STATE_RECORDING;
    } else {
        // check whether recorded data can actually be read
        readSize = audioRecord.read(point, 0, point.length);
        if (readSize <= 0) {
            if (audioRecord != null) {
                audioRecord.stop();
                audioRecord.release();
                audioRecord = null;
            }
            LogUtil.d("录音的结果为空");  // "the recording result is empty"
            return STATE_NO_PERMISSION;
        } else {
            if (audioRecord != null) {
                audioRecord.stop();
                audioRecord.release();
                audioRecord = null;
            }
            return STATE_SUCCESS;
        }
    }
}
Example 13
Source File: SoundRecorder.java From AAVT with Apache License 2.0

public void start() {
    if (!isStarted) {
        stopFlag = false;
        mRecordBufferSize = AudioRecord.getMinBufferSize(mRecordSampleRate,
                mRecordChannelConfig, mRecordAudioFormat) * 2;
        mRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, mRecordSampleRate,
                mRecordChannelConfig, mRecordAudioFormat, mRecordBufferSize);
        mRecord.startRecording();
        try {
            MediaFormat format = convertAudioConfigToFormat(mConfig.mAudio);
            mAudioEncoder = MediaCodec.createEncoderByType(format.getString(MediaFormat.KEY_MIME));
            mAudioEncoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
            mAudioEncoder.start();
        } catch (IOException e) {
            e.printStackTrace();
        }
        Thread thread = new Thread(new Runnable() {
            @Override
            public void run() {
                while (!stopFlag && !audioEncodeStep(false)) {
                }
                audioEncodeStep(true);
                Log.e("wuwang", "audio stop");
                if (isStarted) {
                    mRecord.stop();
                    mRecord.release();
                    mRecord = null;
                }
                if (mAudioEncoder != null) {
                    mAudioEncoder.stop();
                    mAudioEncoder.release();
                    mAudioEncoder = null;
                }
                isStarted = false;
            }
        });
        thread.start();
        startTime = SystemClock.elapsedRealtimeNanos();
        isStarted = true;
    }
}
Example 14
Source File: MicrophoneCollector.java From sensordatacollector with GNU General Public License v2.0

private void doTask() {
    // http://stackoverflow.com/questions/10655703/what-does-androids-getmaxamplitude-function-for-the-mediarecorder-actually-gi
    int bufferSize = AudioRecord.getMinBufferSize(44100, AudioFormat.CHANNEL_IN_DEFAULT,
            AudioFormat.ENCODING_PCM_16BIT);
    // making the buffer bigger....
    bufferSize = bufferSize * 4;
    // AudioRecord recorder = new AudioRecord(MediaRecorder.AudioSource.MIC, 44100, AudioFormat.CHANNEL_IN_DEFAULT, AudioFormat.ENCODING_PCM_16BIT, bufferSize);
    AudioRecord recorder = findAudioRecord();
    if (recorder == null) {
        Activity main = ActivityController.getInstance().get("MainActivity");
        if (main != null) {
            Utils.makeToast2(main, R.string.sensor_microphone_error, Toast.LENGTH_SHORT);
        }
        return;
    }

    short data[] = new short[bufferSize];
    double average = 0.0;

    recorder.startRecording();
    // recording data;
    recorder.read(data, 0, bufferSize);
    recorder.stop();

    for (short s : data) {
        if (s > 0) {
            average += Math.abs(s);
        } else {
            bufferSize--;
        }
    }
    // x=max;
    double x = average / bufferSize;
    recorder.release();

    double db;
    if (x == 0) {
        Log.w("TAG", "Warning no sound captured!");
        return;
    }
    // calculating the pascal pressure based on the idea that the max
    // amplitude (between 0 and 32767) is relative to the pressure
    double pressure = x / 51805.5336;  // the value 51805.5336 can be derived from assuming
                                       // that x=32767 = 0.6325 Pa and x=1 = 0.00002 Pa
                                       // (the reference value)
    db = (20 * Math.log10(pressure / 0.00002));
    if (db < 0) {
        return;
    }

    // if( mRecorder == null )
    //     return;
    // float maxVolume = (float)(20 * Math.log10(mRecorder.getMaxAmplitude() / 2700.0));

    long time = System.currentTimeMillis();

    ContentValues newValues = new ContentValues();
    newValues.put(valueNames[0], db);
    newValues.put(valueNames[1], time);

    if (db == Double.NEGATIVE_INFINITY || db == Double.POSITIVE_INFINITY || db == Double.NaN) {
        return;
    }

    String deviceID = DeviceID.get(SensorDataCollectorService.getInstance());
    MicrophoneCollector.updateLivePlotter(deviceID, new float[]{(float) db});
    MicrophoneCollector.writeDBStorage(deviceID, newValues);
}
Example 15
Source File: CheckPermission.java From CameraView with Apache License 2.0

/**
 * Checks whether the app has the audio-recording permission.
 *
 * @return the resulting state
 */
public static int getRecordState() {
    int minBuffer = AudioRecord.getMinBufferSize(44100, AudioFormat.CHANNEL_IN_MONO,
            AudioFormat.ENCODING_PCM_16BIT);
    AudioRecord audioRecord = new AudioRecord(MediaRecorder.AudioSource.DEFAULT, 44100,
            AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, (minBuffer * 100));
    short[] point = new short[minBuffer];
    int readSize = 0;
    try {
        audioRecord.startRecording();  // check whether recording can be started at all
    } catch (Exception e) {
        if (audioRecord != null) {
            audioRecord.release();
            audioRecord = null;
        }
        return STATE_NO_PERMISSION;
    }
    if (audioRecord.getRecordingState() != AudioRecord.RECORDSTATE_RECORDING) {
        // Devices below Android 6.0 always return this state, so check the Build version
        // when relying on it. Check whether recording is actually in progress.
        if (audioRecord != null) {
            audioRecord.stop();
            audioRecord.release();
            audioRecord = null;
            Log.d("CheckAudioPermission", "录音机被占用");  // "the recorder is already in use"
        }
        return STATE_RECORDING;
    } else {
        // check whether recorded data can actually be read
        readSize = audioRecord.read(point, 0, point.length);
        if (readSize <= 0) {
            if (audioRecord != null) {
                audioRecord.stop();
                audioRecord.release();
                audioRecord = null;
            }
            Log.d("CheckAudioPermission", "录音的结果为空");  // "the recording result is empty"
            return STATE_NO_PERMISSION;
        } else {
            if (audioRecord != null) {
                audioRecord.stop();
                audioRecord.release();
                audioRecord = null;
            }
            return STATE_SUCCESS;
        }
    }
}
Example 16
Source File: CheckPermission.java From imsdk-android with MIT License

/**
 * Checks whether the app has the audio-recording permission.
 *
 * @return the resulting state
 */
public static int getRecordState() {
    int minBuffer = AudioRecord.getMinBufferSize(44100, AudioFormat.CHANNEL_IN_MONO,
            AudioFormat.ENCODING_PCM_16BIT);
    AudioRecord audioRecord = new AudioRecord(MediaRecorder.AudioSource.DEFAULT, 44100,
            AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, (minBuffer * 100));
    short[] point = new short[minBuffer];
    int readSize = 0;
    try {
        audioRecord.startRecording();  // check whether recording can be started at all
    } catch (Exception e) {
        if (audioRecord != null) {
            audioRecord.release();
            audioRecord = null;
        }
        return STATE_NO_PERMISSION;
    }
    if (audioRecord.getRecordingState() != AudioRecord.RECORDSTATE_RECORDING) {
        // Devices below Android 6.0 always return this state, so check the Build version
        // when relying on it. Check whether recording is actually in progress.
        if (audioRecord != null) {
            audioRecord.stop();
            audioRecord.release();
            audioRecord = null;
            Log.d("CheckAudioPermission", "录音机被占用");  // "the recorder is already in use"
        }
        return STATE_RECORDING;
    } else {
        // check whether recorded data can actually be read
        readSize = audioRecord.read(point, 0, point.length);
        if (readSize <= 0) {
            if (audioRecord != null) {
                audioRecord.stop();
                audioRecord.release();
                audioRecord = null;
            }
            Log.d("CheckAudioPermission", "录音的结果为空");  // "the recording result is empty"
            return STATE_NO_PERMISSION;
        } else {
            if (audioRecord != null) {
                audioRecord.stop();
                audioRecord.release();
                audioRecord = null;
            }
            return STATE_SUCCESS;
        }
    }
}
Example 17
Source File: TLMediaAudioEncoder.java From TimeLapseRecordingSample with Apache License 2.0

@Override
protected void recordingLoop() {
    android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);
    try {
        final int min_buffer_size = AudioRecord.getMinBufferSize(
                mSampleRate, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
        int buffer_size = SAMPLES_PER_FRAME * FRAMES_PER_BUFFER;
        if (buffer_size < min_buffer_size)
            buffer_size = ((min_buffer_size / SAMPLES_PER_FRAME) + 1) * SAMPLES_PER_FRAME * 2;
        final AudioRecord audioRecord = new AudioRecord(
                MediaRecorder.AudioSource.MIC, mSampleRate,
                AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, buffer_size);
        try {
            if ((audioRecord.getState() == AudioRecord.STATE_INITIALIZED) && (mIsRunning)) {
                if (DEBUG) Log.v(TAG, "AudioThread:start_from_encoder audio recording");
                final ByteBuffer buf = ByteBuffer.allocateDirect(SAMPLES_PER_FRAME);
                int readBytes;
                audioRecord.startRecording();
                try {
                    while (mIsRunning && isRecording()) {
                        // read audio data from internal mic
                        buf.clear();
                        readBytes = audioRecord.read(buf, SAMPLES_PER_FRAME);
                        if (readBytes > 0) {
                            // set audio data to encoder
                            encode(buf, readBytes, getPTSUs());
                            frameAvailableSoon();
                        }
                    }
                    frameAvailableSoon();
                } finally {
                    audioRecord.stop();
                }
            }
        } finally {
            audioRecord.release();
        }
    } catch (Exception e) {
        Log.e(TAG, "AudioThread#run", e);
    } finally {
        android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_DEFAULT);
    }
}
Example 18
Source File: AudioSource.java From science-journal with Apache License 2.0

private void start() {
    // FYI: the current thread holds lockAudioReceivers.

    // Use VOICE_COMMUNICATION to filter out audio coming from the speakers
    final AudioRecord audioRecord =
        new AudioRecord(
            MediaRecorder.AudioSource.VOICE_COMMUNICATION,
            SAMPLE_RATE_IN_HZ,
            CHANNEL_CONFIG,
            AUDIO_FORMAT,
            minBufferSizeInBytes);

    if (audioRecord.getState() != AudioRecord.STATE_INITIALIZED) {
        audioRecord.release();
        return;
    }

    audioRecord.startRecording();

    // AudioRecord.startRecording() logs an error but it has no return value and
    // doesn't throw an exception when someone else is using the mic.
    if (audioRecord.getRecordingState() != AudioRecord.RECORDSTATE_RECORDING) {
        audioRecord.release();
        return;
    }

    running.set(true);

    future =
        executorService.submit(
            () -> {
                short[] buffer = new short[minBufferSizeInBytes / 2];
                int offset = 0;
                boolean goodDataRead = false;
                while (running.get()) {
                    int readShorts = audioRecord.read(buffer, offset, buffer.length - offset);
                    // On some devices (Moto E, for example) we get a bunch of zeros when we first
                    // start reading. Ignore those zeros.
                    if (!goodDataRead) {
                        int countLeadingZeros = 0;
                        while (countLeadingZeros < readShorts && buffer[countLeadingZeros] == 0) {
                            countLeadingZeros++;
                        }
                        if (countLeadingZeros > 0) {
                            if (readShorts > countLeadingZeros) {
                                System.arraycopy(
                                    buffer, countLeadingZeros, buffer, 0, readShorts - countLeadingZeros);
                            }
                            readShorts -= countLeadingZeros;
                        }
                        goodDataRead = (readShorts > 0);
                    }
                    offset += readShorts;
                    // If the buffer is full, call the Receivers.
                    if (offset == buffer.length) {
                        synchronized (lockAudioReceivers) {
                            for (AudioReceiver audioReceiver : audioReceivers) {
                                audioReceiver.onReceiveAudio(buffer);
                            }
                        }
                        offset = 0;
                    }
                }
                audioRecord.stop();
                audioRecord.release();
            });
}
Example 19
Source File: SoundFile.java From YTPlayer with GNU General Public License v3.0

private void RecordAudio() {
    if (mProgressListener == null) {
        // A progress listener is mandatory here, as it will let us know when to stop recording.
        return;
    }
    mInputFile = null;
    mFileType = "raw";
    mFileSize = 0;
    mSampleRate = 44100;
    mChannels = 1;  // record mono audio.
    short[] buffer = new short[1024];  // buffer contains 1 mono frame of 1024 16 bits samples
    int minBufferSize = AudioRecord.getMinBufferSize(
            mSampleRate, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
    // make sure minBufferSize can contain at least 1 second of audio (16 bits sample).
    if (minBufferSize < mSampleRate * 2) {
        minBufferSize = mSampleRate * 2;
    }
    AudioRecord audioRecord = new AudioRecord(
            MediaRecorder.AudioSource.DEFAULT,
            mSampleRate,
            AudioFormat.CHANNEL_IN_MONO,
            AudioFormat.ENCODING_PCM_16BIT,
            minBufferSize
    );

    // Allocate memory for 20 seconds first. Reallocate later if more is needed.
    mDecodedBytes = ByteBuffer.allocate(20 * mSampleRate * 2);
    mDecodedBytes.order(ByteOrder.LITTLE_ENDIAN);
    mDecodedSamples = mDecodedBytes.asShortBuffer();
    audioRecord.startRecording();
    while (true) {
        // check if mDecodedSamples can contain 1024 additional samples.
        if (mDecodedSamples.remaining() < 1024) {
            // Try to allocate memory for 10 additional seconds.
            int newCapacity = mDecodedBytes.capacity() + 10 * mSampleRate * 2;
            ByteBuffer newDecodedBytes = null;
            try {
                newDecodedBytes = ByteBuffer.allocate(newCapacity);
            } catch (OutOfMemoryError oome) {
                break;
            }
            int position = mDecodedSamples.position();
            mDecodedBytes.rewind();
            newDecodedBytes.put(mDecodedBytes);
            mDecodedBytes = newDecodedBytes;
            mDecodedBytes.order(ByteOrder.LITTLE_ENDIAN);
            mDecodedBytes.rewind();
            mDecodedSamples = mDecodedBytes.asShortBuffer();
            mDecodedSamples.position(position);
        }
        // TODO(nfaralli): maybe use the read method that takes a direct ByteBuffer argument.
        audioRecord.read(buffer, 0, buffer.length);
        mDecodedSamples.put(buffer);
        // Let the progress listener know how many seconds have been recorded.
        // The returned value tells us if we should keep recording or stop.
        if (!mProgressListener.reportProgress(
                (float) (mDecodedSamples.position()) / mSampleRate)) {
            break;
        }
    }
    audioRecord.stop();
    audioRecord.release();
    mNumSamples = mDecodedSamples.position();
    mDecodedSamples.rewind();
    mDecodedBytes.rewind();
    mAvgBitRate = mSampleRate * 16 / 1000;

    // Temporary hack to make it work with the old version.
    mNumFrames = mNumSamples / getSamplesPerFrame();
    if (mNumSamples % getSamplesPerFrame() != 0) {
        mNumFrames++;
    }
    mFrameGains = new int[mNumFrames];
    mFrameLens = null;     // not needed for recorded audio
    mFrameOffsets = null;  // not needed for recorded audio
    int i, j;
    int gain, value;
    for (i = 0; i < mNumFrames; i++) {
        gain = -1;
        for (j = 0; j < getSamplesPerFrame(); j++) {
            if (mDecodedSamples.remaining() > 0) {
                value = Math.abs(mDecodedSamples.get());
            } else {
                value = 0;
            }
            if (gain < value) {
                gain = value;
            }
        }
        mFrameGains[i] = (int) Math.sqrt(gain);  // here gain = sqrt(max value of 1st channel)...
    }
    mDecodedSamples.rewind();
    // DumpSamples();  // Uncomment this line to dump the samples in a TSV file.
}