android.media.AudioFormat Java Examples
The following examples show how to use android.media.AudioFormat. Each example is taken from an open-source project; the project and source file are noted above each snippet.
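Before the examples, a quick orientation: AudioFormat shows up in two ways in this API. Older code passes its constants (CHANNEL_*, ENCODING_*) straight into the AudioRecord and AudioTrack constructors, while newer code builds an AudioFormat object with AudioFormat.Builder. The following is a minimal sketch of both styles, assuming the RECORD_AUDIO permission is already granted; the 44100 Hz rate and class name are illustrative choices, not taken from any example below.

import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder;

public class AudioFormatSketch {
    public static AudioRecord openMicrophone() {
        int sampleRate = 44100; // illustrative value
        int minBuf = AudioRecord.getMinBufferSize(sampleRate,
                AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);

        // Legacy style: pass the AudioFormat constants directly.
        AudioRecord legacy = new AudioRecord(MediaRecorder.AudioSource.MIC, sampleRate,
                AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, minBuf);
        legacy.release(); // created only to show the older constructor

        // Builder style (AudioRecord.Builder needs API 23): describe the format as an object.
        AudioFormat format = new AudioFormat.Builder()
                .setEncoding(AudioFormat.ENCODING_PCM_16BIT)
                .setSampleRate(sampleRate)
                .setChannelMask(AudioFormat.CHANNEL_IN_MONO)
                .build();
        return new AudioRecord.Builder()
                .setAudioSource(MediaRecorder.AudioSource.MIC)
                .setAudioFormat(format)
                .setBufferSizeInBytes(minBuf)
                .build();
    }
}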
Example #1
Source File: FromFileBase.java From rtmp-rtsp-stream-client-java with Apache License 2.0
/**
 * @param filePath path to the video MP4 file.
 * @param bitRate AAC bitrate in kb.
 * @return true on success, false on error (normally because the selected encoder
 * doesn't support the configuration set, or the device has no H264 encoder).
 * @throws IOException normally file not found.
 */
public boolean prepareAudio(String filePath, int bitRate) throws IOException {
    audioPath = filePath;
    audioDecoder = new AudioDecoder(this, audioDecoderInterface, this);
    if (!audioDecoder.initExtractor(filePath)) return false;
    boolean result = audioEncoder.prepareAudioEncoder(bitRate, audioDecoder.getSampleRate(),
            audioDecoder.isStereo(), 0);
    prepareAudioRtp(audioDecoder.isStereo(), audioDecoder.getSampleRate());
    audioDecoder.prepareAudio();
    if (glInterface != null && !(glInterface instanceof OffScreenGlThread)) {
        int channel = audioDecoder.isStereo()
                ? AudioFormat.CHANNEL_OUT_STEREO : AudioFormat.CHANNEL_OUT_MONO;
        int buffSize = AudioTrack.getMinBufferSize(audioDecoder.getSampleRate(), channel,
                AudioFormat.ENCODING_PCM_16BIT);
        audioTrackPlayer = new AudioTrack(AudioManager.STREAM_MUSIC,
                audioDecoder.getSampleRate(), channel, AudioFormat.ENCODING_PCM_16BIT,
                buffSize, AudioTrack.MODE_STREAM);
    }
    return result;
}
Example #2
Source File: AudioCodec.java From bcm-android with GNU General Public License v3.0
public AudioCodec(IRecordFinished finishListener) throws IOException {
    this.bufferSize = AudioRecord.getMinBufferSize(SAMPLE_RATE,
            AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
    this.audioRecord = createAudioRecord(this.bufferSize);
    this.mediaCodec = createMediaCodec(this.bufferSize);
    this.finishListener = finishListener;

    this.mediaCodec.start();

    try {
        audioRecord.startRecording();
    } catch (Exception e) {
        ALog.e(TAG, "AudioCodec", e);
        mediaCodec.release();
        throw new IOException(e);
    }
}
Example #3
Source File: JSynAndroidAudioDeviceManager.java From processing-sound with GNU Lesser General Public License v2.1
public void start() {
    this.minBufferSize = AudioTrack.getMinBufferSize(this.frameRate,
            AudioFormat.CHANNEL_OUT_STEREO, AudioFormat.ENCODING_PCM_16BIT);
    this.bufferSize = (3 * (this.minBufferSize / 2)) & ~3;
    this.audioTrack = new AudioTrack.Builder()
            .setAudioAttributes(new AudioAttributes.Builder()
                    .setUsage(AudioAttributes.USAGE_MEDIA)
                    .setContentType(AudioAttributes.CONTENT_TYPE_MUSIC)
                    .build())
            .setAudioFormat(new AudioFormat.Builder()
                    .setChannelMask(AudioFormat.CHANNEL_OUT_STEREO)
                    .setEncoding(AudioFormat.ENCODING_PCM_16BIT)
                    .setSampleRate(this.frameRate)
                    .build())
            .setBufferSizeInBytes(this.bufferSize)
            .setTransferMode(AudioTrack.MODE_STREAM)
            .build();
    this.audioTrack.play();
}
Example #4
Source File: Track.java From K-Sonic with MIT License
private void initDevice(int sampleRate, int numChannels) {
    if (isJMono) numChannels = 2;
    mLock.lock();
    try {
        final int format = findFormatFromChannels(numChannels);
        final int minSize = AudioTrack.getMinBufferSize(sampleRate, format,
                AudioFormat.ENCODING_PCM_16BIT);
        mTrack = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRate, format,
                AudioFormat.ENCODING_PCM_16BIT, minSize * 4, AudioTrack.MODE_STREAM);
        mSonic = new Sonic(sampleRate, numChannels);
    } catch (Exception e) { // IllegalArgumentException
        throw e;
    } finally {
        mLock.unlock();
    }
}
Example #5
Source File: AudioRecorder.java From react-native-google-nearby-connection with MIT License
public AudioRecord findAudioRecord() {
    for (int rate : AudioBuffer.POSSIBLE_SAMPLE_RATES) {
        for (short audioFormat : new short[] {
                AudioFormat.ENCODING_PCM_8BIT, AudioFormat.ENCODING_PCM_16BIT }) {
            for (short channelConfig : new short[] {
                    AudioFormat.CHANNEL_IN_MONO, AudioFormat.CHANNEL_IN_STEREO }) {
                try {
                    Log.d(TAG, "Attempting rate " + rate + "Hz, bits: " + audioFormat
                            + ", channel: " + channelConfig);
                    int bufferSize = AudioRecord.getMinBufferSize(rate, channelConfig, audioFormat);

                    if (bufferSize != AudioRecord.ERROR_BAD_VALUE) {
                        // check if we can instantiate and have a success
                        AudioRecord recorder = new AudioRecord(AudioSource.DEFAULT, rate,
                                channelConfig, audioFormat, bufferSize);

                        if (recorder.getState() == AudioRecord.STATE_INITIALIZED) {
                            return recorder;
                        }
                    }
                } catch (Exception e) {
                    Log.e(TAG, rate + "Exception, keep trying.", e);
                }
            }
        }
    }
    return null;
}
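A hypothetical caller of findAudioRecord() (not part of the project) would own the returned recorder and is responsible for stopping and releasing it:

AudioRecord recorder = findAudioRecord();
if (recorder != null) {
    recorder.startRecording();
    byte[] buf = new byte[4096];
    int read = recorder.read(buf, 0, buf.length); // blocking read of raw PCM
    recorder.stop();
    recorder.release();
}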
Example #6
Source File: SpeechRecognizer.java From pocketsphinx-android with BSD 2-Clause "Simplified" License
/**
 * Creates speech recognizer. Recognizer holds the AudioRecord object, so you
 * need to call {@link release} in order to properly finalize it.
 *
 * @param config The configuration object
 * @throws IOException thrown if audio recorder can not be created for some reason.
 */
protected SpeechRecognizer(Config config) throws IOException {
    decoder = new Decoder(config);
    sampleRate = (int) decoder.getConfig().getFloat("-samprate");
    bufferSize = Math.round(sampleRate * BUFFER_SIZE_SECONDS);
    recorder = new AudioRecord(
            AudioSource.VOICE_RECOGNITION, sampleRate,
            AudioFormat.CHANNEL_IN_MONO,
            AudioFormat.ENCODING_PCM_16BIT, bufferSize * 2);

    if (recorder.getState() == AudioRecord.STATE_UNINITIALIZED) {
        recorder.release();
        throw new IOException(
                "Failed to initialize recorder. Microphone might be already in use.");
    }
}
Example #7
Source File: RecordingSampler.java From voice-recording-visualizer with Apache License 2.0
private void initAudioRecord() {
    int bufferSize = AudioRecord.getMinBufferSize(
            RECORDING_SAMPLE_RATE,
            AudioFormat.CHANNEL_IN_MONO,
            AudioFormat.ENCODING_PCM_16BIT
    );

    mAudioRecord = new AudioRecord(
            MediaRecorder.AudioSource.MIC,
            RECORDING_SAMPLE_RATE,
            AudioFormat.CHANNEL_IN_MONO,
            AudioFormat.ENCODING_PCM_16BIT,
            bufferSize
    );

    if (mAudioRecord.getState() == AudioRecord.STATE_INITIALIZED) {
        mBufSize = bufferSize;
    }
}
Example #8
Source File: Microphone.java From ssj with GNU General Public License v3.0
public static Cons.Type audioFormatSampleType(int f) {
    switch (f) {
        case AudioFormat.ENCODING_PCM_8BIT:
            return Cons.Type.CHAR;
        case AudioFormat.ENCODING_PCM_16BIT:
        case AudioFormat.ENCODING_DEFAULT:
            return Cons.Type.SHORT;
        case AudioFormat.ENCODING_PCM_FLOAT:
            return Cons.Type.FLOAT;
        case AudioFormat.ENCODING_INVALID:
        default:
            return Cons.Type.UNDEF;
    }
}
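The same encoding constants also fix the sample width. As a companion to the mapping above, a minimal sketch (the helper name is hypothetical, not part of the ssj project) computes bytes per sample for the same cases:

// Hypothetical helper: bytes per sample for the encodings handled above.
static int bytesPerSample(int encoding) {
    switch (encoding) {
        case AudioFormat.ENCODING_PCM_8BIT:
            return 1;
        case AudioFormat.ENCODING_PCM_16BIT:
        case AudioFormat.ENCODING_DEFAULT: // treated as 16-bit, mirroring the mapping above
            return 2;
        case AudioFormat.ENCODING_PCM_FLOAT:
            return 4;
        default:
            throw new IllegalArgumentException("Unsupported encoding: " + encoding);
    }
}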
Example #9
Source File: AudioEncoder.java From RtmpPublisher with Apache License 2.0
/**
 * Prepare the encoder. Call this before starting the encoder.
 */
void prepare(int bitrate, int sampleRate, long startStreamingAt) {
    int bufferSize = AudioRecord.getMinBufferSize(sampleRate,
            AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);

    MediaFormat audioFormat =
            MediaFormat.createAudioFormat(AUDIO_MIME_TYPE, sampleRate, CHANNEL_COUNT);
    audioFormat.setInteger(MediaFormat.KEY_AAC_PROFILE,
            MediaCodecInfo.CodecProfileLevel.AACObjectLC);
    audioFormat.setInteger(MediaFormat.KEY_BIT_RATE, bitrate);
    audioFormat.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, bufferSize);
    startedEncodingAt = startStreamingAt;
    try {
        encoder = MediaCodec.createEncoderByType(AUDIO_MIME_TYPE);
        encoder.configure(audioFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
    } catch (IOException | IllegalStateException e) {
        e.printStackTrace();
    }
}
Example #10
Source File: MicrophoneSource.java From media-for-mobile with Apache License 2.0
public synchronized void configure(int sampleRate, int channels) {
    this.sampleRate = sampleRate;
    recordChannels = channels;
    switch (recordChannels) {
        case 1:
            androidChannels = AudioFormat.CHANNEL_IN_MONO;
            break;
        case 2:
            androidChannels = AudioFormat.CHANNEL_IN_STEREO;
            break;
    }
    minBufferSize = AudioRecord.getMinBufferSize(sampleRate, androidChannels, audioEncoding);

    if (minBufferSize < 0) {
        this.sampleRate = 8000;
        // Retry with the 8 kHz fallback rate (the field, not the unchanged parameter).
        minBufferSize = AudioRecord.getMinBufferSize(this.sampleRate, androidChannels, audioEncoding);
    }
}
Example #11
Source File: TTSUtility.java From speech-android-sdk with Apache License 2.0
private void initPlayer() {
    stopTtsPlayer();
    // IMPORTANT: minimum required buffer size for the successful creation
    // of an AudioTrack instance in streaming mode.
    int bufferSize = AudioTrack.getMinBufferSize(sampleRate,
            AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT);

    synchronized (this) {
        audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRate,
                AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT,
                bufferSize, AudioTrack.MODE_STREAM);
        if (audioTrack != null)
            audioTrack.play();
    }
}
Example #12
Source File: AudioThread.java From Viewer with Apache License 2.0
public AudioThread(int sampleRateInHz, int channel, long streamId, long decoderId, Media media) {
    if (channel == 1) {
        channel_configuration = AudioFormat.CHANNEL_CONFIGURATION_MONO;
    } else {
        channel_configuration = AudioFormat.CHANNEL_CONFIGURATION_STEREO;
    }
    this.mediaStreamId = streamId;
    this.decoderId = decoderId;
    this.media = media;
    int minBufferSize = AudioTrack.getMinBufferSize(sampleRateInHz,
            channel_configuration, AudioFormat.ENCODING_PCM_16BIT);
    if (minBufferSize > audioLength) {
        audioLength = minBufferSize;
    }
    mAudioBuffer = new byte[audioLength];
    mAudio = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRateInHz,
            channel_configuration, AudioFormat.ENCODING_PCM_16BIT,
            audioLength, AudioTrack.MODE_STREAM);
}
Example #13
Source File: AudioUtil.java From Augendiagnose with GNU General Public License v2.0
/**
 * Create a sine wave of a certain frequency and duration.
 *
 * @param freqHz The frequency in Hertz
 * @param durationMs The duration in milliseconds
 * @return An AudioTrack with the corresponding sine wave.
 */
public static AudioTrack generateTonePulse(final double freqHz, final int durationMs) {
    int count = (int) (BITRATE * 2.0 * (durationMs / MILLIS_IN_SECOND)) & ~1;
    short[] samples = new short[count];
    for (int i = 0; i < count; i += 2) {
        short sample = TONE_MAP_2[(int) (2 * i / (BITRATE / freqHz)) % 2];
        samples[i] = sample;
        samples[i + 1] = sample;
    }
    @SuppressWarnings("deprecation")
    AudioTrack track = new AudioTrack(AudioManager.STREAM_MUSIC, (int) BITRATE,
            AudioFormat.CHANNEL_OUT_STEREO, AudioFormat.ENCODING_PCM_16BIT,
            count * (Short.SIZE / 8), AudioTrack.MODE_STATIC); // MAGIC_NUMBER
    track.write(samples, 0, count);
    return track;
}
Example #14
Source File: MediaAudioEncoderRunable.java From GLES2_AUDIO_VIDEO_RECODE with Apache License 2.0
/**
 * Preparation before recording.
 *
 * @throws IOException
 */
@Override
public void prepare() throws IOException {
    mTrackIndex = -1;
    mMuxerStarted = mIsEndOfStream = false;

    // Configure the MediaFormat.
    final MediaFormat audioFormat = MediaFormat.createAudioFormat(MIME_TYPE, SAMPLE_RATE, 1);
    audioFormat.setInteger(MediaFormat.KEY_AAC_PROFILE,
            MediaCodecInfo.CodecProfileLevel.AACObjectLC);
    audioFormat.setInteger(MediaFormat.KEY_CHANNEL_MASK, AudioFormat.CHANNEL_IN_MONO);
    audioFormat.setInteger(MediaFormat.KEY_BIT_RATE, BIT_RATE);
    audioFormat.setInteger(MediaFormat.KEY_CHANNEL_COUNT, 1);

    mMediaCodec = MediaCodec.createEncoderByType(MIME_TYPE);
    mMediaCodec.configure(audioFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
    mMediaCodec.start();

    if (mMediaEncoderListener != null) {
        try {
            mMediaEncoderListener.onPrepared(this);
        } catch (final Exception e) {
            LogUtils.e(TAG, "prepare:", e);
        }
    }
}
Example #15
Source File: MjpegPlayerActivity.java From CameraV with GNU General Public License v3.0
public void initAudio(String vfsPath) throws Exception {
    isAudio = new BufferedInputStream(new FileInputStream(vfsPath));

    if (useAAC) {
        aac = new AACHelper();
        aac.setDecoder(MediaConstants.sAudioSampleRate,
                MediaConstants.sAudioChannels, MediaConstants.sAudioBitRate);
    } else {
        int minBufferSize = AudioTrack.getMinBufferSize(MediaConstants.sAudioSampleRate,
                MediaConstants.sChannelConfigOut, AudioFormat.ENCODING_PCM_16BIT) * 8;

        at = new AudioTrack(AudioManager.STREAM_MUSIC, MediaConstants.sAudioSampleRate,
                MediaConstants.sChannelConfigOut, AudioFormat.ENCODING_PCM_16BIT,
                minBufferSize, AudioTrack.MODE_STREAM);
    }
}
Example #16
Source File: DefaultAudioSink.java From TelePlus-Android with GNU General Public License v2.0
@TargetApi(21)
private AudioTrack createAudioTrackV21() {
    android.media.AudioAttributes attributes;
    if (tunneling) {
        attributes = new android.media.AudioAttributes.Builder()
                .setContentType(android.media.AudioAttributes.CONTENT_TYPE_MOVIE)
                .setFlags(android.media.AudioAttributes.FLAG_HW_AV_SYNC)
                .setUsage(android.media.AudioAttributes.USAGE_MEDIA)
                .build();
    } else {
        attributes = audioAttributes.getAudioAttributesV21();
    }
    AudioFormat format = new AudioFormat.Builder()
            .setChannelMask(outputChannelConfig)
            .setEncoding(outputEncoding)
            .setSampleRate(outputSampleRate)
            .build();
    int audioSessionId = this.audioSessionId != C.AUDIO_SESSION_ID_UNSET
            ? this.audioSessionId
            : AudioManager.AUDIO_SESSION_ID_GENERATE;
    return new AudioTrack(attributes, format, bufferSize, MODE_STREAM, audioSessionId);
}
Example #17
Source File: AbstractTLMediaAudioEncoder.java From TimeLapseRecordingSample with Apache License 2.0
@Override
protected MediaFormat internal_prepare() throws IOException {
    if (DEBUG) Log.v(TAG, "prepare:");

    // Prepare MediaCodec for AAC encoding of audio data from the internal mic.
    final MediaCodecInfo audioCodecInfo = selectAudioCodec(MIME_TYPE);
    if (audioCodecInfo == null) {
        Log.e(TAG, "Unable to find an appropriate codec for " + MIME_TYPE);
        return null;
    }
    if (DEBUG) Log.i(TAG, "selected codec: " + audioCodecInfo.getName());

    final MediaFormat format = MediaFormat.createAudioFormat(MIME_TYPE, mSampleRate, 1);
    format.setInteger(MediaFormat.KEY_AAC_PROFILE,
            MediaCodecInfo.CodecProfileLevel.AACObjectLC);
    format.setInteger(MediaFormat.KEY_CHANNEL_MASK, AudioFormat.CHANNEL_IN_MONO);
    format.setInteger(MediaFormat.KEY_BIT_RATE, mBitRate);
    format.setInteger(MediaFormat.KEY_CHANNEL_COUNT, 1);
    // format.setLong(MediaFormat.KEY_MAX_INPUT_SIZE, inputFile.length());
    // format.setLong(MediaFormat.KEY_DURATION, (long) durationInMs);
    if (DEBUG) Log.i(TAG, "prepare finishing:format=" + format);
    return format;
}
Example #18
Source File: MicrophoneManager.java From rtmp-rtsp-stream-client-java with Apache License 2.0
/**
 * Create audio record with params and AudioPlaybackCaptureConfig used for capturing internal audio.
 * Notice that {@link android.Manifest.permission#RECORD_AUDIO} should be granted before calling this!
 *
 * @param config - AudioPlaybackCaptureConfiguration received from {@link android.media.projection.MediaProjection}
 *
 * @see AudioPlaybackCaptureConfiguration.Builder#Builder(MediaProjection)
 * @see "https://developer.android.com/guide/topics/media/playback-capture"
 * @see "https://medium.com/@debuggingisfun/android-10-audio-capture-77dd8e9070f9"
 */
public void createInternalMicrophone(AudioPlaybackCaptureConfiguration config, int sampleRate,
        boolean isStereo) {
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) {
        this.sampleRate = sampleRate;
        if (!isStereo) channel = AudioFormat.CHANNEL_IN_MONO;
        audioRecord = new AudioRecord.Builder()
                .setAudioPlaybackCaptureConfig(config)
                .setAudioFormat(new AudioFormat.Builder()
                        .setEncoding(audioFormat)
                        .setSampleRate(sampleRate)
                        .setChannelMask(channel)
                        .build())
                .setBufferSizeInBytes(getPcmBufferSize())
                .build();
        audioPostProcessEffect = new AudioPostProcessEffect(audioRecord.getAudioSessionId());
        String chl = (isStereo) ? "Stereo" : "Mono";
        Log.i(TAG, "Internal microphone created, " + sampleRate + "hz, " + chl);
        created = true;
    } else {
        createMicrophone(sampleRate, isStereo, false, false);
    }
}
Example #19
Source File: AndroidAudioForJSyn.java From jsyn with Apache License 2.0
public void start() {
    Process.setThreadPriority(-5);
    minBufferSize = AudioTrack.getMinBufferSize(frameRate,
            AudioFormat.CHANNEL_OUT_STEREO, AudioFormat.ENCODING_PCM_FLOAT);
    System.out.println("Audio minBufferSize = " + minBufferSize);
    bufferSize = (3 * (minBufferSize / 2)) & ~3;
    System.out.println("Audio bufferSize = " + bufferSize);
    audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, frameRate,
            AudioFormat.CHANNEL_OUT_STEREO, AudioFormat.ENCODING_PCM_FLOAT,
            bufferSize, AudioTrack.MODE_STREAM);
    audioTrack.play();
}
Example #20
Source File: FileSynthesisCallback.java From android_9.0.0_r45 with Apache License 2.0
private ByteBuffer makeWavHeader(int sampleRateInHz, int audioFormat, int channelCount,
        int dataLength) {
    int sampleSizeInBytes = AudioFormat.getBytesPerSample(audioFormat);
    int byteRate = sampleRateInHz * sampleSizeInBytes * channelCount;
    short blockAlign = (short) (sampleSizeInBytes * channelCount);
    short bitsPerSample = (short) (sampleSizeInBytes * 8);

    byte[] headerBuf = new byte[WAV_HEADER_LENGTH];
    ByteBuffer header = ByteBuffer.wrap(headerBuf);
    header.order(ByteOrder.LITTLE_ENDIAN);

    header.put(new byte[]{ 'R', 'I', 'F', 'F' });
    header.putInt(dataLength + WAV_HEADER_LENGTH - 8); // RIFF chunk size
    header.put(new byte[]{ 'W', 'A', 'V', 'E' });
    header.put(new byte[]{ 'f', 'm', 't', ' ' });
    header.putInt(16); // size of fmt chunk
    header.putShort(WAV_FORMAT_PCM);
    header.putShort((short) channelCount);
    header.putInt(sampleRateInHz);
    header.putInt(byteRate);
    header.putShort(blockAlign);
    header.putShort(bitsPerSample);
    header.put(new byte[]{ 'd', 'a', 't', 'a' });
    header.putInt(dataLength);
    header.flip();

    return header;
}
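The returned header precedes the raw PCM payload in the finished file. A hypothetical caller inside the same class (makeWavHeader is private, so this is illustrative, not framework code) might write a complete WAV file like this, assuming pcmData holds 16 kHz mono 16-bit PCM and java.io/java.nio are imported:

// Hypothetical usage: header first, then the PCM payload.
ByteBuffer header = makeWavHeader(16000, AudioFormat.ENCODING_PCM_16BIT, 1, pcmData.length);
try (FileChannel channel = new FileOutputStream("synth.wav").getChannel()) {
    channel.write(header);
    channel.write(ByteBuffer.wrap(pcmData));
}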
Example #21
Source File: SoundTrigger.java From android_9.0.0_r45 with Apache License 2.0
/** @hide */
public RecognitionEvent(int status, int soundModelHandle, boolean captureAvailable,
        int captureSession, int captureDelayMs, int capturePreambleMs,
        boolean triggerInData, AudioFormat captureFormat, byte[] data) {
    this.status = status;
    this.soundModelHandle = soundModelHandle;
    this.captureAvailable = captureAvailable;
    this.captureSession = captureSession;
    this.captureDelayMs = captureDelayMs;
    this.capturePreambleMs = capturePreambleMs;
    this.triggerInData = triggerInData;
    this.captureFormat = captureFormat;
    this.data = data;
}
Example #22
Source File: AbstractAudioRecorder.java From speechutils with Apache License 2.0
protected int getBufferSize() {
    int minBufferSizeInBytes = SpeechRecord.getMinBufferSize(mSampleRate,
            AudioFormat.CHANNEL_IN_MONO, RESOLUTION);
    if (minBufferSizeInBytes == SpeechRecord.ERROR_BAD_VALUE) {
        throw new IllegalArgumentException(
                "SpeechRecord.getMinBufferSize: parameters not supported by hardware");
    } else if (minBufferSizeInBytes == SpeechRecord.ERROR) {
        Log.e("SpeechRecord.getMinBufferSize: unable to query hardware for output properties");
        // Fall back to 120 ms of audio; don't parenthesize 120 / 1000, or integer division yields 0.
        minBufferSizeInBytes = mSampleRate * 120 / 1000 * RESOLUTION_IN_BYTES * CHANNELS;
    }
    int bufferSize = BUFFER_SIZE_MULTIPLIER * minBufferSizeInBytes;
    Log.i("SpeechRecord buffer size: " + bufferSize + ", min size = " + minBufferSizeInBytes);
    return bufferSize;
}
Example #23
Source File: DefaultAudioSink.java From Telegram with GNU General Public License v2.0
private static AudioTrack initializeKeepSessionIdAudioTrack(int audioSessionId) {
    int sampleRate = 4000; // Equal to private AudioTrack.MIN_SAMPLE_RATE.
    int channelConfig = AudioFormat.CHANNEL_OUT_MONO;
    @C.PcmEncoding int encoding = C.ENCODING_PCM_16BIT;
    int bufferSize = 2; // Use a two byte buffer, as it is not actually used for playback.
    return new AudioTrack(C.STREAM_TYPE_DEFAULT, sampleRate, channelConfig, encoding,
            bufferSize, MODE_STATIC, audioSessionId);
}
Example #24
Source File: AudioManagerAndroid.java From android-chromium with BSD 2-Clause "Simplified" License
/**
 * Returns the minimum frame size required for audio output.
 *
 * @param sampleRate sampling rate
 * @param channels number of channels
 */
@CalledByNative
private static int getMinOutputFrameSize(int sampleRate, int channels) {
    int channelConfig;
    if (channels == 1) {
        channelConfig = AudioFormat.CHANNEL_OUT_MONO;
    } else if (channels == 2) {
        channelConfig = AudioFormat.CHANNEL_OUT_STEREO;
    } else {
        return -1;
    }
    // getMinBufferSize() returns bytes; divide by 2 (bytes per 16-bit sample)
    // and by the channel count to convert to frames.
    return AudioTrack.getMinBufferSize(
            sampleRate, channelConfig, AudioFormat.ENCODING_PCM_16BIT) / 2 / channels;
}
Example #25
Source File: AudioEncoder.java From VideoRecorder with Apache License 2.0
@Override
protected MediaCodec createEncoder() throws IOException {
    LogUtil.logd(TAG, "createEncoder");
    final MediaFormat audioFormat =
            MediaFormat.createAudioFormat(MIME_TYPE, mSampleRate, mChannelCount);
    audioFormat.setInteger(MediaFormat.KEY_AAC_PROFILE,
            MediaCodecInfo.CodecProfileLevel.AACObjectLC);
    audioFormat.setInteger(MediaFormat.KEY_CHANNEL_MASK,
            mChannelCount == 1 ? AudioFormat.CHANNEL_IN_MONO : AudioFormat.CHANNEL_IN_STEREO);
    audioFormat.setInteger(MediaFormat.KEY_BIT_RATE, mBitRate);
    audioFormat.setInteger(MediaFormat.KEY_CHANNEL_COUNT, mChannelCount);
    LogUtil.loge(TAG, "format: " + audioFormat);
    MediaCodec encoder = MediaCodec.createEncoderByType(MIME_TYPE);
    encoder.configure(audioFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
    encoder.start();
    LogUtil.logd(TAG, "createEncoder finishing");
    return encoder;
}
Example #26
Source File: SaidItService.java From echo with GNU General Public License v3.0
synchronized private void innerStartListening() {
    switch (state) {
        case STATE_READY:
            break;
        case STATE_LISTENING:
        case STATE_RECORDING:
            return;
    }
    state = STATE_LISTENING;
    Log.d(TAG, "STARTING LISTENING");

    Notification note = new Notification(0, null, System.currentTimeMillis());
    note.flags |= Notification.FLAG_NO_CLEAR;
    startForeground(42, note);

    audioThread = new HandlerThread("audioThread", Thread.MAX_PRIORITY);
    audioThread.start();
    audioHandler = new Handler(audioThread.getLooper());
    audioHandler.post(new Runnable() {
        @Override
        public void run() {
            audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC,
                    SAMPLE_RATE, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT,
                    44100 * 5 * 2); // five seconds in bytes
            audioRecord.startRecording();
        }
    });
    audioHandler.post(audioReader);
}
Example #27
Source File: RecordAudioTest.java From AndPermission with Apache License 2.0
public static int[] findAudioParameters() {
    for (int rate : RATES) {
        for (int channel : new int[]{AudioFormat.CHANNEL_IN_MONO, AudioFormat.CHANNEL_IN_STEREO}) {
            for (int format : new int[]{AudioFormat.ENCODING_PCM_8BIT, AudioFormat.ENCODING_PCM_16BIT}) {
                int buffer = AudioRecord.getMinBufferSize(rate, channel, format);
                if (buffer != AudioRecord.ERROR_BAD_VALUE) {
                    return new int[]{rate, channel, format, buffer};
                }
            }
        }
    }
    return null;
}
Example #28
Source File: MediaPlayer.java From OTTLivePlayer_vlc with MIT License
private boolean isEncoded(int encoding) {
    switch (encoding) {
        case AudioFormat.ENCODING_AC3:
        case AudioFormat.ENCODING_E_AC3:
        case 14 /* AudioFormat.ENCODING_DOLBY_TRUEHD */:
        case AudioFormat.ENCODING_DTS:
        case AudioFormat.ENCODING_DTS_HD:
            return true;
        default:
            return false;
    }
}
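Whether such a compressed stream can actually be played back directly depends on the device and the connected sink. A minimal sketch, assuming API 29+ and not part of the player code above, asks the platform about E-AC3 passthrough:

// Query direct (passthrough) playback support for an E-AC3 stream.
AudioFormat eac3 = new AudioFormat.Builder()
        .setEncoding(AudioFormat.ENCODING_E_AC3)
        .setSampleRate(48000)
        .setChannelMask(AudioFormat.CHANNEL_OUT_5POINT1)
        .build();
AudioAttributes media = new AudioAttributes.Builder()
        .setUsage(AudioAttributes.USAGE_MEDIA)
        .build();
boolean passthrough = AudioTrack.isDirectPlaybackSupported(eac3, media);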
Example #29
Source File: RecordAudioTester.java From PermissionAgent with Apache License 2.0
private static AudioRecord findAudioRecord() {
    for (int rate : RATES) {
        for (short format : new short[] {AudioFormat.ENCODING_PCM_8BIT, AudioFormat.ENCODING_PCM_16BIT}) {
            for (short channel : new short[] {AudioFormat.CHANNEL_IN_MONO, AudioFormat.CHANNEL_IN_STEREO}) {
                int buffer = AudioRecord.getMinBufferSize(rate, channel, format);
                if (buffer != AudioRecord.ERROR_BAD_VALUE) {
                    AudioRecord recorder =
                            new AudioRecord(MediaRecorder.AudioSource.MIC, rate, channel, format, buffer);
                    if (recorder.getState() == AudioRecord.STATE_INITIALIZED) return recorder;
                }
            }
        }
    }
    return null;
}
Example #30
Source File: AudioPusher.java From LivePublisher with MIT License
public AudioPusher(AudioParam param, PusherNative pusherNative) {
    super(pusherNative);
    mParam = param;
    // int channel = mParam.getChannel() == 1 ? AudioFormat.CHANNEL_IN_MONO
    //         : AudioFormat.CHANNEL_IN_STEREO;
    minBufferSize = AudioRecord.getMinBufferSize(mParam.getSampleRate(),
            AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
    audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC,
            mParam.getSampleRate(), AudioFormat.CHANNEL_IN_MONO,
            AudioFormat.ENCODING_PCM_16BIT, minBufferSize);
    mNative.setAudioOptions(mParam.getSampleRate(), mParam.getChannel());
    Log.d(TAG, "audio input:" + mNative.getInputSamples());
}