Java Code Examples for android.media.AudioTrack#play()
The following examples show how to use android.media.AudioTrack#play(). Each example is drawn from an open-source project; the source file, project, and license are noted above the code.
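Nearly every example below follows the same basic pattern: query AudioTrack.getMinBufferSize() for the smallest buffer the device will accept, construct an AudioTrack in MODE_STREAM, call play(), and then feed PCM data with write(). As a minimal sketch of that shared pattern (the 44100 Hz, mono, 16-bit configuration is an arbitrary choice for illustration, not taken from any single example):

/** Minimal sketch of the pattern the examples share; the format here is an arbitrary choice. */
private AudioTrack createAndStartTrack() {
    int sampleRate = 44100;
    // Smallest buffer (in bytes) that will successfully initialize a streaming track.
    int bufferSize = AudioTrack.getMinBufferSize(sampleRate,
            AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT);
    AudioTrack track = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRate,
            AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT,
            bufferSize, AudioTrack.MODE_STREAM);
    track.play();  // in MODE_STREAM, sound starts once write() supplies data
    return track;  // the caller should write() PCM in a loop, then stop() and release()
}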
Example 1
Source File: AudioTrackPlayerImpl.java From dcs-sdk-java with Apache License 2.0 | 7 votes |
private AudioTrack createAudioTrack(int sampleRate) {
    int encoding = AudioFormat.ENCODING_PCM_16BIT;
    // Get a buffer size that satisfies the minimum requirement.
    int minBufferSize = getMinBufferSize(sampleRate, mChannelConfig, encoding);
    Log.d(TAG, "Decoder-AudioTrack-minBufferSize=" + minBufferSize);
    AudioTrack audioTrack = new AudioTrack(mStreamType, sampleRate, mChannelConfig,
            encoding, minBufferSize, AudioTrack.MODE_STREAM);
    audioTrack.play();
    return audioTrack;
}
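Note that play() is called before any data has been written. In MODE_STREAM this is valid: the track enters the playing state immediately, and audible output only begins once the caller starts writing PCM data.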
Example 2
Source File: BlockingAudioTrack.java From android_9.0.0_r45 with Apache License 2.0 | 6 votes |
private static int writeToAudioTrack(AudioTrack audioTrack, byte[] bytes) {
    if (audioTrack.getPlayState() != AudioTrack.PLAYSTATE_PLAYING) {
        if (DBG) Log.d(TAG, "AudioTrack not playing, restarting : " + audioTrack.hashCode());
        audioTrack.play();
    }

    int count = 0;
    while (count < bytes.length) {
        // Note that we don't take bufferCopy.mOffset into account because
        // it is guaranteed to be 0.
        // Write only the bytes that remain after any partial write.
        int written = audioTrack.write(bytes, count, bytes.length - count);
        if (written <= 0) {
            break;
        }
        count += written;
    }
    return count;
}
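The loop accumulates the return value of write() because a blocking write on a streaming track can return a short transfer count, for example if the track is paused or stopped while data is being queued.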
Example 3
Source File: PWave.java From PHONK with GNU General Public License v3.0 | 6 votes |
public PWave(AppRunner appRunner) {
    super(appRunner);
    appRunner.whatIsRunning.add(this);

    // set the buffer size
    buffsize = AudioTrack.getMinBufferSize(mSampleRate,
            AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT);

    samples = new short[buffsize];

    // create an audiotrack object
    audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, mSampleRate,
            AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT,
            buffsize, AudioTrack.MODE_STREAM);

    // start audio
    audioTrack.play();
}
Example 4
Source File: AndroidAudioForJSyn.java From science-journal with Apache License 2.0 | 6 votes |
@Override
public void start() {
    minBufferSize = AudioTrack.getMinBufferSize(
            frameRate, AudioFormat.CHANNEL_OUT_STEREO, AudioFormat.ENCODING_PCM_16BIT);
    System.out.println("Audio minBufferSize = " + minBufferSize);
    bufferSize = (3 * (minBufferSize / 2)) & ~3;
    System.out.println("Audio bufferSize = " + bufferSize);
    audioTrack = new AudioTrack(
            AudioManager.STREAM_MUSIC,
            frameRate,
            AudioFormat.CHANNEL_OUT_STEREO,
            AudioFormat.ENCODING_PCM_16BIT,
            bufferSize,
            AudioTrack.MODE_STREAM);
    audioTrack.play();
}
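The expression (3 * (minBufferSize / 2)) & ~3 requests roughly 1.5 times the minimum buffer for extra headroom, then clears the two low bits so the size stays a multiple of 4 bytes, i.e. a whole number of 16-bit stereo frames.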
Example 5
Source File: StreamPlayer.java From android-sdk with Apache License 2.0 | 6 votes |
/**
 * Initializes the AudioTrack by computing the buffer size.
 *
 * @param sampleRate the sample rate for the audio to be played
 */
private void initPlayer(int sampleRate) {
    synchronized (this) {
        int bufferSize = AudioTrack.getMinBufferSize(
                sampleRate, AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT);
        if (bufferSize == AudioTrack.ERROR_BAD_VALUE) {
            throw new RuntimeException("Could not determine buffer size for audio");
        }
        audioTrack = new AudioTrack(
                AudioManager.STREAM_MUSIC,
                sampleRate,
                AudioFormat.CHANNEL_OUT_MONO,
                AudioFormat.ENCODING_PCM_16BIT,
                bufferSize,
                AudioTrack.MODE_STREAM
        );
        audioTrack.play();
    }
}
Example 6
Source File: OpusTrack.java From DeviceConnect-Android with MIT License | 6 votes |
/**
 * Creates an AudioTrack with the configured sampling rate and channel count.
 */
private void createAudioTrack() {
    int bufSize = AudioTrack.getMinBufferSize(mSamplingRate,
            mChannel == 1 ? AudioFormat.CHANNEL_OUT_MONO : AudioFormat.CHANNEL_OUT_STEREO,
            AudioFormat.ENCODING_PCM_16BIT) * 2;

    if (DEBUG) {
        Log.d(TAG, "OpusTrack::createAudioTrack");
        Log.d(TAG, "  SamplingRate: " + mSamplingRate);
        Log.d(TAG, "  Channels: " + mChannel);
        Log.d(TAG, "  AudioFormat: " + AudioFormat.ENCODING_PCM_16BIT);
        Log.d(TAG, "  BufSize: " + bufSize);
    }

    mAudioTrack = new AudioTrack(AudioManager.STREAM_MUSIC,
            mSamplingRate,
            mChannel == 1 ? AudioFormat.CHANNEL_OUT_MONO : AudioFormat.CHANNEL_OUT_STEREO,
            AudioFormat.ENCODING_PCM_16BIT,
            bufSize,
            AudioTrack.MODE_STREAM);
    mAudioTrack.play();
}
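Doubling the value from getMinBufferSize() is a common defensive choice: the minimum is only the smallest size at which the track will initialize, and a larger streaming buffer gives the writer more slack before an underrun.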
Example 7
Source File: TTSUtility.java From speech-android-sdk with Apache License 2.0 | 6 votes |
private void initPlayer() {
    stopTtsPlayer();
    // IMPORTANT: minimum required buffer size for the successful creation
    // of an AudioTrack instance in streaming mode.
    int bufferSize = AudioTrack.getMinBufferSize(sampleRate,
            AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT);

    synchronized (this) {
        audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC,
                sampleRate,
                AudioFormat.CHANNEL_OUT_MONO,
                AudioFormat.ENCODING_PCM_16BIT,
                bufferSize,
                AudioTrack.MODE_STREAM);
        if (audioTrack != null) audioTrack.play();
    }
}
Example 8
Source File: AndroidAudioForJSyn.java From jsyn with Apache License 2.0 | 5 votes |
public void start() {
    Process.setThreadPriority(-5);
    minBufferSize = AudioTrack.getMinBufferSize(frameRate,
            AudioFormat.CHANNEL_OUT_STEREO, AudioFormat.ENCODING_PCM_FLOAT);
    System.out.println("Audio minBufferSize = " + minBufferSize);
    bufferSize = (3 * (minBufferSize / 2)) & ~3;
    System.out.println("Audio bufferSize = " + bufferSize);
    audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, frameRate,
            AudioFormat.CHANNEL_OUT_STEREO, AudioFormat.ENCODING_PCM_FLOAT,
            bufferSize, AudioTrack.MODE_STREAM);
    audioTrack.play();
}
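This JSyn variant differs from Example 4 in two ways: it plays 32-bit float samples (ENCODING_PCM_FLOAT, available since API 21), and it raises the thread's scheduling priority with Process.setThreadPriority(-5) to make audio glitches less likely.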
Example 9
Source File: OpusPlayerActor.java From actor-platform with GNU Affero General Public License v3.0 | 5 votes |
protected void onPlayMessage(String fileName, float seek) {
    if (state != STATE_NONE) {
        destroyPlayer();
    }
    state = STATE_NONE;

    currentFileName = fileName;
    int res = opusLib.openOpusFile(currentFileName);
    if (res == 0) {
        callback.onError(currentFileName);
        return;
    }
    duration = opusLib.getTotalPcmDuration();
    offset = 0;

    try {
        bufferSize = AudioTrack.getMinBufferSize(48000,
                AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT);
        audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, 48000,
                AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT,
                bufferSize, AudioTrack.MODE_STREAM);
        audioTrack.play();
    } catch (Exception e) {
        e.printStackTrace();
        destroyPlayer();
        callback.onError(currentFileName);
        return;
    }

    state = STATE_STARTED;

    if (seek != 0) {
        opusLib.seekOpusFile(seek);
    }

    callback.onStart(fileName);
    self().send(new Iterate());
}
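The hard-coded 48000 Hz rate matches the Opus codec, whose decoders normally emit PCM at 48 kHz, so the AudioTrack is simply created at the decoder's output rate.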
Example 10
Source File: AudioDecoder.java From DeviceConnect-Android with MIT License | 5 votes |
/**
 * Creates an AudioTrack with the configured sampling rate and channel count.
 */
void createAudioTrack() {
    int bufSize = AudioTrack.getMinBufferSize(mSamplingRate,
            mChannelCount == 1 ? AudioFormat.CHANNEL_OUT_MONO : AudioFormat.CHANNEL_OUT_STEREO,
            AudioFormat.ENCODING_PCM_16BIT) * 2;

    mAudioTrack = new AudioTrack(AudioManager.STREAM_MUSIC,
            mSamplingRate,
            mChannelCount == 1 ? AudioFormat.CHANNEL_OUT_MONO : AudioFormat.CHANNEL_OUT_STEREO,
            AudioFormat.ENCODING_PCM_16BIT,
            bufSize,
            AudioTrack.MODE_STREAM);
    mAudioTrack.play();
}
Example 11
Source File: AndroidAudioPlayer.java From cythara with GNU General Public License v3.0 | 5 votes |
/**
 * Constructs a new AndroidAudioPlayer from an audio format, default buffer size and stream type.
 *
 * @param audioFormat The audio format of the stream that this AndroidAudioPlayer will process.
 *                    This can only be 1 channel, PCM 16 bit.
 * @param bufferSizeInSamples The requested buffer size in samples.
 * @param streamType The type of audio stream that the internal AudioTrack should use. For
 *                   example, {@link AudioManager#STREAM_MUSIC}.
 * @throws IllegalArgumentException if audioFormat is not valid or if the requested buffer size is invalid.
 * @see AudioTrack
 */
public AndroidAudioPlayer(TarsosDSPAudioFormat audioFormat, int bufferSizeInSamples, int streamType) {
    if (audioFormat.getChannels() != 1) {
        throw new IllegalArgumentException("TarsosDSP only supports mono audio channel count: " + audioFormat.getChannels());
    }

    // The requested sample rate
    int sampleRate = (int) audioFormat.getSampleRate();

    // The buffer size in bytes is twice the buffer size expressed in samples if 16bit samples are used:
    int bufferSizeInBytes = bufferSizeInSamples * audioFormat.getSampleSizeInBits() / 8;

    // From the Android API about getMinBufferSize():
    // The total size (in bytes) of the internal buffer where audio data is read from for playback.
    // If track's creation mode is MODE_STREAM, you can write data into this buffer in chunks less
    // than or equal to this size, and it is typical to use chunks of 1/2 of the total size to permit
    // double-buffering. If the track's creation mode is MODE_STATIC, this is the maximum length
    // sample, or audio clip, that can be played by this instance. See getMinBufferSize(int, int, int)
    // to determine the minimum required buffer size for the successful creation of an AudioTrack
    // instance in streaming mode. Using values smaller than getMinBufferSize() will result in an
    // initialization failure.
    int minBufferSizeInBytes = AudioTrack.getMinBufferSize(sampleRate, AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT);
    if (minBufferSizeInBytes > bufferSizeInBytes) {
        throw new IllegalArgumentException("The buffer size should be at least "
                + (minBufferSizeInBytes / (audioFormat.getSampleSizeInBits() / 8))
                + " (samples) according to AudioTrack.getMinBufferSize().");
    }

    // http://developer.android.com/reference/android/media/AudioTrack.html#AudioTrack(int, int, int, int, int, int)
    audioTrack = new AudioTrack(streamType, sampleRate, AudioFormat.CHANNEL_OUT_MONO,
            AudioFormat.ENCODING_PCM_16BIT, bufferSizeInBytes, AudioTrack.MODE_STREAM);

    audioTrack.play();
}
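Unlike the earlier examples, which quietly use whatever getMinBufferSize() returns, this constructor validates the caller's requested buffer size against the minimum and throws IllegalArgumentException if it is too small, surfacing the problem at construction time rather than as a failed AudioTrack.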
Example 12
Source File: MediaMoviePlayer.java From libcommon with Apache License 2.0 | 4 votes |
/**
 * @param source
 * @return first audio track index, -1 if not found
 */
@SuppressLint("NewApi")
protected int internal_prepare_audio(final Object source) throws IOException {
    int trackindex = -1;
    mAudioMediaExtractor = new MediaExtractor();
    if (source instanceof String) {
        mAudioMediaExtractor.setDataSource((String) source);
    } else if (source instanceof AssetFileDescriptor) {
        if (BuildCheck.isAndroid7()) {
            mAudioMediaExtractor.setDataSource((AssetFileDescriptor) source);
        } else {
            mAudioMediaExtractor.setDataSource(((AssetFileDescriptor) source).getFileDescriptor());
        }
    } else {
        // should never get here
        throw new IllegalArgumentException("unknown source type:source=" + source);
    }
    trackindex = selectTrack(mAudioMediaExtractor, "audio/");
    if (trackindex >= 0) {
        mAudioMediaExtractor.selectTrack(trackindex);
        final MediaFormat format = mAudioMediaExtractor.getTrackFormat(trackindex);
        mAudioChannels = format.getInteger(MediaFormat.KEY_CHANNEL_COUNT);
        mAudioSampleRate = format.getInteger(MediaFormat.KEY_SAMPLE_RATE);
        final int min_buf_size = AudioTrack.getMinBufferSize(mAudioSampleRate,
                (mAudioChannels == 1 ? AudioFormat.CHANNEL_OUT_MONO : AudioFormat.CHANNEL_OUT_STEREO),
                AudioFormat.ENCODING_PCM_16BIT);
        final int max_input_size = format.getInteger(MediaFormat.KEY_MAX_INPUT_SIZE);
        mAudioInputBufSize = min_buf_size > 0 ? min_buf_size * 4 : max_input_size;
        if (mAudioInputBufSize > max_input_size) mAudioInputBufSize = max_input_size;
        // Round the buffer size down to a whole number of audio frames.
        final int frameSizeInBytes = mAudioChannels * 2;
        mAudioInputBufSize = (mAudioInputBufSize / frameSizeInBytes) * frameSizeInBytes;
        if (DEBUG) Log.v(TAG, String.format("getMinBufferSize=%d,max_input_size=%d,mAudioInputBufSize=%d",
                min_buf_size, max_input_size, mAudioInputBufSize));
        //
        mAudioTrack = new AudioTrack(AudioManager.STREAM_MUSIC,
                mAudioSampleRate,
                (mAudioChannels == 1 ? AudioFormat.CHANNEL_OUT_MONO : AudioFormat.CHANNEL_OUT_STEREO),
                AudioFormat.ENCODING_PCM_16BIT,
                mAudioInputBufSize,
                AudioTrack.MODE_STREAM);
        try {
            mAudioTrack.play();
        } catch (final Exception e) {
            Log.e(TAG, "failed to start audio track playing", e);
            mAudioTrack.release();
            mAudioTrack = null;
        }
    }
    return trackindex;
}
Example 13
Source File: ListenActivity.java From protect-baby-monitor with GNU General Public License v3.0 | 4 votes |
private void streamAudio(final Socket socket) throws IllegalArgumentException, IllegalStateException, IOException {
    Log.i(TAG, "Setting up stream");

    final int frequency = 11025;
    final int channelConfiguration = AudioFormat.CHANNEL_OUT_MONO;
    final int audioEncoding = AudioFormat.ENCODING_PCM_16BIT;
    final int bufferSize = AudioTrack.getMinBufferSize(frequency, channelConfiguration, audioEncoding);
    final int byteBufferSize = bufferSize * 2;

    final AudioTrack audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC,
            frequency,
            channelConfiguration,
            audioEncoding,
            bufferSize,
            AudioTrack.MODE_STREAM);

    setVolumeControlStream(AudioManager.STREAM_MUSIC);

    final InputStream is = socket.getInputStream();
    int read = 0;

    audioTrack.play();

    try {
        final byte[] buffer = new byte[byteBufferSize];
        while (socket.isConnected() && read != -1 && !Thread.currentThread().isInterrupted()) {
            read = is.read(buffer);
            if (read > 0) {
                audioTrack.write(buffer, 0, read);
            }
        }
    } finally {
        audioTrack.stop();
        socket.close();
    }
}
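Because write() blocks in streaming mode, it paces the socket-reading loop at the audio playback rate; the loop ends when read() returns -1 at end of stream, the socket disconnects, or the thread is interrupted.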
Example 14
Source File: MainActivity.java From android-fskmodem with GNU General Public License v3.0 | 4 votes |
@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_main);

    /// INIT FSK CONFIG
    try {
        mConfig = new FSKConfig(FSKConfig.SAMPLE_RATE_44100, FSKConfig.PCM_16BIT,
                FSKConfig.CHANNELS_MONO, FSKConfig.SOFT_MODEM_MODE_4, FSKConfig.THRESHOLD_20P);
    } catch (IOException e1) {
        e1.printStackTrace();
    }

    /// INIT FSK DECODER
    mDecoder = new FSKDecoder(mConfig, new FSKDecoderCallback() {
        @Override
        public void decoded(byte[] newData) {
            final String text = new String(newData);
            runOnUiThread(new Runnable() {
                public void run() {
                    TextView view = ((TextView) findViewById(R.id.result));
                    view.setText(view.getText() + text);
                }
            });
        }
    });

    /// INIT FSK ENCODER
    mEncoder = new FSKEncoder(mConfig, new FSKEncoderCallback() {
        @Override
        public void encoded(byte[] pcm8, short[] pcm16) {
            if (mConfig.pcmFormat == FSKConfig.PCM_8BIT) {
                // 8bit buffer is populated, 16bit buffer is null
                mAudioTrack.write(pcm8, 0, pcm8.length);
                mDecoder.appendSignal(pcm8);
            } else if (mConfig.pcmFormat == FSKConfig.PCM_16BIT) {
                // 16bit buffer is populated, 8bit buffer is null
                mAudioTrack.write(pcm16, 0, pcm16.length);
                mDecoder.appendSignal(pcm16);
            }
        }
    });

    ///

    mAudioTrack = new AudioTrack(AudioManager.STREAM_MUSIC,
            mConfig.sampleRate, AudioFormat.CHANNEL_OUT_MONO,
            AudioFormat.ENCODING_PCM_16BIT, 1024, AudioTrack.MODE_STREAM);
    mAudioTrack.play();

    ///

    new Thread(mDataFeeder).start();
}
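Note that this example (and the two variants that follow) passes a fixed buffer size of 1024 bytes instead of querying getMinBufferSize(); on devices whose minimum for 44.1 kHz audio is larger, which is common, the track may fail to initialize, as the documentation quoted in Example 11 warns.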
Example 15
Source File: MainActivity.java From android-fskmodem with GNU General Public License v3.0 | 4 votes |
@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_main);

    /// INIT FSK CONFIG
    try {
        mConfig = new FSKConfig(FSKConfig.SAMPLE_RATE_44100, FSKConfig.PCM_16BIT,
                FSKConfig.CHANNELS_STEREO, FSKConfig.SOFT_MODEM_MODE_4, FSKConfig.THRESHOLD_20P);
    } catch (IOException e1) {
        e1.printStackTrace();
    }

    /// INIT FSK DECODER
    mDecoder = new FSKDecoder(mConfig, new FSKDecoderCallback() {
        @Override
        public void decoded(byte[] newData) {
            final String text = new String(newData);
            runOnUiThread(new Runnable() {
                public void run() {
                    TextView view = ((TextView) findViewById(R.id.result));
                    view.setText(view.getText() + text);
                }
            });
        }
    });

    /// INIT FSK ENCODER
    mEncoder = new FSKEncoder(mConfig, new FSKEncoderCallback() {
        @Override
        public void encoded(byte[] pcm8, short[] pcm16) {
            if (mConfig.pcmFormat == FSKConfig.PCM_8BIT) {
                // 8bit buffer is populated, 16bit buffer is null
                mAudioTrack.write(pcm8, 0, pcm8.length);
                mDecoder.appendSignal(pcm8);
            } else if (mConfig.pcmFormat == FSKConfig.PCM_16BIT) {
                // 16bit buffer is populated, 8bit buffer is null
                mAudioTrack.write(pcm16, 0, pcm16.length);
                mDecoder.appendSignal(pcm16);
            }
        }
    });

    ///

    mAudioTrack = new AudioTrack(AudioManager.STREAM_MUSIC,
            mConfig.sampleRate, AudioFormat.CHANNEL_OUT_STEREO,
            AudioFormat.ENCODING_PCM_16BIT, 1024, AudioTrack.MODE_STREAM);
    mAudioTrack.play();

    ///

    new Thread(mDataFeeder).start();
}
Example 16
Source File: MainActivity.java From android-fskmodem with GNU General Public License v3.0 | 4 votes |
@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_main);

    /// INIT FSK CONFIG
    try {
        mConfig = new FSKConfig(FSKConfig.SAMPLE_RATE_44100, FSKConfig.PCM_8BIT,
                FSKConfig.CHANNELS_MONO, FSKConfig.SOFT_MODEM_MODE_4, FSKConfig.THRESHOLD_20P);
    } catch (IOException e1) {
        e1.printStackTrace();
    }

    /// INIT FSK DECODER
    mDecoder = new FSKDecoder(mConfig, new FSKDecoderCallback() {
        @Override
        public void decoded(byte[] newData) {
            final String text = new String(newData);
            runOnUiThread(new Runnable() {
                public void run() {
                    TextView view = ((TextView) findViewById(R.id.result));
                    view.setText(view.getText() + text);
                }
            });
        }
    });

    /// INIT FSK ENCODER
    mEncoder = new FSKEncoder(mConfig, new FSKEncoderCallback() {
        @Override
        public void encoded(byte[] pcm8, short[] pcm16) {
            if (mConfig.pcmFormat == FSKConfig.PCM_8BIT) {
                // 8bit buffer is populated, 16bit buffer is null
                mAudioTrack.write(pcm8, 0, pcm8.length);
                mDecoder.appendSignal(pcm8);
            } else if (mConfig.pcmFormat == FSKConfig.PCM_16BIT) {
                // 16bit buffer is populated, 8bit buffer is null
                mAudioTrack.write(pcm16, 0, pcm16.length);
                mDecoder.appendSignal(pcm16);
            }
        }
    });

    ///

    mAudioTrack = new AudioTrack(AudioManager.STREAM_MUSIC,
            mConfig.sampleRate, AudioFormat.CHANNEL_OUT_MONO,
            AudioFormat.ENCODING_PCM_8BIT, 1024, AudioTrack.MODE_STREAM);
    mAudioTrack.play();

    ///

    new Thread(mDataFeeder).start();
}
Example 17
Source File: EmbeddedAssistant.java From sample-googleassistant with Apache License 2.0 | 4 votes |
@Override
public void onCompleted() {
    // create a new AudioTrack to workaround audio routing issues.
    AudioTrack audioTrack = new AudioTrack.Builder()
            .setAudioFormat(mAudioOutputFormat)
            .setBufferSizeInBytes(mAudioOutputBufferSize)
            .setTransferMode(AudioTrack.MODE_STREAM)
            .build();
    if (mAudioOutputDevice != null) {
        audioTrack.setPreferredDevice(mAudioOutputDevice);
    }
    audioTrack.setVolume(AudioTrack.getMaxVolume() * mVolume / 100.0f);
    audioTrack.play();
    mConversationHandler.post(new Runnable() {
        @Override
        public void run() {
            mConversationCallback.onResponseStarted();
        }
    });
    for (ByteBuffer audioData : mAssistantResponses) {
        final ByteBuffer buf = audioData;
        mConversationHandler.post(new Runnable() {
            @Override
            public void run() {
                mConversationCallback.onAudioSample(buf);
            }
        });
        audioTrack.write(buf, buf.remaining(), AudioTrack.WRITE_BLOCKING);
    }
    mAssistantResponses.clear();
    audioTrack.stop();
    audioTrack.release();

    mConversationHandler.post(new Runnable() {
        @Override
        public void run() {
            mConversationCallback.onResponseFinished();
        }
    });
    if (mMicrophoneMode == MicrophoneMode.DIALOG_FOLLOW_ON) {
        // Automatically start a new request
        startConversation();
    } else {
        // The conversation is done
        mConversationHandler.post(new Runnable() {
            @Override
            public void run() {
                mConversationCallback.onConversationFinished();
            }
        });
    }
}
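This is the only example built with AudioTrack.Builder (added in API 23), the replacement for the multi-argument constructor used above, which was deprecated in API 26. It also shows the full lifecycle: play(), blocking write() calls, then stop() and release() once the response has been drained.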