Java Code Examples for android.media.AudioManager#getProperty()
The following examples show how to use android.media.AudioManager#getProperty(). They are taken from open source projects; the source file, originating project, and license for each example are listed above its code.
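Before the project examples, here is a minimal sketch of the pattern they all share: query PROPERTY_OUTPUT_SAMPLE_RATE and PROPERTY_OUTPUT_FRAMES_PER_BUFFER, parse the returned strings, and fall back to defaults when getProperty() returns null or the API level is below 17. The helper class name and default constants below are assumptions for illustration only, not taken from any of the listed projects.

import android.content.Context;
import android.media.AudioManager;
import android.os.Build;

// Hypothetical helper class; names and defaults are illustrative, not from the projects below.
public final class AudioPropertyReader {

    // Assumed fallback values; real projects choose their own defaults.
    private static final int DEFAULT_SAMPLE_RATE_HZ = 44100;
    private static final int DEFAULT_FRAMES_PER_BUFFER = 256;

    private AudioPropertyReader() {}

    // Preferred output sample rate in Hz, or the default if it cannot be determined.
    public static int getOutputSampleRate(Context context) {
        return readIntProperty(context, AudioManager.PROPERTY_OUTPUT_SAMPLE_RATE, DEFAULT_SAMPLE_RATE_HZ);
    }

    // Preferred output buffer size in frames, or the default if it cannot be determined.
    public static int getOutputFramesPerBuffer(Context context) {
        return readIntProperty(context, AudioManager.PROPERTY_OUTPUT_FRAMES_PER_BUFFER, DEFAULT_FRAMES_PER_BUFFER);
    }

    private static int readIntProperty(Context context, String property, int fallback) {
        // AudioManager#getProperty() was added in API 17 (JELLY_BEAN_MR1).
        if (Build.VERSION.SDK_INT < Build.VERSION_CODES.JELLY_BEAN_MR1) {
            return fallback;
        }
        AudioManager am = (AudioManager) context.getSystemService(Context.AUDIO_SERVICE);
        if (am == null) {
            return fallback;
        }
        String value = am.getProperty(property);
        if (value == null) {
            return fallback;
        }
        try {
            return Integer.parseInt(value);
        } catch (NumberFormatException e) {
            return fallback;
        }
    }
}

A caller would typically use something like int rate = AudioPropertyReader.getOutputSampleRate(context); several of the examples below skip the null check and pass the parsed values straight to a native audio engine.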
Example 1
Source File: AudioTest.java From walt with Apache License 2.0 | 6 votes |
AudioTest(Context context) {
    super(context);
    playbackRepetitions = getIntPreference(context, R.string.preference_audio_out_reps, 10);
    recordingRepetitions = getIntPreference(context, R.string.preference_audio_in_reps, 5);
    threshold = getIntPreference(context, R.string.preference_audio_in_threshold, 5000);

    // Check for optimal output sample rate and buffer size
    AudioManager am = (AudioManager) context.getSystemService(Context.AUDIO_SERVICE);
    String frameRateStr = am.getProperty(AudioManager.PROPERTY_OUTPUT_SAMPLE_RATE);
    String framesPerBufferStr = am.getProperty(AudioManager.PROPERTY_OUTPUT_FRAMES_PER_BUFFER);
    logger.log("Optimal frame rate is: " + frameRateStr);
    logger.log("Optimal frames per buffer is: " + framesPerBufferStr);

    // Convert to ints
    frameRate = Integer.parseInt(frameRateStr);
    framesPerBuffer = Integer.parseInt(framesPerBufferStr);

    // Create the audio engine
    createEngine();
    createBufferQueueAudioPlayer(frameRate, framesPerBuffer);
    logger.log("Audio engine created");
}
Example 2
Source File: AudioManagerAndroid.java From droidkit-webrtc with BSD 3-Clause "New" or "Revised" License | 6 votes |
@SuppressWarnings("unused") private AudioManagerAndroid(Context context) { AudioManager audioManager = (AudioManager) context.getSystemService(Context.AUDIO_SERVICE); mNativeOutputSampleRate = DEFAULT_SAMPLING_RATE; mAudioLowLatencyOutputFrameSize = DEFAULT_FRAMES_PER_BUFFER; if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.JELLY_BEAN_MR1) { String sampleRateString = audioManager.getProperty( AudioManager.PROPERTY_OUTPUT_SAMPLE_RATE); if (sampleRateString != null) { mNativeOutputSampleRate = Integer.parseInt(sampleRateString); } String framesPerBuffer = audioManager.getProperty( AudioManager.PROPERTY_OUTPUT_FRAMES_PER_BUFFER); if (framesPerBuffer != null) { mAudioLowLatencyOutputFrameSize = Integer.parseInt(framesPerBuffer); } } mAudioLowLatencySupported = context.getPackageManager().hasSystemFeature( PackageManager.FEATURE_AUDIO_LOW_LATENCY); }
Example 3
Source File: AudioManagerAndroid.java From webrtc-app-mono with BSD 3-Clause "New" or "Revised" License | 6 votes |
@SuppressWarnings("unused") private AudioManagerAndroid(Context context) { AudioManager audioManager = (AudioManager) context.getSystemService(Context.AUDIO_SERVICE); mNativeOutputSampleRate = DEFAULT_SAMPLING_RATE; mAudioLowLatencyOutputFrameSize = DEFAULT_FRAMES_PER_BUFFER; if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.JELLY_BEAN_MR1) { String sampleRateString = audioManager.getProperty( AudioManager.PROPERTY_OUTPUT_SAMPLE_RATE); if (sampleRateString != null) { mNativeOutputSampleRate = Integer.parseInt(sampleRateString); } String framesPerBuffer = audioManager.getProperty( AudioManager.PROPERTY_OUTPUT_FRAMES_PER_BUFFER); if (framesPerBuffer != null) { mAudioLowLatencyOutputFrameSize = Integer.parseInt(framesPerBuffer); } } mAudioLowLatencySupported = context.getPackageManager().hasSystemFeature( PackageManager.FEATURE_AUDIO_LOW_LATENCY); }
Example 4
Source File: WebRtcAudioManager.java From webrtc_android with MIT License | 5 votes |
private static int getSampleRateForApiLevel(AudioManager audioManager) {
    if (Build.VERSION.SDK_INT < 17) {
        return DEFAULT_SAMPLE_RATE_HZ;
    }
    String sampleRateString = audioManager.getProperty(AudioManager.PROPERTY_OUTPUT_SAMPLE_RATE);
    return (sampleRateString == null) ? DEFAULT_SAMPLE_RATE_HZ : Integer.parseInt(sampleRateString);
}
Example 5
Source File: WebRtcAudioManager.java From webrtc_android with MIT License | 5 votes |
private static int getLowLatencyFramesPerBuffer(AudioManager audioManager) {
    if (Build.VERSION.SDK_INT < 17) {
        return DEFAULT_FRAME_PER_BUFFER;
    }
    String framesPerBuffer =
            audioManager.getProperty(AudioManager.PROPERTY_OUTPUT_FRAMES_PER_BUFFER);
    return framesPerBuffer == null ? DEFAULT_FRAME_PER_BUFFER : Integer.parseInt(framesPerBuffer);
}
Example 6
Source File: VoIPBaseService.java From TelePlus-Android with GNU General Public License v2.0 | 4 votes |
@Override
public void onCreate() {
    super.onCreate();
    if (BuildVars.LOGS_ENABLED) {
        FileLog.d("=============== VoIPService STARTING ===============");
    }
    AudioManager am = (AudioManager) getSystemService(AUDIO_SERVICE);
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1
            && am.getProperty(AudioManager.PROPERTY_OUTPUT_FRAMES_PER_BUFFER) != null) {
        int outFramesPerBuffer = Integer.parseInt(am.getProperty(AudioManager.PROPERTY_OUTPUT_FRAMES_PER_BUFFER));
        VoIPController.setNativeBufferSize(outFramesPerBuffer);
    } else {
        VoIPController.setNativeBufferSize(AudioTrack.getMinBufferSize(48000, AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT) / 2);
    }
    try {
        cpuWakelock = ((PowerManager) getSystemService(POWER_SERVICE)).newWakeLock(PowerManager.PARTIAL_WAKE_LOCK, "telegram-voip");
        cpuWakelock.acquire();

        btAdapter = am.isBluetoothScoAvailableOffCall() ? BluetoothAdapter.getDefaultAdapter() : null;

        IntentFilter filter = new IntentFilter();
        filter.addAction(ConnectivityManager.CONNECTIVITY_ACTION);
        if (!USE_CONNECTION_SERVICE) {
            filter.addAction(ACTION_HEADSET_PLUG);
            if (btAdapter != null) {
                filter.addAction(BluetoothHeadset.ACTION_CONNECTION_STATE_CHANGED);
                filter.addAction(AudioManager.ACTION_SCO_AUDIO_STATE_UPDATED);
            }
            filter.addAction(TelephonyManager.ACTION_PHONE_STATE_CHANGED);
        }
        registerReceiver(receiver, filter);

        soundPool = new SoundPool(1, AudioManager.STREAM_VOICE_CALL, 0);
        spConnectingId = soundPool.load(this, R.raw.voip_connecting, 1);
        spRingbackID = soundPool.load(this, R.raw.voip_ringback, 1);
        spFailedID = soundPool.load(this, R.raw.voip_failed, 1);
        spEndId = soundPool.load(this, R.raw.voip_end, 1);
        spBusyId = soundPool.load(this, R.raw.voip_busy, 1);

        am.registerMediaButtonEventReceiver(new ComponentName(this, VoIPMediaButtonReceiver.class));

        if (!USE_CONNECTION_SERVICE && btAdapter != null && btAdapter.isEnabled()) {
            int headsetState = btAdapter.getProfileConnectionState(BluetoothProfile.HEADSET);
            updateBluetoothHeadsetState(headsetState == BluetoothProfile.STATE_CONNECTED);
            //if (headsetState == BluetoothProfile.STATE_CONNECTED)
            //    am.setBluetoothScoOn(true);
            for (StateListener l : stateListeners) {
                l.onAudioSettingsChanged();
            }
        }
    } catch (Exception x) {
        if (BuildVars.LOGS_ENABLED) {
            FileLog.e("error initializing voip controller", x);
        }
        callFailed();
    }
}
Example 7
Source File: SnapclientService.java From snapdroid with GNU General Public License v3.0 | 4 votes |
private void startProcess() throws IOException {
    Log.d(TAG, "startProcess");
    String player = "oboe";
    String configuredEngine = Settings.getInstance(getApplicationContext()).getAudioEngine();
    if (configuredEngine.equals("OpenSL"))
        player = "opensl";
    else if (configuredEngine.equals("Oboe"))
        player = "oboe";
    else {
        if (Build.VERSION.SDK_INT < Build.VERSION_CODES.O)
            player = "opensl";
        else
            player = "oboe";
    }

    String rate = null;
    String fpb = null;
    String sampleformat = "*:16:*";
    AudioManager audioManager = (AudioManager) this.getSystemService(Context.AUDIO_SERVICE);
    if ((Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1)
            && Settings.getInstance(getApplicationContext()).doResample()) {
        rate = audioManager.getProperty(AudioManager.PROPERTY_OUTPUT_SAMPLE_RATE);
        fpb = audioManager.getProperty(AudioManager.PROPERTY_OUTPUT_FRAMES_PER_BUFFER);
        sampleformat = rate + ":16:*";
    }
    Log.i(TAG, "Configured engine: " + configuredEngine + ", active engine: " + player + ", sampleformat: " + sampleformat);

    ProcessBuilder pb = new ProcessBuilder()
            .command(this.getApplicationInfo().nativeLibraryDir + "/libsnapclient.so",
                    "-h", host, "-p", Integer.toString(port),
                    "--hostID", getUniqueId(this.getApplicationContext()),
                    "--player", player,
                    "--sampleformat", sampleformat,
                    "--logfilter", "*:info,Stats:debug")
            .redirectErrorStream(true);
    Map<String, String> env = pb.environment();
    if (rate != null)
        env.put("SAMPLE_RATE", rate);
    if (fpb != null)
        env.put("FRAMES_PER_BUFFER", fpb);

    process = pb.start();
    Thread reader = new Thread(new Runnable() {
        @Override
        public void run() {
            BufferedReader bufferedReader = new BufferedReader(
                    new InputStreamReader(process.getInputStream()));
            String line;
            try {
                while ((line = bufferedReader.readLine()) != null) {
                    log(line);
                }
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    });
    logReceived = false;
    reader.start();
}
Example 8
Source File: VoIPBaseService.java From TelePlus-Android with GNU General Public License v2.0 | 4 votes |
@Override
public void onCreate() {
    super.onCreate();
    if (BuildVars.LOGS_ENABLED) {
        FileLog.d("=============== VoIPService STARTING ===============");
    }
    AudioManager am = (AudioManager) getSystemService(AUDIO_SERVICE);
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1
            && am.getProperty(AudioManager.PROPERTY_OUTPUT_FRAMES_PER_BUFFER) != null) {
        int outFramesPerBuffer = Integer.parseInt(am.getProperty(AudioManager.PROPERTY_OUTPUT_FRAMES_PER_BUFFER));
        VoIPController.setNativeBufferSize(outFramesPerBuffer);
    } else {
        VoIPController.setNativeBufferSize(AudioTrack.getMinBufferSize(48000, AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT) / 2);
    }
    try {
        cpuWakelock = ((PowerManager) getSystemService(POWER_SERVICE)).newWakeLock(PowerManager.PARTIAL_WAKE_LOCK, "telegram-voip");
        cpuWakelock.acquire();

        btAdapter = am.isBluetoothScoAvailableOffCall() ? BluetoothAdapter.getDefaultAdapter() : null;

        IntentFilter filter = new IntentFilter();
        filter.addAction(ConnectivityManager.CONNECTIVITY_ACTION);
        if (!USE_CONNECTION_SERVICE) {
            filter.addAction(ACTION_HEADSET_PLUG);
            if (btAdapter != null) {
                filter.addAction(BluetoothHeadset.ACTION_CONNECTION_STATE_CHANGED);
                filter.addAction(AudioManager.ACTION_SCO_AUDIO_STATE_UPDATED);
            }
            filter.addAction(TelephonyManager.ACTION_PHONE_STATE_CHANGED);
        }
        registerReceiver(receiver, filter);

        soundPool = new SoundPool(1, AudioManager.STREAM_VOICE_CALL, 0);
        spConnectingId = soundPool.load(this, R.raw.voip_connecting, 1);
        spRingbackID = soundPool.load(this, R.raw.voip_ringback, 1);
        spFailedID = soundPool.load(this, R.raw.voip_failed, 1);
        spEndId = soundPool.load(this, R.raw.voip_end, 1);
        spBusyId = soundPool.load(this, R.raw.voip_busy, 1);

        am.registerMediaButtonEventReceiver(new ComponentName(this, VoIPMediaButtonReceiver.class));

        if (!USE_CONNECTION_SERVICE && btAdapter != null && btAdapter.isEnabled()) {
            int headsetState = btAdapter.getProfileConnectionState(BluetoothProfile.HEADSET);
            updateBluetoothHeadsetState(headsetState == BluetoothProfile.STATE_CONNECTED);
            //if (headsetState == BluetoothProfile.STATE_CONNECTED)
            //    am.setBluetoothScoOn(true);
            for (StateListener l : stateListeners) {
                l.onAudioSettingsChanged();
            }
        }
    } catch (Exception x) {
        if (BuildVars.LOGS_ENABLED) {
            FileLog.e("error initializing voip controller", x);
        }
        callFailed();
    }
}
Example 9
Source File: MidiSynthDeviceService.java From android-midisuite with Apache License 2.0 | 4 votes |
public void queryOptimalAudioSettings() {
    AudioManager audioManager = (AudioManager) getSystemService(Context.AUDIO_SERVICE);
    String text = audioManager.getProperty(AudioManager.PROPERTY_OUTPUT_FRAMES_PER_BUFFER);
    int framesPerBlock = Integer.parseInt(text);
    mSynthEngine.setFramesPerBlock(framesPerBlock);
}
Example 10
Source File: VoIPBaseService.java From Telegram-FOSS with GNU General Public License v2.0 | 4 votes |
@Override
public void onCreate() {
    super.onCreate();
    if (BuildVars.LOGS_ENABLED) {
        FileLog.d("=============== VoIPService STARTING ===============");
    }
    try {
        AudioManager am = (AudioManager) getSystemService(AUDIO_SERVICE);
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1
                && am.getProperty(AudioManager.PROPERTY_OUTPUT_FRAMES_PER_BUFFER) != null) {
            int outFramesPerBuffer = Integer.parseInt(am.getProperty(AudioManager.PROPERTY_OUTPUT_FRAMES_PER_BUFFER));
            TgVoip.setBufferSize(outFramesPerBuffer);
        } else {
            TgVoip.setBufferSize(AudioTrack.getMinBufferSize(48000, AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT) / 2);
        }

        cpuWakelock = ((PowerManager) getSystemService(POWER_SERVICE)).newWakeLock(PowerManager.PARTIAL_WAKE_LOCK, "telegram-voip");
        cpuWakelock.acquire();

        btAdapter = am.isBluetoothScoAvailableOffCall() ? BluetoothAdapter.getDefaultAdapter() : null;

        IntentFilter filter = new IntentFilter();
        filter.addAction(ConnectivityManager.CONNECTIVITY_ACTION);
        if (!USE_CONNECTION_SERVICE) {
            filter.addAction(ACTION_HEADSET_PLUG);
            if (btAdapter != null) {
                filter.addAction(BluetoothHeadset.ACTION_CONNECTION_STATE_CHANGED);
                filter.addAction(AudioManager.ACTION_SCO_AUDIO_STATE_UPDATED);
            }
            filter.addAction(TelephonyManager.ACTION_PHONE_STATE_CHANGED);
        }
        registerReceiver(receiver, filter);

        soundPool = new SoundPool(1, AudioManager.STREAM_VOICE_CALL, 0);
        spConnectingId = soundPool.load(this, R.raw.voip_connecting, 1);
        spRingbackID = soundPool.load(this, R.raw.voip_ringback, 1);
        spFailedID = soundPool.load(this, R.raw.voip_failed, 1);
        spEndId = soundPool.load(this, R.raw.voip_end, 1);
        spBusyId = soundPool.load(this, R.raw.voip_busy, 1);

        am.registerMediaButtonEventReceiver(new ComponentName(this, VoIPMediaButtonReceiver.class));

        if (!USE_CONNECTION_SERVICE && btAdapter != null && btAdapter.isEnabled()) {
            int headsetState = btAdapter.getProfileConnectionState(BluetoothProfile.HEADSET);
            updateBluetoothHeadsetState(headsetState == BluetoothProfile.STATE_CONNECTED);
            //if (headsetState == BluetoothProfile.STATE_CONNECTED)
            //    am.setBluetoothScoOn(true);
            for (StateListener l : stateListeners) {
                l.onAudioSettingsChanged();
            }
        }
    } catch (Exception x) {
        if (BuildVars.LOGS_ENABLED) {
            FileLog.e("error initializing voip controller", x);
        }
        callFailed();
    }
}
Example 11
Source File: VoIPBaseService.java From Telegram with GNU General Public License v2.0 | 4 votes |
@Override
public void onCreate() {
    super.onCreate();
    if (BuildVars.LOGS_ENABLED) {
        FileLog.d("=============== VoIPService STARTING ===============");
    }
    try {
        AudioManager am = (AudioManager) getSystemService(AUDIO_SERVICE);
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1
                && am.getProperty(AudioManager.PROPERTY_OUTPUT_FRAMES_PER_BUFFER) != null) {
            int outFramesPerBuffer = Integer.parseInt(am.getProperty(AudioManager.PROPERTY_OUTPUT_FRAMES_PER_BUFFER));
            TgVoip.setBufferSize(outFramesPerBuffer);
        } else {
            TgVoip.setBufferSize(AudioTrack.getMinBufferSize(48000, AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT) / 2);
        }

        cpuWakelock = ((PowerManager) getSystemService(POWER_SERVICE)).newWakeLock(PowerManager.PARTIAL_WAKE_LOCK, "telegram-voip");
        cpuWakelock.acquire();

        btAdapter = am.isBluetoothScoAvailableOffCall() ? BluetoothAdapter.getDefaultAdapter() : null;

        IntentFilter filter = new IntentFilter();
        filter.addAction(ConnectivityManager.CONNECTIVITY_ACTION);
        if (!USE_CONNECTION_SERVICE) {
            filter.addAction(ACTION_HEADSET_PLUG);
            if (btAdapter != null) {
                filter.addAction(BluetoothHeadset.ACTION_CONNECTION_STATE_CHANGED);
                filter.addAction(AudioManager.ACTION_SCO_AUDIO_STATE_UPDATED);
            }
            filter.addAction(TelephonyManager.ACTION_PHONE_STATE_CHANGED);
        }
        registerReceiver(receiver, filter);

        soundPool = new SoundPool(1, AudioManager.STREAM_VOICE_CALL, 0);
        spConnectingId = soundPool.load(this, R.raw.voip_connecting, 1);
        spRingbackID = soundPool.load(this, R.raw.voip_ringback, 1);
        spFailedID = soundPool.load(this, R.raw.voip_failed, 1);
        spEndId = soundPool.load(this, R.raw.voip_end, 1);
        spBusyId = soundPool.load(this, R.raw.voip_busy, 1);

        am.registerMediaButtonEventReceiver(new ComponentName(this, VoIPMediaButtonReceiver.class));

        if (!USE_CONNECTION_SERVICE && btAdapter != null && btAdapter.isEnabled()) {
            int headsetState = btAdapter.getProfileConnectionState(BluetoothProfile.HEADSET);
            updateBluetoothHeadsetState(headsetState == BluetoothProfile.STATE_CONNECTED);
            //if (headsetState == BluetoothProfile.STATE_CONNECTED)
            //    am.setBluetoothScoOn(true);
            for (StateListener l : stateListeners) {
                l.onAudioSettingsChanged();
            }
        }
    } catch (Exception x) {
        if (BuildVars.LOGS_ENABLED) {
            FileLog.e("error initializing voip controller", x);
        }
        callFailed();
    }
}