org.webrtc.Logging Java Examples
The following examples show how to use org.webrtc.Logging. Each example notes its source file, the project it was taken from, and that project's license.
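Before the examples, here is a minimal sketch of how org.webrtc.Logging is typically configured at application startup. The class and TAG names below are illustrative assumptions, not taken from any example; the static methods (enableLogToDebugOutput, enableLogTimeStamps, and the d/w/e helpers) exist in recent WebRTC Android releases, but verify them against the SDK version you ship.

import org.webrtc.Logging;

public final class LoggingSetup {
  // Hypothetical tag; any short identifier for your app works.
  private static final String TAG = "AppRtcLogging";

  public static void configure() {
    // Route native WebRTC logs to the debug output (logcat on Android),
    // keeping messages at LS_INFO severity and above.
    Logging.enableLogToDebugOutput(Logging.Severity.LS_INFO);
    // Prefix each native log line with a timestamp.
    Logging.enableLogTimeStamps();
    // The static helpers used throughout the examples below:
    Logging.d(TAG, "debug message");
    Logging.w(TAG, "warning message");
    Logging.e(TAG, "error message");
  }
}

Note that enableLogToDebugOutput controls the native WebRTC log stream, while Logging.d/w/e log from Java code; on Android both typically end up in logcat.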
Example #1
Source File: WebRtcAudioManager.java (from webrtc_android, MIT License)
private int getNativeOutputSampleRate() {
  // Override this if we're running on an old emulator image which only
  // supports 8 kHz and doesn't support PROPERTY_OUTPUT_SAMPLE_RATE.
  if (WebRtcAudioUtils.runningOnEmulator()) {
    Logging.d(TAG, "Running emulator, overriding sample rate to 8 kHz.");
    return 8000;
  }
  // The default can be overridden by WebRtcAudioUtils.setDefaultSampleRateHz().
  // If so, use that value and return here.
  if (WebRtcAudioUtils.isDefaultSampleRateOverridden()) {
    Logging.d(TAG, "Default sample rate is overridden to "
        + WebRtcAudioUtils.getDefaultSampleRateHz() + " Hz");
    return WebRtcAudioUtils.getDefaultSampleRateHz();
  }
  // No overrides available. Deliver best possible estimate based on default
  // Android AudioManager APIs.
  final int sampleRateHz = getSampleRateForApiLevel();
  Logging.d(TAG, "Sample rate is set to " + sampleRateHz + " Hz");
  return sampleRateHz;
}
Example #2
Source File: WebRtcAudioRecord.java (from webrtc_android, MIT License)
private boolean startRecording() {
  Logging.d(TAG, "startRecording");
  assertTrue(audioRecord != null);
  assertTrue(audioThread == null);
  try {
    audioRecord.startRecording();
  } catch (IllegalStateException e) {
    reportWebRtcAudioRecordStartError(AudioRecordStartErrorCode.AUDIO_RECORD_START_EXCEPTION,
        "AudioRecord.startRecording failed: " + e.getMessage());
    return false;
  }
  if (audioRecord.getRecordingState() != AudioRecord.RECORDSTATE_RECORDING) {
    reportWebRtcAudioRecordStartError(
        AudioRecordStartErrorCode.AUDIO_RECORD_START_STATE_MISMATCH,
        "AudioRecord.startRecording failed - incorrect state: "
            + audioRecord.getRecordingState());
    return false;
  }
  audioThread = new AudioRecordThread("AudioRecordJavaThread");
  audioThread.start();
  return true;
}
Example #3
Source File: WebRtcAudioRecord.java (from webrtc_android, MIT License)
@CalledByNative
private boolean startRecording() {
  Logging.d(TAG, "startRecording");
  assertTrue(audioRecord != null);
  assertTrue(audioThread == null);
  try {
    audioRecord.startRecording();
  } catch (IllegalStateException e) {
    reportWebRtcAudioRecordStartError(AudioRecordStartErrorCode.AUDIO_RECORD_START_EXCEPTION,
        "AudioRecord.startRecording failed: " + e.getMessage());
    return false;
  }
  if (audioRecord.getRecordingState() != AudioRecord.RECORDSTATE_RECORDING) {
    reportWebRtcAudioRecordStartError(
        AudioRecordStartErrorCode.AUDIO_RECORD_START_STATE_MISMATCH,
        "AudioRecord.startRecording failed - incorrect state: "
            + audioRecord.getRecordingState());
    return false;
  }
  audioThread = new AudioRecordThread("AudioRecordJavaThread");
  audioThread.start();
  return true;
}
Example #4
Source File: Mp4Recorder.java (from VideoCRE, MIT License)
@Override
public void onOutputFormatChanged(final MediaCodec codec, final MediaFormat format) {
  if (mMuxerStarted) {
    throw new RuntimeException("format changed twice");
  }
  String name = format.getString(MediaFormat.KEY_MIME);
  int width = format.getInteger(MediaFormat.KEY_WIDTH);
  int height = format.getInteger(MediaFormat.KEY_HEIGHT);
  Logging.d(TAG, "onOutputFormatChanged " + name + " " + width + "x" + height);
  mTrackIndex = mMediaMuxer.addTrack(format);
  mMediaMuxer.start();
  mMuxerStarted = true;
}
Example #5
Source File: WebRtcAudioTrack.java (from webrtc_android, MIT License)
@CalledByNative
private boolean stopPlayout() {
  threadChecker.checkIsOnValidThread();
  volumeLogger.stop();
  Logging.d(TAG, "stopPlayout");
  assertTrue(audioThread != null);
  logUnderrunCount();
  audioThread.stopThread();
  Logging.d(TAG, "Stopping the AudioTrackThread...");
  audioThread.interrupt();
  if (!ThreadUtils.joinUninterruptibly(audioThread, AUDIO_TRACK_THREAD_JOIN_TIMEOUT_MS)) {
    Logging.e(TAG, "Join of AudioTrackThread timed out.");
    WebRtcAudioUtils.logAudioState(TAG, context, audioManager);
  }
  Logging.d(TAG, "AudioTrackThread has now been stopped.");
  audioThread = null;
  releaseAudioResources();
  return true;
}
Example #6
Source File: WebRtcAudioUtils.java (from webrtc_android, MIT License)
private static void logAudioStateVolume(String tag, AudioManager audioManager) {
  final int[] streams = {AudioManager.STREAM_VOICE_CALL, AudioManager.STREAM_MUSIC,
      AudioManager.STREAM_RING, AudioManager.STREAM_ALARM, AudioManager.STREAM_NOTIFICATION,
      AudioManager.STREAM_SYSTEM};
  Logging.d(tag, "Audio State: ");
  // Some devices may not have volume controls and might use a fixed volume.
  boolean fixedVolume = isVolumeFixed(audioManager);
  Logging.d(tag, " fixed volume=" + fixedVolume);
  if (!fixedVolume) {
    for (int stream : streams) {
      StringBuilder info = new StringBuilder();
      info.append(" " + streamTypeToString(stream) + ": ");
      info.append("volume=").append(audioManager.getStreamVolume(stream));
      info.append(", max=").append(audioManager.getStreamMaxVolume(stream));
      logIsStreamMute(tag, audioManager, stream, info);
      Logging.d(tag, info.toString());
    }
  }
}
Example #7
Source File: TrackWindowMgr.java (from QNRTC-Android, Apache License 2.0)
public void addTrackInfo(String userId, List<QNTrackInfo> trackInfoList) {
  if (mTrackCandidateWins.size() == 0) {
    Logging.e(TAG, "There were more than 9 published users in the room, with no unUsedWindow to draw.");
    return;
  }
  UserTrackView userTrackView = mUserWindowMap.get(userId);
  if (userTrackView != null) {
    // The user is already displayed on screen.
    userTrackView.onAddTrackInfo(trackInfoList);
  } else {
    // Allocate a new track window.
    userTrackView = mTrackCandidateWins.remove(0);
    mUserWindowMap.put(userId, userTrackView, userId.equals(mCurrentUserId));
    userTrackView.setUserTrackInfo(mEngine, userId, trackInfoList);
    userTrackView.changeViewBackgroundByPos(mUserWindowMap.size());
    userTrackView.setVisibility(View.VISIBLE);
    // Update the whole layout.
    updateTrackWindowsLayout();
  }
}
Example #8
Source File: WebRtcAudioRecord.java (from webrtc_android, MIT License)
private void logMainParametersExtended() {
  if (Build.VERSION.SDK_INT >= 23) {
    Logging.d(TAG, "AudioRecord: "
        // The frame count of the native AudioRecord buffer.
        + "buffer size in frames: " + audioRecord.getBufferSizeInFrames());
  }
}
Example #9
Source File: WebRtcAudioRecord.java (from webrtc_android, MIT License)
private void reportWebRtcAudioRecordStartError(
    AudioRecordStartErrorCode errorCode, String errorMessage) {
  Logging.e(TAG, "Start recording error: " + errorCode + ". " + errorMessage);
  WebRtcAudioUtils.logAudioState(TAG);
  if (errorCallback != null) {
    errorCallback.onWebRtcAudioRecordStartError(errorCode, errorMessage);
  }
}
Example #10
Source File: WebRtcAudioTrack.java (from webrtc_android, MIT License)
private void logBufferSizeInFrames() {
  if (Build.VERSION.SDK_INT >= 23) {
    Logging.d(TAG, "AudioTrack: "
        // The effective size of the AudioTrack buffer that the app writes to.
        + "buffer size in frames: " + audioTrack.getBufferSizeInFrames());
  }
}
Example #11
Source File: JavaAudioDeviceModule.java (from webrtc_android, MIT License)
/**
 * Control if the built-in HW acoustic echo canceler should be used or not. The default is on if
 * it is supported. It is possible to query support by calling
 * isBuiltInAcousticEchoCancelerSupported().
 */
// Sets whether the built-in acoustic echo canceler is enabled; on by default if supported.
public Builder setUseHardwareAcousticEchoCanceler(boolean useHardwareAcousticEchoCanceler) {
  if (useHardwareAcousticEchoCanceler && !isBuiltInAcousticEchoCancelerSupported()) {
    Logging.e(TAG, "HW AEC not supported");
    useHardwareAcousticEchoCanceler = false;
  }
  this.useHardwareAcousticEchoCanceler = useHardwareAcousticEchoCanceler;
  return this;
}
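For context, a minimal sketch of how this Builder method is typically invoked. The builder chain below is an assumption based on the public JavaAudioDeviceModule API in the official Android SDK (org.webrtc.audio package, builder(Context) factory); createAudioDeviceModule() itself appears as Example #17 below.

import android.content.Context;
import org.webrtc.audio.AudioDeviceModule;
import org.webrtc.audio.JavaAudioDeviceModule;

class AdmFactory {
  static AudioDeviceModule createAdm(Context appContext) {
    // Disable the hardware AEC so WebRTC's software echo canceller is used,
    // while keeping the hardware noise suppressor where the device supports it.
    return JavaAudioDeviceModule.builder(appContext)
        .setUseHardwareAcousticEchoCanceler(false)
        .setUseHardwareNoiseSuppressor(true)
        .createAudioDeviceModule();
  }
}

The returned module is usually handed to PeerConnectionFactory.builder().setAudioDeviceModule(...); per the Javadoc shown in Example #17, the caller keeps ownership and must call release() when done.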
Example #12
Source File: WebRtcAudioTrack.java (from webrtc_android, MIT License)
/** Get current volume level for a phone call audio stream. */
@CalledByNative
private int getStreamVolume() {
  threadChecker.checkIsOnValidThread();
  Logging.d(TAG, "getStreamVolume");
  return audioManager.getStreamVolume(AudioManager.STREAM_VOICE_CALL);
}
Example #13
Source File: WebRtcAudioTrack.java (from webrtc_android, MIT License)
@CalledByNative
private boolean setStreamVolume(int volume) {
  threadChecker.checkIsOnValidThread();
  Logging.d(TAG, "setStreamVolume(" + volume + ")");
  if (isVolumeFixed()) {
    Logging.e(TAG, "The device implements a fixed volume policy.");
    return false;
  }
  audioManager.setStreamVolume(AudioManager.STREAM_VOICE_CALL, volume, 0);
  return true;
}
Example #14
Source File: VolumeLogger.java (from webrtc_android, MIT License)
@Override
public void run() {
  final int mode = audioManager.getMode();
  if (mode == AudioManager.MODE_RINGTONE) {
    Logging.d(TAG, "STREAM_RING stream volume: "
        + audioManager.getStreamVolume(AudioManager.STREAM_RING)
        + " (max=" + maxRingVolume + ")");
  } else if (mode == AudioManager.MODE_IN_COMMUNICATION) {
    Logging.d(TAG, "VOICE_CALL stream volume: "
        + audioManager.getStreamVolume(AudioManager.STREAM_VOICE_CALL)
        + " (max=" + maxVoiceCallVolume + ")");
  }
}
Example #15
Source File: WebRtcAudioTrack.java (from webrtc_android, MIT License)
@CalledByNative
private boolean startPlayout() {
  threadChecker.checkIsOnValidThread();
  volumeLogger.start();
  Logging.d(TAG, "startPlayout");
  assertTrue(audioTrack != null);
  assertTrue(audioThread == null);
  // Starts playing an audio track.
  try {
    audioTrack.play();
  } catch (IllegalStateException e) {
    reportWebRtcAudioTrackStartError(AudioTrackStartErrorCode.AUDIO_TRACK_START_EXCEPTION,
        "AudioTrack.play failed: " + e.getMessage());
    releaseAudioResources();
    return false;
  }
  if (audioTrack.getPlayState() != AudioTrack.PLAYSTATE_PLAYING) {
    reportWebRtcAudioTrackStartError(AudioTrackStartErrorCode.AUDIO_TRACK_START_STATE_MISMATCH,
        "AudioTrack.play failed - incorrect state: " + audioTrack.getPlayState());
    releaseAudioResources();
    return false;
  }
  // Create and start a new high-priority thread which calls AudioTrack.write()
  // and where we also call the native nativeGetPlayoutData() callback to
  // request decoded audio from WebRTC.
  audioThread = new AudioTrackThread("AudioTrackJavaThread");
  audioThread.start();
  return true;
}
Example #16
Source File: CallActivity.java (from sample-videoRTC, Apache License 2.0)
@Override
synchronized public void renderFrame(VideoRenderer.I420Frame frame) {
  if (target == null) {
    Logging.d(TAG, "Dropping frame in proxy because target is null.");
    VideoRenderer.renderFrameDone(frame);
    return;
  }
  target.renderFrame(frame);
}
Example #17
Source File: JavaAudioDeviceModule.java (from webrtc_android, MIT License)
/**
 * Construct an AudioDeviceModule based on the supplied arguments. The caller takes ownership
 * and is responsible for calling release().
 */
public AudioDeviceModule createAudioDeviceModule() {
  Logging.d(TAG, "createAudioDeviceModule");
  if (useHardwareNoiseSuppressor) {
    Logging.d(TAG, "HW NS will be used.");
  } else {
    if (isBuiltInNoiseSuppressorSupported()) {
      Logging.d(TAG, "Overriding default behavior; now using WebRTC NS!");
    }
    Logging.d(TAG, "HW NS will not be used.");
  }
  if (useHardwareAcousticEchoCanceler) {
    Logging.d(TAG, "HW AEC will be used.");
  } else {
    if (isBuiltInAcousticEchoCancelerSupported()) {
      Logging.d(TAG, "Overriding default behavior; now using WebRTC AEC!");
    }
    Logging.d(TAG, "HW AEC will not be used.");
  }
  final WebRtcAudioRecord audioInput = new WebRtcAudioRecord(context, audioManager, audioSource,
      audioFormat, audioRecordErrorCallback, samplesReadyCallback,
      useHardwareAcousticEchoCanceler, useHardwareNoiseSuppressor);
  final WebRtcAudioTrack audioOutput =
      new WebRtcAudioTrack(context, audioManager, audioTrackErrorCallback);
  return new JavaAudioDeviceModule(context, audioManager, audioInput, audioOutput,
      inputSampleRate, outputSampleRate, useStereoInput, useStereoOutput);
}
Example #18
Source File: CallActivity.java (from sample-videoRTC, Apache License 2.0)
@Override
synchronized public void onFrame(VideoFrame frame) {
  if (target == null) {
    Logging.d(TAG, "Dropping frame in proxy because target is null.");
    return;
  }
  target.onFrame(frame);
}
Example #19
Source File: WebRtcAudioRecord.java (from webrtc_android, MIT License)
private boolean stopRecording() {
  Logging.d(TAG, "stopRecording");
  assertTrue(audioThread != null);
  audioThread.stopThread();
  if (!ThreadUtils.joinUninterruptibly(audioThread, AUDIO_RECORD_THREAD_JOIN_TIMEOUT_MS)) {
    Logging.e(TAG, "Join of AudioRecordJavaThread timed out");
    WebRtcAudioUtils.logAudioState(TAG);
  }
  audioThread = null;
  if (effects != null) {
    effects.release();
  }
  releaseAudioResources();
  return true;
}
Example #20
Source File: WebRtcAudioRecord.java (from webrtc_android, MIT License)
private boolean enableBuiltInNS(boolean enable) {
  Logging.d(TAG, "enableBuiltInNS(" + enable + ')');
  if (effects == null) {
    Logging.e(TAG, "Built-in NS is not supported on this platform");
    return false;
  }
  return effects.setNS(enable);
}
Example #21
Source File: WebRtcAudioTrack.java (from webrtc_android, MIT License)
private void reportWebRtcAudioTrackInitError(String errorMessage) {
  Logging.e(TAG, "Init playout error: " + errorMessage);
  WebRtcAudioUtils.logAudioState(TAG, context, audioManager);
  if (errorCallback != null) {
    errorCallback.onWebRtcAudioTrackInitError(errorMessage);
  }
}
Example #22
Source File: WebRtcAudioEffects.java (from webrtc_android, MIT License)
public boolean setNS(boolean enable) {
  Logging.d(TAG, "setNS(" + enable + ")");
  if (!isNoiseSuppressorSupported()) {
    Logging.w(TAG, "Platform NS is not supported");
    shouldEnableNs = false;
    return false;
  }
  if (ns != null && (enable != shouldEnableNs)) {
    Logging.e(TAG, "Platform NS state can't be modified while recording");
    return false;
  }
  shouldEnableNs = enable;
  return true;
}
Example #23
Source File: WebRtcAudioUtils.java (from webrtc_android, MIT License)
private static void logAudioDeviceInfo(String tag, AudioManager audioManager) {
  if (Build.VERSION.SDK_INT < 23) {
    return;
  }
  final AudioDeviceInfo[] devices = audioManager.getDevices(AudioManager.GET_DEVICES_ALL);
  if (devices.length == 0) {
    return;
  }
  Logging.d(tag, "Audio Devices: ");
  for (AudioDeviceInfo device : devices) {
    StringBuilder info = new StringBuilder();
    info.append(" ").append(deviceTypeToString(device.getType()));
    info.append(device.isSource() ? "(in): " : "(out): ");
    // An empty array indicates that the device supports arbitrary channel counts.
    if (device.getChannelCounts().length > 0) {
      info.append("channels=").append(Arrays.toString(device.getChannelCounts()));
      info.append(", ");
    }
    if (device.getEncodings().length > 0) {
      // Examples: ENCODING_PCM_16BIT = 2, ENCODING_PCM_FLOAT = 4.
      info.append("encodings=").append(Arrays.toString(device.getEncodings()));
      info.append(", ");
    }
    if (device.getSampleRates().length > 0) {
      info.append("sample rates=").append(Arrays.toString(device.getSampleRates()));
      info.append(", ");
    }
    info.append("id=").append(device.getId());
    Logging.d(tag, info.toString());
  }
}
Example #24
Source File: WebRtcAudioTrack.java (from webrtc_android, MIT License)
private void reportWebRtcAudioTrackStartError(
    AudioTrackStartErrorCode errorCode, String errorMessage) {
  Logging.e(TAG, "Start playout error: " + errorCode + ". " + errorMessage);
  WebRtcAudioUtils.logAudioState(TAG, context, audioManager);
  if (errorCallback != null) {
    errorCallback.onWebRtcAudioTrackStartError(errorCode, errorMessage);
  }
}
Example #25
Source File: ProxyVideoSink.java (from webrtc_android, MIT License)
@Override
synchronized public void onFrame(VideoFrame frame) {
  if (target == null) {
    Logging.d(TAG, "Dropping frame in proxy because target is null.");
    return;
  }
  target.onFrame(frame);
}
Example #26
Source File: WebRtcAudioEffects.java (from webrtc_android, MIT License)
public boolean setAEC(boolean enable) {
  Logging.d(TAG, "setAEC(" + enable + ")");
  if (!isAcousticEchoCancelerSupported()) {
    Logging.w(TAG, "Platform AEC is not supported");
    shouldEnableAec = false;
    return false;
  }
  if (aec != null && (enable != shouldEnableAec)) {
    Logging.e(TAG, "Platform AEC state can't be modified while recording");
    return false;
  }
  shouldEnableAec = enable;
  return true;
}
Example #27
Source File: WebRtcAudioUtils.java (from webrtc_android, MIT License)
@SuppressWarnings("NoSynchronizedMethodCheck")
public static synchronized boolean useWebRtcBasedAcousticEchoCanceler() {
  if (useWebRtcBasedAcousticEchoCanceler) {
    Logging.w(TAG, "Overriding default behavior; now using WebRTC AEC!");
  }
  return useWebRtcBasedAcousticEchoCanceler;
}
Example #28
Source File: WebRtcAudioRecord.java (from webrtc_android, MIT License)
private void reportWebRtcAudioRecordInitError(String errorMessage) {
  Logging.e(TAG, "Init recording error: " + errorMessage);
  WebRtcAudioUtils.logAudioState(TAG, context, audioManager);
  if (errorCallback != null) {
    errorCallback.onWebRtcAudioRecordInitError(errorMessage);
  }
}
Example #29
Source File: WebRtcAudioRecord.java (from webrtc_android, MIT License)
private void releaseAudioResources() {
  Logging.d(TAG, "releaseAudioResources");
  if (audioRecord != null) {
    audioRecord.release();
    audioRecord = null;
  }
}