org.webrtc.AudioTrack Java Examples
The following examples show how to use org.webrtc.AudioTrack. Each example is drawn from an open-source project; the source file, project, and license are noted above each snippet.
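Across these projects the usage pattern is consistent: obtain an AudioSource from a PeerConnectionFactory, wrap it in an AudioTrack, attach the track to a MediaStream or PeerConnection, and mute or unmute it with setEnabled(). The sketch below condenses that lifecycle; it is not taken from any single project here, it assumes an already-initialized PeerConnectionFactory named factory, and the track and stream ids are arbitrary illustrative strings.

// Minimal AudioTrack lifecycle (sketch, assuming an initialized PeerConnectionFactory `factory`).
AudioSource audioSource = factory.createAudioSource(new MediaConstraints());
AudioTrack audioTrack = factory.createAudioTrack("audio0", audioSource);
audioTrack.setEnabled(true); // track is live and unmuted

MediaStream stream = factory.createLocalMediaStream("stream0");
stream.addTrack(audioTrack); // make the track available to a PeerConnection

audioTrack.setEnabled(false); // mute without stopping audio capture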
Example #1
Source File: MainActivity.java From webrtc-android-tutorial with Apache License 2.0

@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_main);

    // create PeerConnectionFactory
    PeerConnectionFactory.InitializationOptions initializationOptions =
            PeerConnectionFactory.InitializationOptions.builder(this)
                    .createInitializationOptions();
    PeerConnectionFactory.initialize(initializationOptions);
    PeerConnectionFactory peerConnectionFactory =
            PeerConnectionFactory.builder().createPeerConnectionFactory();

    // create AudioSource and AudioTrack
    AudioSource audioSource = peerConnectionFactory.createAudioSource(new MediaConstraints());
    AudioTrack audioTrack = peerConnectionFactory.createAudioTrack("101", audioSource);

    EglBase.Context eglBaseContext = EglBase.create().getEglBaseContext();
    SurfaceTextureHelper surfaceTextureHelper =
            SurfaceTextureHelper.create("CaptureThread", eglBaseContext);

    // create VideoCapturer
    VideoCapturer videoCapturer = createCameraCapturer();
    VideoSource videoSource =
            peerConnectionFactory.createVideoSource(videoCapturer.isScreencast());
    videoCapturer.initialize(surfaceTextureHelper, getApplicationContext(),
            videoSource.getCapturerObserver());
    videoCapturer.startCapture(480, 640, 30);

    SurfaceViewRenderer localView = findViewById(R.id.localView);
    localView.setMirror(true);
    localView.init(eglBaseContext, null);

    // create VideoTrack and display it in localView
    VideoTrack videoTrack = peerConnectionFactory.createVideoTrack("100", videoSource);
    videoTrack.addSink(localView);
}
Example #2
Source File: PeerConnectionChannel.java From owt-client-android with Apache License 2.0

protected void addStream(final MediaStream mediaStream) {
    DCHECK(mediaStream);
    DCHECK(pcExecutor);
    pcExecutor.execute(() -> {
        if (disposed()) {
            return;
        }
        ArrayList<String> streamIds = new ArrayList<>();
        streamIds.add(mediaStream.getId());
        for (AudioTrack audioTrack : mediaStream.audioTracks) {
            RtpSender audioSender = peerConnection.addTrack(audioTrack, streamIds);
            audioRtpSenders.put(mediaStream.getId(), audioSender);
        }
        for (VideoTrack videoTrack : mediaStream.videoTracks) {
            RtpSender videoSender = peerConnection.addTrack(videoTrack, streamIds);
            videoRtpSenders.put(mediaStream.getId(), videoSender);
        }
    });
}
Example #3
Source File: PeerConnectionObserver.java From iGap-Android with GNU Affero General Public License v3.0

@Override
public void onAddStream(MediaStream stream) {
    for (AudioTrack audioTrack : stream.audioTracks) {
        audioTrack.setEnabled(true);
    }
    if (stream.videoTracks != null && stream.videoTracks.size() == 1) {
        VideoTrack videoTrack = stream.videoTracks.get(0);
        videoTrack.setEnabled(true);
        videoTrack.addSink(new VideoSink() {
            @Override
            public void onFrame(VideoFrame videoFrame) {
                if (G.onVideoCallFrame != null) {
                    G.onVideoCallFrame.onRemoteFrame(videoFrame);
                }
            }
        });
    }
}
Example #4
Source File: WebRTC.java From iGap-Android with GNU Affero General Public License v3.0

public void unMuteSound() {
    if (mediaStream == null) {
        return;
    }
    for (AudioTrack audioTrack : mediaStream.audioTracks) {
        audioTrack.setEnabled(true);
    }
}
Example #5
Source File: WebRTCWrapper.java From Conversations with GNU General Public License v3.0

void setMicrophoneEnabled(final boolean enabled) {
    final AudioTrack audioTrack = this.localAudioTrack;
    if (audioTrack == null) {
        throw new IllegalStateException("Local audio track does not exist (yet)");
    }
    audioTrack.setEnabled(enabled);
}
Example #6
Source File: WebRTCWrapper.java From Conversations with GNU General Public License v3.0

boolean isMicrophoneEnabled() {
    final AudioTrack audioTrack = this.localAudioTrack;
    if (audioTrack == null) {
        throw new IllegalStateException("Local audio track does not exist (yet)");
    }
    try {
        return audioTrack.enabled();
    } catch (final IllegalStateException e) {
        // sometimes UI might still be rendering the buttons
        // when a background thread has already ended the call
        return false;
    }
}
Example #7
Source File: LicodeConnector.java From licodeAndroidClient with MIT License

@Override
public void setAudioEnabled(boolean enabled) {
    if (mState != State.kConnected || lMS == null) {
        return;
    }
    for (AudioTrack audioTrack : lMS.audioTracks) {
        audioTrack.setEnabled(enabled);
    }
}
Example #8
Source File: WebRTCWrapper.java From Pix-Art-Messenger with GNU General Public License v3.0

void setMicrophoneEnabled(final boolean enabled) {
    final AudioTrack audioTrack = this.localAudioTrack;
    if (audioTrack == null) {
        throw new IllegalStateException("Local audio track does not exist (yet)");
    }
    audioTrack.setEnabled(enabled);
}
Example #9
Source File: WebRTCWrapper.java From Pix-Art-Messenger with GNU General Public License v3.0

boolean isMicrophoneEnabled() {
    final AudioTrack audioTrack = this.localAudioTrack;
    if (audioTrack == null) {
        throw new IllegalStateException("Local audio track does not exist (yet)");
    }
    try {
        return audioTrack.enabled();
    } catch (final IllegalStateException e) {
        // sometimes UI might still be rendering the buttons
        // when a background thread has already ended the call
        return false;
    }
}
Example #10
Source File: WebRTC.java From iGap-Android with GNU Affero General Public License v3.0

public void muteSound() {
    if (mediaStream == null) {
        return;
    }
    for (AudioTrack audioTrack : mediaStream.audioTracks) {
        audioTrack.setEnabled(false);
    }
}
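Examples #4 and #10 are the two halves of a mute control: disabling a track silences it without releasing the microphone, so it can be re-enabled instantly. A combined toggle, written as a sketch against the same mediaStream field both examples use, mirrors the shape of Example #7:

// Sketch: unified mute/unmute toggle (assumes the mediaStream field from Examples #4 and #10).
public void setSoundEnabled(boolean enabled) {
    if (mediaStream == null) {
        return;
    }
    for (AudioTrack audioTrack : mediaStream.audioTracks) {
        audioTrack.setEnabled(enabled);
    }
}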
Example #11
Source File: RTCCall.java From Meshenger with GNU General Public License v3.0

private MediaStream createStream() {
    upStream = factory.createLocalMediaStream("stream1");
    AudioTrack audio = factory.createAudioTrack("audio1",
            factory.createAudioSource(new MediaConstraints()));
    upStream.addTrack(audio);
    upStream.addTrack(getVideoTrack());
    //this.capturer.startCapture(500, 500, 30);
    return upStream;
}
Example #12
Source File: RTCCall.java From meshenger-android with GNU General Public License v3.0

private MediaStream createStream() {
    upStream = factory.createLocalMediaStream("stream1");
    AudioTrack audio = factory.createAudioTrack("audio1",
            factory.createAudioSource(new MediaConstraints()));
    upStream.addTrack(audio);
    upStream.addTrack(getVideoTrack());
    //this.capturer.startCapture(500, 500, 30);
    return upStream;
}
Example #13
Source File: WebRtcCallService.java From bcm-android with GNU General Public License v3.0

@Override
public void onAddStream(MediaStream stream) {
    ALog.logForSecret(TAG, "onAddStream:" + stream);
    for (AudioTrack audioTrack : stream.audioTracks) {
        audioTrack.setEnabled(true);
    }
    if (stream.videoTracks != null && stream.videoTracks.size() == 1) {
        VideoTrack videoTrack = stream.videoTracks.get(0);
        videoTrack.setEnabled(true);
        videoTrack.addSink(remoteRenderer);
    }
}
Example #14
Source File: MainActivity.java From webrtc-android-tutorial with Apache License 2.0

@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_main);

    EglBase.Context eglBaseContext = EglBase.create().getEglBaseContext();

    // create PeerConnectionFactory
    PeerConnectionFactory.initialize(PeerConnectionFactory.InitializationOptions
            .builder(this)
            .createInitializationOptions());
    PeerConnectionFactory.Options options = new PeerConnectionFactory.Options();
    DefaultVideoEncoderFactory defaultVideoEncoderFactory =
            new DefaultVideoEncoderFactory(eglBaseContext, true, true);
    DefaultVideoDecoderFactory defaultVideoDecoderFactory =
            new DefaultVideoDecoderFactory(eglBaseContext);
    peerConnectionFactory = PeerConnectionFactory.builder()
            .setOptions(options)
            .setVideoEncoderFactory(defaultVideoEncoderFactory)
            .setVideoDecoderFactory(defaultVideoDecoderFactory)
            .createPeerConnectionFactory();

    SurfaceTextureHelper surfaceTextureHelper =
            SurfaceTextureHelper.create("CaptureThread", eglBaseContext);

    // create VideoCapturer
    VideoCapturer videoCapturer = createCameraCapturer(true);
    VideoSource videoSource =
            peerConnectionFactory.createVideoSource(videoCapturer.isScreencast());
    videoCapturer.initialize(surfaceTextureHelper, getApplicationContext(),
            videoSource.getCapturerObserver());
    videoCapturer.startCapture(480, 640, 30);

    localView = findViewById(R.id.localView);
    localView.setMirror(true);
    localView.init(eglBaseContext, null);

    // create VideoTrack and display it in localView
    VideoTrack videoTrack = peerConnectionFactory.createVideoTrack("100", videoSource);
    videoTrack.addSink(localView);

    remoteView = findViewById(R.id.remoteView);
    remoteView.setMirror(false);
    remoteView.init(eglBaseContext, null);

    AudioSource audioSource = peerConnectionFactory.createAudioSource(new MediaConstraints());
    AudioTrack audioTrack = peerConnectionFactory.createAudioTrack("101", audioSource);

    mediaStream = peerConnectionFactory.createLocalMediaStream("mediaStream");
    mediaStream.addTrack(videoTrack);
    mediaStream.addTrack(audioTrack);

    SignalingClient.get().setCallback(this);
    call();
}
Example #15
Source File: PeerConnectionClient.java From janus-gateway-android with MIT License

private AudioTrack createAudioTrack() {
    audioSource = factory.createAudioSource(audioConstraints);
    localAudioTrack = factory.createAudioTrack(AUDIO_TRACK_ID, audioSource);
    localAudioTrack.setEnabled(enableAudio);
    return localAudioTrack;
}
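This createAudioTrack() shape reappears verbatim in Examples #17, #18, and #27; these PeerConnectionClient classes appear to derive from the WebRTC AppRTC demo. The method relies on surrounding state roughly like the following sketch, where the field names match the snippet but the initial values are illustrative assumptions:

// Sketch of the fields createAudioTrack() assumes (values are illustrative).
private static final String AUDIO_TRACK_ID = "ARDAMSa0"; // any unique string works
private PeerConnectionFactory factory;
private MediaConstraints audioConstraints = new MediaConstraints();
private AudioSource audioSource;
private AudioTrack localAudioTrack;
private boolean enableAudio = true; // whether the local track starts unmuted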
Example #16
Source File: LicodeConnector.java From licodeAndroidClient with MIT License

/** begin streaming to server - MUST run on VcThread */
void doPublish(VideoStreamsView view) {
    if (mVideoCapturer != null) {
        return;
    }
    MediaConstraints videoConstraints = new MediaConstraints();
    videoConstraints.mandatory.add(new MediaConstraints.KeyValuePair("maxWidth", "320"));
    videoConstraints.mandatory.add(new MediaConstraints.KeyValuePair("maxHeight", "240"));
    videoConstraints.mandatory.add(new MediaConstraints.KeyValuePair("maxFrameRate", "10"));
    MediaConstraints audioConstraints = new MediaConstraints();
    audioConstraints.optional.add(new MediaConstraints.KeyValuePair("googEchoCancellation2", "true"));
    audioConstraints.optional.add(new MediaConstraints.KeyValuePair("googNoiseSuppression", "true"));
    lMS = sFactory.createLocalMediaStream("ARDAMS");
    if (videoConstraints != null) {
        mVideoCapturer = getVideoCapturer();
        mVideoSource = sFactory.createVideoSource(mVideoCapturer, videoConstraints);
        VideoTrack videoTrack = sFactory.createVideoTrack("ARDAMSv0", mVideoSource);
        lMS.addTrack(videoTrack);
    }
    if (audioConstraints != null) {
        AudioTrack audioTrack = sFactory.createAudioTrack("ARDAMSa0",
                sFactory.createAudioSource(audioConstraints));
        lMS.addTrack(audioTrack);
        audioTrack.setEnabled(false);
    }
    StreamDescription stream = new StreamDescription("", false, true, true, false, null, mNick);
    MediaConstraints pcConstraints = makePcConstraints();
    MyPcObserver pcObs = new MyPcObserver(new LicodeSdpObserver(stream, true), stream);
    PeerConnection pc = sFactory.createPeerConnection(mIceServers, pcConstraints, pcObs);
    pc.addStream(lMS, new MediaConstraints());
    stream.setMedia(lMS);
    if (view != null) {
        stream.attachRenderer(new VideoCallbacks(view, VideoStreamsView.LOCAL_STREAM_ID));
    }
    stream.initLocal(pc, pcObs.getSdpObserver());
}
Example #17
Source File: PeerConnectionClient.java From restcomm-android-sdk with GNU Affero General Public License v3.0

private AudioTrack createAudioTrack() {
    audioSource = factory.createAudioSource(audioConstraints);
    localAudioTrack = factory.createAudioTrack(AUDIO_TRACK_ID, audioSource);
    localAudioTrack.setEnabled(enableAudio);
    return localAudioTrack;
}
Example #18
Source File: PeerConnectionClient.java From voip_android with BSD 3-Clause "New" or "Revised" License

private AudioTrack createAudioTrack() {
    audioSource = factory.createAudioSource(audioConstraints);
    localAudioTrack = factory.createAudioTrack(AUDIO_TRACK_ID, audioSource);
    localAudioTrack.setEnabled(enableAudio);
    return localAudioTrack;
}
Example #19
Source File: AndroidAudioTrack.java From actor-platform with GNU Affero General Public License v3.0

public AudioTrack getAudioTrack() {
    return audioTrack;
}
Example #20
Source File: AndroidAudioTrack.java From actor-platform with GNU Affero General Public License v3.0

public AndroidAudioTrack(AudioTrack audioTrack, AndroidMediaStream stream) {
    this.audioTrack = audioTrack;
    this.stream = stream;
}
Example #21
Source File: PeersManager.java From WebRTCapp with Apache License 2.0

public AudioTrack getLocalAudioTrack() {
    return localAudioTrack;
}
Example #22
Source File: RemoteParticipant.java From WebRTCapp with Apache License 2.0

public AudioTrack getAudioTrack() {
    return audioTrack;
}
Example #23
Source File: VideoChatActivity.java From AndroidRTC with MIT License

@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_video_chat);
    getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);

    Bundle extras = getIntent().getExtras();
    if (extras == null || !extras.containsKey(Constants.USER_NAME)) {
        Intent intent = new Intent(this, MainActivity.class);
        startActivity(intent);
        Toast.makeText(this,
                "Need to pass username to VideoChatActivity in intent extras (Constants.USER_NAME).",
                Toast.LENGTH_SHORT).show();
        finish();
        return;
    }
    this.username = extras.getString(Constants.USER_NAME, "");
    this.mChatList = getListView();
    this.mChatEditText = (EditText) findViewById(R.id.chat_input);
    this.mCallStatus = (TextView) findViewById(R.id.call_status);

    // Set up the List View for chatting
    List<ChatMessage> ll = new LinkedList<ChatMessage>();
    mChatAdapter = new ChatAdapter(this, ll);
    mChatList.setAdapter(mChatAdapter);

    // First, we initiate the PeerConnectionFactory with our application context and some options.
    PeerConnectionFactory.initializeAndroidGlobals(
            this,  // Context
            true,  // Audio Enabled
            true,  // Video Enabled
            true,  // Hardware Acceleration Enabled
            null); // Render EGL Context

    PeerConnectionFactory pcFactory = new PeerConnectionFactory();
    this.pnRTCClient = new PnRTCClient(Constants.PUB_KEY, Constants.SUB_KEY, this.username);

    List<PeerConnection.IceServer> servers = getXirSysIceServers();
    if (!servers.isEmpty()) {
        this.pnRTCClient.setSignalParams(new PnSignalingParams());
    }

    // Returns the number of cams & front/back face device name
    int camNumber = VideoCapturerAndroid.getDeviceCount();
    String frontFacingCam = VideoCapturerAndroid.getNameOfFrontFacingDevice();
    String backFacingCam = VideoCapturerAndroid.getNameOfBackFacingDevice();

    // Creates a VideoCapturerAndroid instance for the device name
    VideoCapturer capturer = VideoCapturerAndroid.create(frontFacingCam);

    // First create a Video Source, then we can make a Video Track
    localVideoSource = pcFactory.createVideoSource(capturer, this.pnRTCClient.videoConstraints());
    VideoTrack localVideoTrack = pcFactory.createVideoTrack(VIDEO_TRACK_ID, localVideoSource);

    // First we create an AudioSource, then we can create our AudioTrack
    AudioSource audioSource = pcFactory.createAudioSource(this.pnRTCClient.audioConstraints());
    AudioTrack localAudioTrack = pcFactory.createAudioTrack(AUDIO_TRACK_ID, audioSource);

    // To create our VideoRenderer, we can use the included VideoRendererGui for simplicity.
    // First we need to set the GLSurfaceView that it should render to.
    this.videoView = (GLSurfaceView) findViewById(R.id.gl_surface);

    // Then we set that view, and pass a Runnable to run once the surface is ready
    VideoRendererGui.setView(videoView, null);

    // Now that VideoRendererGui is ready, we can get our VideoRenderer.
    // IN THIS ORDER. Affects which is on top or bottom.
    remoteRender = VideoRendererGui.create(0, 0, 100, 100,
            VideoRendererGui.ScalingType.SCALE_ASPECT_FILL, false);
    localRender = VideoRendererGui.create(0, 0, 100, 100,
            VideoRendererGui.ScalingType.SCALE_ASPECT_FILL, true);

    // We start out with an empty MediaStream object, created with help from our PeerConnectionFactory.
    // Note that LOCAL_MEDIA_STREAM_ID can be any string.
    MediaStream mediaStream = pcFactory.createLocalMediaStream(LOCAL_MEDIA_STREAM_ID);

    // Now we can add our tracks.
    mediaStream.addTrack(localVideoTrack);
    mediaStream.addTrack(localAudioTrack);

    // First attach the RTC Listener so that callback events will be triggered
    this.pnRTCClient.attachRTCListener(new DemoRTCListener());

    // Then attach your local media stream to the PnRTCClient.
    // This will trigger the onLocalStream callback.
    this.pnRTCClient.attachLocalMediaStream(mediaStream);

    // Listen on a channel. This is your "phone number," also set the max chat users.
    this.pnRTCClient.listenOn("Kevin");
    this.pnRTCClient.setMaxConnections(1);

    // If the intent contains a number to dial, call it now that you are connected.
    // Else, remain listening for a call.
    if (extras.containsKey(Constants.CALL_USER)) {
        String callUser = extras.getString(Constants.CALL_USER, "");
        connectToUser(callUser);
    }
}
Example #24
Source File: MainActivity.java From krankygeek with MIT License

@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_main);

    AudioManager audioManager = (AudioManager) this.getSystemService(Context.AUDIO_SERVICE);
    audioManager.setMode(AudioManager.MODE_IN_COMMUNICATION);
    audioManager.setSpeakerphoneOn(true);

    PeerConnectionFactory.initializeAndroidGlobals(
            this,  // Context
            true,  // Audio Enabled
            true,  // Video Enabled
            true,  // Hardware Acceleration Enabled
            null); // Render EGL Context

    peerConnectionFactory = new PeerConnectionFactory();

    VideoCapturerAndroid vc = VideoCapturerAndroid.create(
            VideoCapturerAndroid.getNameOfFrontFacingDevice(), null);

    localVideoSource = peerConnectionFactory.createVideoSource(vc, new MediaConstraints());
    VideoTrack localVideoTrack = peerConnectionFactory.createVideoTrack(VIDEO_TRACK_ID, localVideoSource);
    localVideoTrack.setEnabled(true);

    AudioSource audioSource = peerConnectionFactory.createAudioSource(new MediaConstraints());
    AudioTrack localAudioTrack = peerConnectionFactory.createAudioTrack(AUDIO_TRACK_ID, audioSource);
    localAudioTrack.setEnabled(true);

    localMediaStream = peerConnectionFactory.createLocalMediaStream(LOCAL_STREAM_ID);
    localMediaStream.addTrack(localVideoTrack);
    localMediaStream.addTrack(localAudioTrack);

    GLSurfaceView videoView = (GLSurfaceView) findViewById(R.id.glview_call);
    VideoRendererGui.setView(videoView, null);
    try {
        otherPeerRenderer = VideoRendererGui.createGui(0, 0, 100, 100,
                VideoRendererGui.ScalingType.SCALE_ASPECT_FILL, true);
        VideoRenderer renderer = VideoRendererGui.createGui(50, 50, 50, 50,
                VideoRendererGui.ScalingType.SCALE_ASPECT_FILL, true);
        localVideoTrack.addRenderer(renderer);
    } catch (Exception e) {
        e.printStackTrace();
    }
}
Example #25
Source File: WebRTC.java From iGap-Android with GNU Affero General Public License v3.0

private void addAudioTrack(MediaStream mediaStream) {
    AudioSource audioSource =
            peerConnectionFactoryInstance().createAudioSource(audioConstraintsGetInstance());
    AudioTrack audioTrack = peerConnectionFactoryInstance().createAudioTrack("ARDAMSa0", audioSource);
    audioTrack.setEnabled(true);
    mediaStream.addTrack(audioTrack);
}
Example #26
Source File: RemoteParticipant.java From WebRTCapp with Apache License 2.0

public void setAudioTrack(AudioTrack audioTrack) {
    this.audioTrack = audioTrack;
}
Example #27
Source File: PeerConnectionClient.java From sample-videoRTC with Apache License 2.0

private AudioTrack createAudioTrack() {
    audioSource = factory.createAudioSource(audioConstraints);
    localAudioTrack = factory.createAudioTrack(AUDIO_TRACK_ID, audioSource);
    localAudioTrack.setEnabled(enableAudio);
    return localAudioTrack;
}