org.webrtc.MediaStream Java Examples
The following examples show how to use
org.webrtc.MediaStream.
You can vote up the examples you find useful or vote down the ones you don't,
and you can visit the original project or source file by following the links above each example. You may also check out the related API usage examples in the sidebar.
Example #1
Source File: VideoConferenceActivity.java From WebRTCapp with Apache License 2.0 | 6 votes |
public void gotRemoteStream(MediaStream stream, final RemoteParticipant remoteParticipant) { final VideoTrack videoTrack = stream.videoTracks.getFirst(); runOnUiThread(new Runnable() { @Override public void run() { remoteRenderer = new VideoRenderer(remoteParticipant.getVideoView()); remoteParticipant.getVideoView().setVisibility(View.VISIBLE); videoTrack.addRenderer(remoteRenderer); MediaStream mediaStream = peersManager.getPeerConnectionFactory().createLocalMediaStream("105"); remoteParticipant.setMediaStream(mediaStream); mediaStream.addTrack(peersManager.getLocalAudioTrack()); mediaStream.addTrack(peersManager.getLocalVideoTrack()); remoteParticipant.getPeerConnection().removeStream(mediaStream); remoteParticipant.getPeerConnection().addStream(mediaStream); } }); }
Example #2
Source File: PeerConnectionClient.java From janus-gateway-android with MIT License | 6 votes |
@Override public void onAddStream(final MediaStream stream) { executor.execute(new Runnable() { @Override public void run() { if (peerConnection == null || isError) { return; } Log.d(TAG, "=========== onAddStream =========="); if (stream.videoTracks.size() == 1) { remoteVideoTrack = stream.videoTracks.get(0); remoteVideoTrack.setEnabled(true); connection.videoTrack = remoteVideoTrack; events.onRemoteRender(connection); } } }); }
Example #3
Source File: PeerConnectionObserver.java From iGap-Android with GNU Affero General Public License v3.0 | 6 votes |
@Override public void onAddStream(MediaStream stream) { for (AudioTrack audioTrack : stream.audioTracks) { audioTrack.setEnabled(true); } if (stream.videoTracks != null && stream.videoTracks.size() == 1) { VideoTrack videoTrack = stream.videoTracks.get(0); videoTrack.setEnabled(true); videoTrack.addSink(new VideoSink() { @Override public void onFrame(VideoFrame videoFrame) { if (G.onVideoCallFrame != null) { G.onVideoCallFrame.onRemoteFrame(videoFrame); } } }); } }
Example #4
Source File: MainActivity.java From webrtc-android-tutorial with Apache License 2.0 | 6 votes |
private void call() { List<PeerConnection.IceServer> iceServers = new ArrayList<>(); iceServers.add(PeerConnection.IceServer.builder("stun:stun.l.google.com:19302").createIceServer()); peerConnection = peerConnectionFactory.createPeerConnection(iceServers, new PeerConnectionAdapter("localconnection") { @Override public void onIceCandidate(IceCandidate iceCandidate) { super.onIceCandidate(iceCandidate); SignalingClient.get().sendIceCandidate(iceCandidate); } @Override public void onAddStream(MediaStream mediaStream) { super.onAddStream(mediaStream); VideoTrack remoteVideoTrack = mediaStream.videoTracks.get(0); runOnUiThread(() -> { remoteVideoTrack.addSink(remoteView); }); } }); peerConnection.addStream(mediaStream); }
Example #5
Source File: PeerConnectionChannel.java From owt-client-android with Apache License 2.0 | 6 votes |
protected void addStream(final MediaStream mediaStream) { DCHECK(mediaStream); DCHECK(pcExecutor); pcExecutor.execute(() -> { if (disposed()) { return; } ArrayList<String> streamIds = new ArrayList<>(); streamIds.add(mediaStream.getId()); for (AudioTrack audioTrack : mediaStream.audioTracks) { RtpSender audioSender = peerConnection.addTrack(audioTrack, streamIds); audioRtpSenders.put(mediaStream.getId(), audioSender); } for (VideoTrack videoTrack : mediaStream.videoTracks) { RtpSender videoSender = peerConnection.addTrack(videoTrack, streamIds); videoRtpSenders.put(mediaStream.getId(), videoSender); } }); }
Example #6
Source File: AppRTCDemoActivity.java From WebRTCDemo with BSD 3-Clause "New" or "Revised" License | 5 votes |
@Override public void onAddStream(final MediaStream stream){ runOnUiThread(new Runnable() { public void run() { abortUnless(stream.audioTracks.size() <= 1 && stream.videoTracks.size() <= 1, "Weird-looking stream: " + stream); if (stream.videoTracks.size() == 1) { stream.videoTracks.get(0).addRenderer( new VideoRenderer(remoteRender)); } } }); }
Example #7
Source File: AppRTCDemoActivity.java From droidkit-webrtc with BSD 3-Clause "New" or "Revised" License | 5 votes |
@Override public void onRemoveStream(final MediaStream stream){ runOnUiThread(new Runnable() { public void run() { stream.videoTracks.get(0).dispose(); } }); }
Example #8
Source File: PeerConnectionClient.java From janus-gateway-android with MIT License | 5 votes |
@Override public void onRemoveStream(final MediaStream stream) { executor.execute(new Runnable() { @Override public void run() { remoteVideoTrack = null; } }); }
Example #9
Source File: LicodeConnector.java From licodeAndroidClient with MIT License | 5 votes |
@Override public void onAddStream(final MediaStream media) { if (mSdpObserver.isLocal()) { return; } if (media.videoTracks.size() == 1 && mDesc != null) { ((StreamDescription) mDesc).setMedia(media); triggerMediaAvailable(mDesc); } }
Example #10
Source File: RTCCall.java From meshenger-android with GNU General Public License v3.0 | 5 votes |
private MediaStream createStream() { upStream = factory.createLocalMediaStream("stream1"); AudioTrack audio = factory.createAudioTrack("audio1", factory.createAudioSource(new MediaConstraints())); upStream.addTrack(audio); upStream.addTrack(getVideoTrack()); //this.capturer.startCapture(500, 500, 30); return upStream; }
Example #11
Source File: PeerConnectionClient.java From Yahala-Messenger with MIT License | 5 votes |
@Override public void onRemoveStream(final MediaStream stream) { executor.execute(new Runnable() { @Override public void run() { remoteVideoTrack = null; } }); }
Example #12
Source File: ConferencePeerConnectionChannel.java From owt-client-android with Apache License 2.0 | 5 votes |
@Override public void onAddStream(final MediaStream mediaStream) { DCHECK(stream); callbackExecutor.execute(() -> { ((RemoteStream) stream).setMediaStream(mediaStream); observer.onAddStream(key, (owt.base.RemoteStream) stream); }); }
Example #13
Source File: RTCCall.java From meshenger-android with GNU General Public License v3.0 | 5 votes |
private void handleMediaStream(MediaStream stream) { log("handleMediaStream"); if (this.remoteRenderer == null || stream.videoTracks.size() == 0) { return; } new Handler(Looper.getMainLooper()).post(() -> { //remoteRenderer.setBackgroundColor(Color.TRANSPARENT); remoteRenderer.init(this.sharedContext, null); stream.videoTracks.get(0).addSink(remoteRenderer); }); }
Example #14
Source File: RTCCall.java From Meshenger with GNU General Public License v3.0 | 5 votes |
private void handleMediaStream(MediaStream stream) { log("handling video stream"); if (this.remoteRenderer == null || stream.videoTracks.size() == 0) return; new Handler(Looper.getMainLooper()).post(() -> { //remoteRenderer.setBackgroundColor(Color.TRANSPARENT); remoteRenderer.init(this.sharedContext, null); stream.videoTracks.get(0).addSink(remoteRenderer); }); }
Example #15
Source File: PeerConnectionClient.java From voip_android with BSD 3-Clause "New" or "Revised" License | 5 votes |
@Override public void onRemoveStream(final MediaStream stream) { executor.execute(new Runnable() { @Override public void run() { remoteVideoTrack = null; } }); }
Example #16
Source File: P2PPeerConnectionChannel.java From owt-client-android with Apache License 2.0 | 5 votes |
@Override public void onRemoveStream(final MediaStream mediaStream) { String id = mediaStream.getId(); callbackExecutor.execute(() -> { Log.d(LOG_TAG, "onRemoveStream"); if (remoteStreams.containsKey(id)) { remoteStreams.remove(id).onEnded(); } }); }
Example #17
Source File: MediaResourceManager.java From webrtcpeer-android with Apache License 2.0 | 5 votes |
public void run() { Log.d(TAG, "Attaching VideoRenderer to remote stream (" + remoteStream + ")"); // Check if the remote stream has a video track if (remoteStream.videoTracks.size() == 1) { // Get the video track VideoTrack remoteVideoTrack = remoteStream.videoTracks.get(0); // Set video track enabled if we have enabled video rendering remoteVideoTrack.setEnabled(renderVideo); VideoRenderer videoRenderer = remoteVideoRenderers.get(remoteRender); if (videoRenderer != null) { MediaStream mediaStream = remoteVideoMediaStreams.get(videoRenderer); if (mediaStream != null) { VideoTrack videoTrack = remoteVideoTracks.get(mediaStream); if (videoTrack != null) { videoTrack.removeRenderer(videoRenderer); } } } VideoRenderer newVideoRenderer = new VideoRenderer(remoteRender); remoteVideoTrack.addRenderer(newVideoRenderer); remoteVideoRenderers.put(remoteRender, newVideoRenderer); remoteVideoMediaStreams.put(newVideoRenderer, remoteStream); remoteVideoTracks.put(remoteStream, remoteVideoTrack); Log.d(TAG, "Attached."); } }
Example #18
Source File: Peer.java From webrtc_android with MIT License | 5 votes |
@Override public void onAddStream(MediaStream stream) { Log.i(TAG, "onAddStream:"); stream.audioTracks.get(0).setEnabled(true); _remoteStream = stream; if (mEvent != null) { mEvent.onRemoteStream(mUserId, stream); } }
Example #19
Source File: Peer.java From webrtc_android with MIT License | 5 votes |
@Override public void onRemoveStream(MediaStream stream) { Log.i(TAG, "onRemoveStream:"); if (mEvent != null) { mEvent.onRemoveStream(mUserId, stream); } }
Example #20
Source File: MediaStreamFactory.java From owt-client-android with Apache License 2.0 | 5 votes |
MediaStream createMediaStream(VideoCapturer videoCapturer, AudioTrackConstraints audioMediaConstraints) { RCHECK(videoCapturer != null || audioMediaConstraints != null); String label = UUID.randomUUID().toString(); MediaStream mediaStream = PCFactoryProxy.instance().createLocalMediaStream(label); if (videoCapturer != null) { VideoSource videoSource = PCFactoryProxy.instance().createVideoSource( videoCapturer.isScreencast()); SurfaceTextureHelper helper = SurfaceTextureHelper.create("CT", localContext); videoCapturer.initialize(helper, ContextInitialization.context, videoSource.getCapturerObserver()); videoCapturer.startCapture(videoCapturer.getWidth(), videoCapturer.getHeight(), videoCapturer.getFps()); VideoTrack videoTrack = PCFactoryProxy.instance().createVideoTrack(label + "v0", videoSource); videoTrack.setEnabled(true); mediaStream.addTrack(videoTrack); unsharedVideoSources.put(label, videoSource); } if (audioMediaConstraints != null) { if (sharedAudioSource == null) { sharedAudioSource = PCFactoryProxy.instance().createAudioSource( audioMediaConstraints.convertToWebRTCConstraints()); } audioSourceRef++; mediaStream.addTrack( PCFactoryProxy.instance().createAudioTrack(label + "a0", sharedAudioSource)); } return mediaStream; }
Example #21
Source File: RoomChatPresenter.java From VideoMeeting with Apache License 2.0 | 5 votes |
private void addRender(MediaStream stream, int position) { VideoRenderer.Callbacks render; L.d("addRender position is " + position); if (position == 0) { render = localRender; } else { render = VideoRendererGui.create(position % 2 == 0 ? 0 : 50, position / 2 * 50, 50, 50, scalingType, false); } stream.videoTracks.get(0).addRenderer(new VideoRenderer(render)); }
Example #22
Source File: RtcActivity.java From imsdk-android with MIT License | 5 votes |
@Override public void onAddRemoteStream(final MediaStream remoteStream) { if(client == null||remoteRender == null) return; stopPlayTone(); if(remoteStream.audioTracks.size()>1|| remoteStream.videoTracks.size()>1) { LogUtil.e("Wired looking stream: "+remoteStream); return; } if(!videoEnable&&remoteStream.audioTracks.size()==1) { remoteStream.audioTracks.get(0).setEnabled(true); } if(remoteStream.videoTracks.size() == 1) { remoteStream.videoTracks.get(0).addRenderer(new VideoRenderer(remoteRender)); remoteStream.videoTracks.get(0).setEnabled(videoEnable); if(videoEnable) { VideoRendererGui.update(remoteRender, REMOTE_X, REMOTE_Y, REMOTE_WIDTH, REMOTE_HEIGHT, scalingType, false); VideoRendererGui.update(localRender, LOCAL_X_CONNECTED, LOCAL_Y_CONNECTED, LOCAL_WIDTH_CONNECTED, LOCAL_HEIGHT_CONNECTED, scalingType, true); } } }
Example #23
Source File: WebRtcClient.java From imsdk-android with MIT License | 5 votes |
@Override public void onAddStream(MediaStream mediaStream) { LogUtil.d(TAG, "onAddStream " + mediaStream.label()); Logger.i(TAG + " onAddStream " + mediaStream.label()); if(mListener != null) mListener.onAddRemoteStream(mediaStream); }
Example #24
Source File: CustomPeerConnectionObserver.java From WebRTCapp with Apache License 2.0 | 4 votes |
@Override public void onRemoveStream(MediaStream mediaStream) { Log.d(logTag, "onRemoveStream() called with: mediaStream = [" + mediaStream + "]"); }
Example #25
Source File: VideoChatActivity.java From AndroidRTC with MIT License | 4 votes |
@Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_video_chat); getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON); Bundle extras = getIntent().getExtras(); if (extras == null || !extras.containsKey(Constants.USER_NAME)) { Intent intent = new Intent(this, MainActivity.class); startActivity(intent); Toast.makeText(this, "Need to pass username to VideoChatActivity in intent extras (Constants.USER_NAME).", Toast.LENGTH_SHORT).show(); finish(); return; } this.username = extras.getString(Constants.USER_NAME, ""); this.mChatList = getListView(); this.mChatEditText = (EditText) findViewById(R.id.chat_input); this.mCallStatus = (TextView) findViewById(R.id.call_status); // Set up the List View for chatting List<ChatMessage> ll = new LinkedList<ChatMessage>(); mChatAdapter = new ChatAdapter(this, ll); mChatList.setAdapter(mChatAdapter); // First, we initiate the PeerConnectionFactory with our application context and some options. 
PeerConnectionFactory.initializeAndroidGlobals( this, // Context true, // Audio Enabled true, // Video Enabled true, // Hardware Acceleration Enabled null); // Render EGL Context PeerConnectionFactory pcFactory = new PeerConnectionFactory(); this.pnRTCClient = new PnRTCClient(Constants.PUB_KEY, Constants.SUB_KEY, this.username); List<PeerConnection.IceServer> servers = getXirSysIceServers(); if (!servers.isEmpty()){ this.pnRTCClient.setSignalParams(new PnSignalingParams()); } // Returns the number of cams & front/back face device name int camNumber = VideoCapturerAndroid.getDeviceCount(); String frontFacingCam = VideoCapturerAndroid.getNameOfFrontFacingDevice(); String backFacingCam = VideoCapturerAndroid.getNameOfBackFacingDevice(); // Creates a VideoCapturerAndroid instance for the device name VideoCapturer capturer = VideoCapturerAndroid.create(frontFacingCam); // First create a Video Source, then we can make a Video Track localVideoSource = pcFactory.createVideoSource(capturer, this.pnRTCClient.videoConstraints()); VideoTrack localVideoTrack = pcFactory.createVideoTrack(VIDEO_TRACK_ID, localVideoSource); // First we create an AudioSource then we can create our AudioTrack AudioSource audioSource = pcFactory.createAudioSource(this.pnRTCClient.audioConstraints()); AudioTrack localAudioTrack = pcFactory.createAudioTrack(AUDIO_TRACK_ID, audioSource); // To create our VideoRenderer, we can use the included VideoRendererGui for simplicity // First we need to set the GLSurfaceView that it should render to this.videoView = (GLSurfaceView) findViewById(R.id.gl_surface); // Then we set that view, and pass a Runnable to run once the surface is ready VideoRendererGui.setView(videoView, null); // Now that VideoRendererGui is ready, we can get our VideoRenderer. // IN THIS ORDER. 
Effects which is on top or bottom remoteRender = VideoRendererGui.create(0, 0, 100, 100, VideoRendererGui.ScalingType.SCALE_ASPECT_FILL, false); localRender = VideoRendererGui.create(0, 0, 100, 100, VideoRendererGui.ScalingType.SCALE_ASPECT_FILL, true); // We start out with an empty MediaStream object, created with help from our PeerConnectionFactory // Note that LOCAL_MEDIA_STREAM_ID can be any string MediaStream mediaStream = pcFactory.createLocalMediaStream(LOCAL_MEDIA_STREAM_ID); // Now we can add our tracks. mediaStream.addTrack(localVideoTrack); mediaStream.addTrack(localAudioTrack); // First attach the RTC Listener so that callback events will be triggered this.pnRTCClient.attachRTCListener(new DemoRTCListener()); // Then attach your local media stream to the PnRTCClient. // This will trigger the onLocalStream callback. this.pnRTCClient.attachLocalMediaStream(mediaStream); // Listen on a channel. This is your "phone number," also set the max chat users. this.pnRTCClient.listenOn("Kevin"); this.pnRTCClient.setMaxConnections(1); // If the intent contains a number to dial, call it now that you are connected. // Else, remain listening for a call. if (extras.containsKey(Constants.CALL_USER)) { String callUser = extras.getString(Constants.CALL_USER, ""); connectToUser(callUser); } }
Example #26
Source File: RoomChatPresenter.java From VideoMeeting with Apache License 2.0 | 4 votes |
/** * 新的端连接到了本机 * @param remoteStream * @param endPoint */ @Override public void onAddRemoteStream(MediaStream remoteStream, int endPoint) { L.d("onAddRemoteStream endPoint " + endPoint); addRender(remoteStream, endPoint); }
Example #27
Source File: LicodeConnector.java From licodeAndroidClient with MIT License | 4 votes |
@Override public void onRemoveStream(MediaStream arg0) { // stream gone? }
Example #28
Source File: AppRTCDemoActivity.java From WebRTCDemo with BSD 3-Clause "New" or "Revised" License | 4 votes |
@Override public void onIceServers(List<PeerConnection.IceServer> iceServers) { factory = new PeerConnectionFactory(); MediaConstraints pcConstraints = appRtcClient.pcConstraints(); pcConstraints.optional.add( new MediaConstraints.KeyValuePair("RtpDataChannels", "true")); pc = factory.createPeerConnection(iceServers, pcConstraints, pcObserver); createDataChannelToRegressionTestBug2302(pc); // See method comment. // Uncomment to get ALL WebRTC tracing and SENSITIVE libjingle logging. // NOTE: this _must_ happen while |factory| is alive! // Logging.enableTracing( // "logcat:", // EnumSet.of(Logging.TraceLevel.TRACE_ALL), // Logging.Severity.LS_SENSITIVE); { final PeerConnection finalPC = pc; final Runnable repeatedStatsLogger = new Runnable() { public void run() { synchronized (quit[0]) { if (quit[0]) { return; } final Runnable runnableThis = this; if (hudView.getVisibility() == View.INVISIBLE) { vsv.postDelayed(runnableThis, 1000); return; } boolean success = finalPC.getStats(new StatsObserver() { public void onComplete(final StatsReport[] reports) { runOnUiThread(new Runnable() { public void run() { updateHUD(reports); } }); for (StatsReport report : reports) { Log.d(TAG, "Stats: " + report.toString()); } vsv.postDelayed(runnableThis, 1000); } }, null); if (!success) { throw new RuntimeException("getStats() return false!"); } } } }; vsv.postDelayed(repeatedStatsLogger, 1000); } { logAndToast("Creating local video source..."); MediaStream lMS = factory.createLocalMediaStream("ARDAMS"); if (appRtcClient.videoConstraints() != null) { VideoCapturer capturer = getVideoCapturer(); videoSource = factory.createVideoSource( capturer, appRtcClient.videoConstraints()); VideoTrack videoTrack = factory.createVideoTrack("ARDAMSv0", videoSource); videoTrack.addRenderer(new VideoRenderer(localRender)); lMS.addTrack(videoTrack); } if (appRtcClient.audioConstraints() != null) { lMS.addTrack(factory.createAudioTrack( "ARDAMSa0", 
factory.createAudioSource(appRtcClient.audioConstraints()))); } pc.addStream(lMS, new MediaConstraints()); } logAndToast("Waiting for ICE candidates..."); }
Example #29
Source File: PnPeerConnectionClient.java From android-webrtc-api with MIT License | 4 votes |
public void setLocalMediaStream(MediaStream localStream){ this.localMediaStream = localStream; mRtcListener.onLocalStream(localStream); }
Example #30
Source File: WebRTCNativeMgr.java From appinventor-extensions with Apache License 2.0 | 4 votes |
public void onAddStream(MediaStream mediaStream) { }