org.webrtc.VideoSource Java Examples
The following examples show how to use org.webrtc.VideoSource. They are drawn from open-source Android projects; each example notes its original source file, project, and license.
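Across these examples the VideoSource lifecycle is the same: create a PeerConnectionFactory, obtain a VideoSource from it, hand the source's CapturerObserver to a VideoCapturer, start capturing, wrap the source in a VideoTrack, and dispose of the source once the capturer is released. The sketch below condenses that flow. It assumes a recent org.webrtc Android build in which createVideoSource takes a boolean isScreencast flag (older releases, as in Examples #2 and #3, passed the capturer and constraints directly), and createCameraCapturer() is a hypothetical helper standing in for the camera-selection code used in Examples #1, #5 and #6.

    // Minimal sketch, not taken from any of the projects below.
    // Assumes a recent org.webrtc Android build; createCameraCapturer() is a
    // hypothetical helper that selects and creates a camera VideoCapturer.
    VideoTrack createLocalVideoTrack(Context appContext) {
        PeerConnectionFactory.initialize(
                PeerConnectionFactory.InitializationOptions.builder(appContext)
                        .createInitializationOptions());
        PeerConnectionFactory factory =
                PeerConnectionFactory.builder().createPeerConnectionFactory();

        EglBase.Context eglContext = EglBase.create().getEglBaseContext();
        SurfaceTextureHelper helper = SurfaceTextureHelper.create("CaptureThread", eglContext);

        VideoCapturer capturer = createCameraCapturer();  // hypothetical helper
        VideoSource videoSource = factory.createVideoSource(capturer.isScreencast());
        capturer.initialize(helper, appContext, videoSource.getCapturerObserver());
        capturer.startCapture(480, 640, 30);  // width, height, fps

        return factory.createVideoTrack("video0", videoSource);
        // Teardown (not shown): capturer.stopCapture(), capturer.dispose(),
        // then videoSource.dispose() (see Example #8 for source disposal).
    }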
Example #1
Source File: MainActivity.java From webrtc-android-tutorial with Apache License 2.0
@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_main);

    // create PeerConnectionFactory
    PeerConnectionFactory.InitializationOptions initializationOptions =
            PeerConnectionFactory.InitializationOptions.builder(this).createInitializationOptions();
    PeerConnectionFactory.initialize(initializationOptions);
    PeerConnectionFactory peerConnectionFactory = PeerConnectionFactory.builder().createPeerConnectionFactory();

    // create AudioSource
    AudioSource audioSource = peerConnectionFactory.createAudioSource(new MediaConstraints());
    AudioTrack audioTrack = peerConnectionFactory.createAudioTrack("101", audioSource);

    EglBase.Context eglBaseContext = EglBase.create().getEglBaseContext();
    SurfaceTextureHelper surfaceTextureHelper = SurfaceTextureHelper.create("CaptureThread", eglBaseContext);

    // create VideoCapturer
    VideoCapturer videoCapturer = createCameraCapturer();
    VideoSource videoSource = peerConnectionFactory.createVideoSource(videoCapturer.isScreencast());
    videoCapturer.initialize(surfaceTextureHelper, getApplicationContext(), videoSource.getCapturerObserver());
    videoCapturer.startCapture(480, 640, 30);

    SurfaceViewRenderer localView = findViewById(R.id.localView);
    localView.setMirror(true);
    localView.init(eglBaseContext, null);

    // create VideoTrack
    VideoTrack videoTrack = peerConnectionFactory.createVideoTrack("100", videoSource);
    // display in localView
    videoTrack.addSink(localView);
}
Example #2
Source File: VideoChatHelper.java From Socket.io-FLSocketIM-Android with MIT License
private void createLocalStream() {
    localMediaStream = factory.createLocalMediaStream("ARDAMS");

    // audio
    AudioSource audioSource = factory.createAudioSource(new MediaConstraints());
    localMediaStream.addTrack(factory.createAudioTrack("ARDAMSa0", audioSource));

    // video
    String frontCameraDeviceName = VideoCapturerAndroid.getNameOfFrontFacingDevice();
    VideoCapturer capture = VideoCapturerAndroid.create(frontCameraDeviceName);
    VideoSource videoSource = factory.createVideoSource(capture, localVideoConstraints());
    localMediaStream.addTrack(factory.createVideoTrack("ARDAMSv0", videoSource));

    callBack.onSetLocalStream(localMediaStream, myId);
}
Example #3
Source File: PeersManager.java From WebRTCapp with Apache License 2.0
public void start() {
    PeerConnectionFactory.initializeAndroidGlobals(activity, true);
    PeerConnectionFactory.Options options = new PeerConnectionFactory.Options();
    peerConnectionFactory = new PeerConnectionFactory(options);

    videoGrabberAndroid = createVideoGrabber();
    MediaConstraints constraints = new MediaConstraints();

    VideoSource videoSource = peerConnectionFactory.createVideoSource(videoGrabberAndroid);
    localVideoTrack = peerConnectionFactory.createVideoTrack("100", videoSource);

    AudioSource audioSource = peerConnectionFactory.createAudioSource(constraints);
    localAudioTrack = peerConnectionFactory.createAudioTrack("101", audioSource);

    if (videoGrabberAndroid != null) {
        videoGrabberAndroid.startCapture(1000, 1000, 30);
    }

    localRenderer = new VideoRenderer(localVideoView);
    localVideoTrack.addRenderer(localRenderer);

    MediaConstraints sdpConstraints = new MediaConstraints();
    sdpConstraints.mandatory.add(new MediaConstraints.KeyValuePair("offerToReceiveAudio", "true"));
    sdpConstraints.mandatory.add(new MediaConstraints.KeyValuePair("offerToReceiveVideo", "true"));
    createLocalPeerConnection(sdpConstraints);
}
Example #4
Source File: MediaStreamFactory.java From owt-client-android with Apache License 2.0
MediaStream createMediaStream(VideoCapturer videoCapturer, AudioTrackConstraints audioMediaConstraints) {
    RCHECK(videoCapturer != null || audioMediaConstraints != null);
    String label = UUID.randomUUID().toString();
    MediaStream mediaStream = PCFactoryProxy.instance().createLocalMediaStream(label);

    if (videoCapturer != null) {
        VideoSource videoSource = PCFactoryProxy.instance().createVideoSource(
                videoCapturer.isScreencast());
        SurfaceTextureHelper helper = SurfaceTextureHelper.create("CT", localContext);
        videoCapturer.initialize(helper, ContextInitialization.context,
                videoSource.getCapturerObserver());
        videoCapturer.startCapture(videoCapturer.getWidth(), videoCapturer.getHeight(),
                videoCapturer.getFps());
        VideoTrack videoTrack = PCFactoryProxy.instance().createVideoTrack(label + "v0", videoSource);
        videoTrack.setEnabled(true);
        mediaStream.addTrack(videoTrack);
        unsharedVideoSources.put(label, videoSource);
    }

    if (audioMediaConstraints != null) {
        if (sharedAudioSource == null) {
            sharedAudioSource = PCFactoryProxy.instance().createAudioSource(
                    audioMediaConstraints.convertToWebRTCConstraints());
        }
        audioSourceRef++;
        mediaStream.addTrack(
                PCFactoryProxy.instance().createAudioTrack(label + "a0", sharedAudioSource));
    }

    return mediaStream;
}
Example #5
Source File: MainActivity.java From webrtc-android-tutorial with Apache License 2.0
@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_main);

    EglBase.Context eglBaseContext = EglBase.create().getEglBaseContext();

    // create PeerConnectionFactory
    PeerConnectionFactory.initialize(PeerConnectionFactory.InitializationOptions
            .builder(this)
            .createInitializationOptions());
    PeerConnectionFactory.Options options = new PeerConnectionFactory.Options();
    DefaultVideoEncoderFactory defaultVideoEncoderFactory =
            new DefaultVideoEncoderFactory(eglBaseContext, true, true);
    DefaultVideoDecoderFactory defaultVideoDecoderFactory =
            new DefaultVideoDecoderFactory(eglBaseContext);
    peerConnectionFactory = PeerConnectionFactory.builder()
            .setOptions(options)
            .setVideoEncoderFactory(defaultVideoEncoderFactory)
            .setVideoDecoderFactory(defaultVideoDecoderFactory)
            .createPeerConnectionFactory();

    SurfaceTextureHelper surfaceTextureHelper = SurfaceTextureHelper.create("CaptureThread", eglBaseContext);
    // create VideoCapturer
    VideoCapturer videoCapturer = createCameraCapturer(true);
    VideoSource videoSource = peerConnectionFactory.createVideoSource(videoCapturer.isScreencast());
    videoCapturer.initialize(surfaceTextureHelper, getApplicationContext(), videoSource.getCapturerObserver());
    videoCapturer.startCapture(480, 640, 30);

    localView = findViewById(R.id.localView);
    localView.setMirror(true);
    localView.init(eglBaseContext, null);

    // create VideoTrack
    VideoTrack videoTrack = peerConnectionFactory.createVideoTrack("100", videoSource);
    // display in localView
    // videoTrack.addSink(localView);

    SurfaceTextureHelper remoteSurfaceTextureHelper = SurfaceTextureHelper.create("RemoteCaptureThread", eglBaseContext);
    // create VideoCapturer
    VideoCapturer remoteVideoCapturer = createCameraCapturer(false);
    VideoSource remoteVideoSource = peerConnectionFactory.createVideoSource(remoteVideoCapturer.isScreencast());
    remoteVideoCapturer.initialize(remoteSurfaceTextureHelper, getApplicationContext(), remoteVideoSource.getCapturerObserver());
    remoteVideoCapturer.startCapture(480, 640, 30);

    remoteView = findViewById(R.id.remoteView);
    remoteView.setMirror(false);
    remoteView.init(eglBaseContext, null);

    // create VideoTrack
    VideoTrack remoteVideoTrack = peerConnectionFactory.createVideoTrack("102", remoteVideoSource);
    // display in remoteView
    // remoteVideoTrack.addSink(remoteView);

    mediaStreamLocal = peerConnectionFactory.createLocalMediaStream("mediaStreamLocal");
    mediaStreamLocal.addTrack(videoTrack);

    mediaStreamRemote = peerConnectionFactory.createLocalMediaStream("mediaStreamRemote");
    mediaStreamRemote.addTrack(remoteVideoTrack);

    call(mediaStreamLocal, mediaStreamRemote);
}
Example #6
Source File: MainActivity.java From webrtc-android-tutorial with Apache License 2.0
@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_main);

    EglBase.Context eglBaseContext = EglBase.create().getEglBaseContext();

    // create PeerConnectionFactory
    PeerConnectionFactory.initialize(PeerConnectionFactory.InitializationOptions
            .builder(this)
            .createInitializationOptions());
    PeerConnectionFactory.Options options = new PeerConnectionFactory.Options();
    DefaultVideoEncoderFactory defaultVideoEncoderFactory =
            new DefaultVideoEncoderFactory(eglBaseContext, true, true);
    DefaultVideoDecoderFactory defaultVideoDecoderFactory =
            new DefaultVideoDecoderFactory(eglBaseContext);
    peerConnectionFactory = PeerConnectionFactory.builder()
            .setOptions(options)
            .setVideoEncoderFactory(defaultVideoEncoderFactory)
            .setVideoDecoderFactory(defaultVideoDecoderFactory)
            .createPeerConnectionFactory();

    SurfaceTextureHelper surfaceTextureHelper = SurfaceTextureHelper.create("CaptureThread", eglBaseContext);
    // create VideoCapturer
    VideoCapturer videoCapturer = createCameraCapturer(true);
    VideoSource videoSource = peerConnectionFactory.createVideoSource(videoCapturer.isScreencast());
    videoCapturer.initialize(surfaceTextureHelper, getApplicationContext(), videoSource.getCapturerObserver());
    videoCapturer.startCapture(480, 640, 30);

    localView = findViewById(R.id.localView);
    localView.setMirror(true);
    localView.init(eglBaseContext, null);

    // create VideoTrack
    VideoTrack videoTrack = peerConnectionFactory.createVideoTrack("100", videoSource);
    // display in localView
    videoTrack.addSink(localView);

    remoteView = findViewById(R.id.remoteView);
    remoteView.setMirror(false);
    remoteView.init(eglBaseContext, null);

    AudioSource audioSource = peerConnectionFactory.createAudioSource(new MediaConstraints());
    AudioTrack audioTrack = peerConnectionFactory.createAudioTrack("101", audioSource);

    mediaStream = peerConnectionFactory.createLocalMediaStream("mediaStream");
    mediaStream.addTrack(videoTrack);
    mediaStream.addTrack(audioTrack);

    SignalingClient.get().setCallback(this);
    call();
}
Example #7
Source File: CallActivity.java From RTCStartupDemo with GNU General Public License v3.0
@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_call);

    mLogcatView = findViewById(R.id.LogcatView);
    mStartCallBtn = findViewById(R.id.StartCallButton);
    mEndCallBtn = findViewById(R.id.EndCallButton);

    RTCSignalClient.getInstance().setSignalEventListener(mOnSignalEventListener);

    String serverAddr = getIntent().getStringExtra("ServerAddr");
    String roomName = getIntent().getStringExtra("RoomName");
    RTCSignalClient.getInstance().joinRoom(serverAddr, UUID.randomUUID().toString(), roomName);

    mRootEglBase = EglBase.create();

    mLocalSurfaceView = findViewById(R.id.LocalSurfaceView);
    mRemoteSurfaceView = findViewById(R.id.RemoteSurfaceView);

    mLocalSurfaceView.init(mRootEglBase.getEglBaseContext(), null);
    mLocalSurfaceView.setScalingType(RendererCommon.ScalingType.SCALE_ASPECT_FILL);
    mLocalSurfaceView.setMirror(true);
    mLocalSurfaceView.setEnableHardwareScaler(false /* enabled */);

    mRemoteSurfaceView.init(mRootEglBase.getEglBaseContext(), null);
    mRemoteSurfaceView.setScalingType(RendererCommon.ScalingType.SCALE_ASPECT_FILL);
    mRemoteSurfaceView.setMirror(true);
    mRemoteSurfaceView.setEnableHardwareScaler(true /* enabled */);
    mRemoteSurfaceView.setZOrderMediaOverlay(true);

    ProxyVideoSink videoSink = new ProxyVideoSink();
    videoSink.setTarget(mLocalSurfaceView);

    mPeerConnectionFactory = createPeerConnectionFactory(this);

    // NOTE: this _must_ happen while PeerConnectionFactory is alive!
    Logging.enableLogToDebugOutput(Logging.Severity.LS_VERBOSE);

    mVideoCapturer = createVideoCapturer();
    mSurfaceTextureHelper = SurfaceTextureHelper.create("CaptureThread", mRootEglBase.getEglBaseContext());
    VideoSource videoSource = mPeerConnectionFactory.createVideoSource(false);
    mVideoCapturer.initialize(mSurfaceTextureHelper, getApplicationContext(), videoSource.getCapturerObserver());

    mVideoTrack = mPeerConnectionFactory.createVideoTrack(VIDEO_TRACK_ID, videoSource);
    mVideoTrack.setEnabled(true);
    mVideoTrack.addSink(videoSink);

    AudioSource audioSource = mPeerConnectionFactory.createAudioSource(new MediaConstraints());
    mAudioTrack = mPeerConnectionFactory.createAudioTrack(AUDIO_TRACK_ID, audioSource);
    mAudioTrack.setEnabled(true);
}
Example #8
Source File: MediaStreamFactory.java From owt-client-android with Apache License 2.0
void onVideoSourceRelease(String label) {
    DCHECK(unsharedVideoSources.containsKey(label));
    VideoSource videoSource = unsharedVideoSources.get(label);
    unsharedVideoSources.remove(label);
    videoSource.dispose();
}
Example #9
Source File: WebRTCWrapper.java From Pix-Art-Messenger with GNU General Public License v3.0
synchronized void initializePeerConnection(final Set<Media> media, final List<PeerConnection.IceServer> iceServers)
        throws InitializationException {
    Preconditions.checkState(this.eglBase != null);
    Preconditions.checkNotNull(media);
    Preconditions.checkArgument(media.size() > 0, "media can not be empty when initializing peer connection");

    final boolean setUseHardwareAcousticEchoCanceler =
            WebRtcAudioEffects.canUseAcousticEchoCanceler() && !HARDWARE_AEC_BLACKLIST.contains(Build.MODEL);
    Log.d(Config.LOGTAG, String.format("setUseHardwareAcousticEchoCanceler(%s) model=%s",
            setUseHardwareAcousticEchoCanceler, Build.MODEL));

    PeerConnectionFactory peerConnectionFactory = PeerConnectionFactory.builder()
            .setVideoDecoderFactory(new DefaultVideoDecoderFactory(eglBase.getEglBaseContext()))
            .setVideoEncoderFactory(new DefaultVideoEncoderFactory(eglBase.getEglBaseContext(), true, true))
            .setAudioDeviceModule(JavaAudioDeviceModule.builder(context)
                    .setUseHardwareAcousticEchoCanceler(setUseHardwareAcousticEchoCanceler)
                    .createAudioDeviceModule()
            )
            .createPeerConnectionFactory();

    final MediaStream stream = peerConnectionFactory.createLocalMediaStream("my-media-stream");

    final Optional<CapturerChoice> optionalCapturerChoice =
            media.contains(Media.VIDEO) ? getVideoCapturer() : Optional.absent();

    if (optionalCapturerChoice.isPresent()) {
        this.capturerChoice = optionalCapturerChoice.get();
        final CameraVideoCapturer capturer = this.capturerChoice.cameraVideoCapturer;
        final VideoSource videoSource = peerConnectionFactory.createVideoSource(false);
        SurfaceTextureHelper surfaceTextureHelper = SurfaceTextureHelper.create("webrtc", eglBase.getEglBaseContext());
        capturer.initialize(surfaceTextureHelper, requireContext(), videoSource.getCapturerObserver());
        Log.d(Config.LOGTAG, String.format("start capturing at %dx%d@%d",
                capturerChoice.captureFormat.width, capturerChoice.captureFormat.height, capturerChoice.getFrameRate()));
        capturer.startCapture(capturerChoice.captureFormat.width, capturerChoice.captureFormat.height, capturerChoice.getFrameRate());
        this.localVideoTrack = peerConnectionFactory.createVideoTrack("my-video-track", videoSource);
        stream.addTrack(this.localVideoTrack);
    }

    if (media.contains(Media.AUDIO)) {
        // set up audio track
        final AudioSource audioSource = peerConnectionFactory.createAudioSource(new MediaConstraints());
        this.localAudioTrack = peerConnectionFactory.createAudioTrack("my-audio-track", audioSource);
        stream.addTrack(this.localAudioTrack);
    }

    final PeerConnection.RTCConfiguration rtcConfig = new PeerConnection.RTCConfiguration(iceServers);
    rtcConfig.tcpCandidatePolicy = PeerConnection.TcpCandidatePolicy.DISABLED; // XEP-0176 doesn't support tcp
    rtcConfig.continualGatheringPolicy = PeerConnection.ContinualGatheringPolicy.GATHER_CONTINUALLY;
    final PeerConnection peerConnection = peerConnectionFactory.createPeerConnection(rtcConfig, peerConnectionObserver);
    if (peerConnection == null) {
        throw new InitializationException("Unable to create PeerConnection");
    }
    peerConnection.addStream(stream);
    peerConnection.setAudioPlayout(true);
    peerConnection.setAudioRecording(true);
    this.peerConnection = peerConnection;
}
Example #10
Source File: AndroidVideoSource.java From actor-platform with GNU Affero General Public License v3.0
public VideoSource getVideoSource() {
    return videoSource;
}
Example #11
Source File: WebRTCWrapper.java From Conversations with GNU General Public License v3.0
synchronized void initializePeerConnection(final Set<Media> media, final List<PeerConnection.IceServer> iceServers)
        throws InitializationException {
    Preconditions.checkState(this.eglBase != null);
    Preconditions.checkNotNull(media);
    Preconditions.checkArgument(media.size() > 0, "media can not be empty when initializing peer connection");

    final boolean setUseHardwareAcousticEchoCanceler =
            WebRtcAudioEffects.canUseAcousticEchoCanceler() && !HARDWARE_AEC_BLACKLIST.contains(Build.MODEL);
    Log.d(Config.LOGTAG, String.format("setUseHardwareAcousticEchoCanceler(%s) model=%s",
            setUseHardwareAcousticEchoCanceler, Build.MODEL));

    PeerConnectionFactory peerConnectionFactory = PeerConnectionFactory.builder()
            .setVideoDecoderFactory(new DefaultVideoDecoderFactory(eglBase.getEglBaseContext()))
            .setVideoEncoderFactory(new DefaultVideoEncoderFactory(eglBase.getEglBaseContext(), true, true))
            .setAudioDeviceModule(JavaAudioDeviceModule.builder(context)
                    .setUseHardwareAcousticEchoCanceler(setUseHardwareAcousticEchoCanceler)
                    .createAudioDeviceModule()
            )
            .createPeerConnectionFactory();

    final MediaStream stream = peerConnectionFactory.createLocalMediaStream("my-media-stream");

    final Optional<CapturerChoice> optionalCapturerChoice =
            media.contains(Media.VIDEO) ? getVideoCapturer() : Optional.absent();

    if (optionalCapturerChoice.isPresent()) {
        this.capturerChoice = optionalCapturerChoice.get();
        final CameraVideoCapturer capturer = this.capturerChoice.cameraVideoCapturer;
        final VideoSource videoSource = peerConnectionFactory.createVideoSource(false);
        SurfaceTextureHelper surfaceTextureHelper = SurfaceTextureHelper.create("webrtc", eglBase.getEglBaseContext());
        capturer.initialize(surfaceTextureHelper, requireContext(), videoSource.getCapturerObserver());
        Log.d(Config.LOGTAG, String.format("start capturing at %dx%d@%d",
                capturerChoice.captureFormat.width, capturerChoice.captureFormat.height, capturerChoice.getFrameRate()));
        capturer.startCapture(capturerChoice.captureFormat.width, capturerChoice.captureFormat.height, capturerChoice.getFrameRate());
        this.localVideoTrack = peerConnectionFactory.createVideoTrack("my-video-track", videoSource);
        stream.addTrack(this.localVideoTrack);
    }

    if (media.contains(Media.AUDIO)) {
        // set up audio track
        final AudioSource audioSource = peerConnectionFactory.createAudioSource(new MediaConstraints());
        this.localAudioTrack = peerConnectionFactory.createAudioTrack("my-audio-track", audioSource);
        stream.addTrack(this.localAudioTrack);
    }

    final PeerConnection.RTCConfiguration rtcConfig = new PeerConnection.RTCConfiguration(iceServers);
    rtcConfig.tcpCandidatePolicy = PeerConnection.TcpCandidatePolicy.DISABLED; // XEP-0176 doesn't support tcp
    rtcConfig.continualGatheringPolicy = PeerConnection.ContinualGatheringPolicy.GATHER_CONTINUALLY;
    final PeerConnection peerConnection = peerConnectionFactory.createPeerConnection(rtcConfig, peerConnectionObserver);
    if (peerConnection == null) {
        throw new InitializationException("Unable to create PeerConnection");
    }
    peerConnection.addStream(stream);
    peerConnection.setAudioPlayout(true);
    peerConnection.setAudioRecording(true);
    this.peerConnection = peerConnection;
}