org.webrtc.VideoCapturerAndroid Java Examples
The following examples show how to use org.webrtc.VideoCapturerAndroid.
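VideoCapturerAndroid belongs to the old (roughly 2015-2016) libjingle-era WebRTC Android API; later releases removed it in favor of Camera1Enumerator/Camera2Enumerator-based capturers, so these examples only apply to builds that still ship the class. All of them follow the same basic pipeline: look up a camera by name, create a capturer for it, wrap the capturer in a VideoSource, and expose that source as a VideoTrack. A minimal sketch of that pipeline, assuming factory is an already-initialized PeerConnectionFactory and the track ID is illustrative:

// Minimal capture pipeline for the old VideoCapturerAndroid API.
// "factory" is assumed to be an initialized PeerConnectionFactory.
String deviceName = VideoCapturerAndroid.getNameOfFrontFacingDevice();
VideoCapturer capturer = VideoCapturerAndroid.create(deviceName);
VideoSource videoSource = factory.createVideoSource(capturer, new MediaConstraints());
VideoTrack videoTrack = factory.createVideoTrack("ARDAMSv0", videoSource);
videoTrack.setEnabled(true);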
Example #1
Source File: WebRtcClient.java From imsdk-android with MIT License
public void switchCameraInternal() {
    if (CameraEnumerationAndroid.getDeviceCount() > 1) {
        if (!pcParams.videoCallEnabled || isError || localMS == null) {
            LogUtil.e(TAG, "Failed to switch camera. Video: " + pcParams.videoCallEnabled + ". Error : " + isError);
            return; // No video is sent or an error happened.
        }
        LogUtil.d(TAG, "Switch camera");
        videoCapturer.switchCamera(new VideoCapturerAndroid.CameraSwitchHandler() {
            @Override
            public void onCameraSwitchDone(boolean b) {
                Logger.i(TAG + "camera switch callback succeeded: " + b);
            }

            @Override
            public void onCameraSwitchError(String s) {
                Logger.i(TAG + "camera switch callback error: " + s);
            }
        });
    } else {
        LogUtil.d(TAG, "Will not switch camera, only one camera is available");
    }
}
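switchCamera is asynchronous; the boolean handed to onCameraSwitchDone reports whether the newly active camera is front-facing. A common follow-up is to update the local preview's mirroring, sketched here under the assumption of a VideoRendererGui-managed local renderer (the localRender field is hypothetical, not part of this example):

videoCapturer.switchCamera(new VideoCapturerAndroid.CameraSwitchHandler() {
    @Override
    public void onCameraSwitchDone(boolean isFrontCamera) {
        // Mirror the local preview only for the front-facing camera.
        VideoRendererGui.update(localRender, 0, 0, 100, 100,
                VideoRendererGui.ScalingType.SCALE_ASPECT_FILL, isFrontCamera);
    }

    @Override
    public void onCameraSwitchError(String errorDescription) {
        Log.e(TAG, "Camera switch failed: " + errorDescription);
    }
});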
Example #2
Source File: VideoChatHelper.java From Socket.io-FLSocketIM-Android with MIT License
private void createLocalStream() {
    localMediaStream = factory.createLocalMediaStream("ARDAMS");

    // Audio
    AudioSource audioSource = factory.createAudioSource(new MediaConstraints());
    localMediaStream.addTrack(factory.createAudioTrack("ARDAMSa0", audioSource));

    // Video
    String frontCameraDeviceName = VideoCapturerAndroid.getNameOfFrontFacingDevice();
    VideoCapturer capture = VideoCapturerAndroid.create(frontCameraDeviceName);
    VideoSource videoSource = factory.createVideoSource(capture, localVideoConstraints());
    localMediaStream.addTrack(factory.createVideoTrack("ARDAMSv0", videoSource));

    callBack.onSetLocalStream(localMediaStream, myId);
}
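The localVideoConstraints() helper is not shown in this example. A plausible implementation in the style of the old API, using the standard mandatory constraint keys with illustrative resolution and frame-rate values, would be:

// Sketch of a video-constraints helper; the values are assumptions,
// not taken from the Socket.io-FLSocketIM-Android source.
private MediaConstraints localVideoConstraints() {
    MediaConstraints constraints = new MediaConstraints();
    constraints.mandatory.add(new MediaConstraints.KeyValuePair("maxWidth", "640"));
    constraints.mandatory.add(new MediaConstraints.KeyValuePair("maxHeight", "480"));
    constraints.mandatory.add(new MediaConstraints.KeyValuePair("maxFrameRate", "30"));
    return constraints;
}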
Example #3
Source File: PeerConnectionClient.java From Yahala-Messenger with MIT License
private VideoTrack createVideoTrack(VideoCapturerAndroid capturer) {
    videoSource = factory.createVideoSource(capturer, videoConstraints);
    localVideoTrack = factory.createVideoTrack(VIDEO_TRACK_ID, videoSource);
    localVideoTrack.setEnabled(renderVideo);
    localVideoTrack.addRenderer(new VideoRenderer(localRender));
    return localVideoTrack;
}
Example #4
Source File: MediaResourceManager.java From webrtcpeer-android with Apache License 2.0
private VideoTrack createCapturerVideoTrack(VideoCapturerAndroid capturer) {
    videoSource = factory.createVideoSource(capturer, videoConstraints);
    localVideoTrack = factory.createVideoTrack(VIDEO_TRACK_ID, videoSource);
    localVideoTrack.setEnabled(renderVideo);
    localVideoTrack.addRenderer(new VideoRenderer(localRender));
    return localVideoTrack;
}
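Examples #3 and #4 are the same helper under different names; both keep videoSource in a field, which is what makes pausing capture possible later. In the apprtc-derived code these projects are based on, capture is typically paused and resumed through the source rather than the capturer; a minimal sketch, assuming the videoSource field from the examples:

// Pause and resume capture via the VideoSource (old-API pattern).
private void stopVideoSource() {
    if (videoSource != null) {
        videoSource.stop();
    }
}

private void startVideoSource() {
    if (videoSource != null) {
        videoSource.restart();
    }
}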
Example #5
Source File: PeerConnectionClient.java From Yahala-Messenger with MIT License
private void createPeerConnectionInternal(EGLContext renderEGLContext) {
    if (factory == null || isError) {
        Log.e(TAG, "Peerconnection factory is not created");
        return;
    }
    Log.d(TAG, "Create peer connection.");
    Log.d(TAG, "PCConstraints: " + pcConstraints.toString());
    if (videoConstraints != null) {
        Log.d(TAG, "VideoConstraints: " + videoConstraints.toString());
    }
    queuedRemoteCandidates = new LinkedList<IceCandidate>();

    if (videoCallEnabled) {
        Log.d(TAG, "EGLContext: " + renderEGLContext);
        factory.setVideoHwAccelerationOptions(renderEGLContext, renderEGLContext);
    }

    PeerConnection.RTCConfiguration rtcConfig =
            new PeerConnection.RTCConfiguration(signalingParameters.iceServers);
    // TCP candidates are only useful when connecting to a server that supports
    // ICE-TCP.
    rtcConfig.tcpCandidatePolicy = PeerConnection.TcpCandidatePolicy.DISABLED;
    rtcConfig.bundlePolicy = PeerConnection.BundlePolicy.MAXBUNDLE;
    rtcConfig.rtcpMuxPolicy = PeerConnection.RtcpMuxPolicy.REQUIRE;
    // Use ECDSA encryption.
    rtcConfig.keyType = PeerConnection.KeyType.ECDSA;

    peerConnection = factory.createPeerConnection(rtcConfig, pcConstraints, pcObserver);
    isInitiator = false;

    // Set default WebRTC tracing and INFO libjingle logging.
    // NOTE: this _must_ happen while |factory| is alive!
    Logging.enableTracing("logcat:",
            EnumSet.of(Logging.TraceLevel.TRACE_DEFAULT),
            Logging.Severity.LS_INFO);

    mediaStream = factory.createLocalMediaStream("ARDAMS");
    if (videoCallEnabled) {
        String cameraDeviceName = CameraEnumerationAndroid.getDeviceName(0);
        String frontCameraDeviceName = CameraEnumerationAndroid.getNameOfFrontFacingDevice();
        if (numberOfCameras > 1 && frontCameraDeviceName != null) {
            cameraDeviceName = frontCameraDeviceName;
        }
        Log.d(TAG, "Opening camera: " + cameraDeviceName);
        videoCapturer = VideoCapturerAndroid.create(cameraDeviceName, null,
                peerConnectionParameters.captureToTexture ? renderEGLContext : null);
        if (videoCapturer == null) {
            reportError("Failed to open camera");
            return;
        }
        mediaStream.addTrack(createVideoTrack(videoCapturer));
    }

    mediaStream.addTrack(factory.createAudioTrack(
            AUDIO_TRACK_ID, factory.createAudioSource(audioConstraints)));
    peerConnection.addStream(mediaStream);
    Log.d(TAG, "Peer connection created.");
}
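Example #5 never shows the matching teardown. In the apprtc PeerConnectionClient this method derives from, cleanup disposes the peer connection, then the video source, then the factory; disposing the source is assumed here to also release the capturer it was created from, as in the old API. A hedged sketch using the fields from the example:

// Teardown sketch; field names match Example #5, ordering follows apprtc.
private void closeInternal() {
    if (peerConnection != null) {
        peerConnection.dispose();
        peerConnection = null;
    }
    if (videoSource != null) {
        videoSource.dispose(); // assumed to release the capturer it owns
        videoSource = null;
    }
    if (factory != null) {
        factory.dispose();
        factory = null;
    }
}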
Example #6
Source File: MainActivity.java From krankygeek with MIT License
@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_main);

    AudioManager audioManager = (AudioManager) this.getSystemService(Context.AUDIO_SERVICE);
    audioManager.setMode(AudioManager.MODE_IN_COMMUNICATION);
    audioManager.setSpeakerphoneOn(true);

    PeerConnectionFactory.initializeAndroidGlobals(
            this,  // Context
            true,  // Audio Enabled
            true,  // Video Enabled
            true,  // Hardware Acceleration Enabled
            null); // Render EGL Context

    peerConnectionFactory = new PeerConnectionFactory();

    VideoCapturerAndroid vc = VideoCapturerAndroid.create(
            VideoCapturerAndroid.getNameOfFrontFacingDevice(), null);

    localVideoSource = peerConnectionFactory.createVideoSource(vc, new MediaConstraints());
    VideoTrack localVideoTrack = peerConnectionFactory.createVideoTrack(VIDEO_TRACK_ID, localVideoSource);
    localVideoTrack.setEnabled(true);

    AudioSource audioSource = peerConnectionFactory.createAudioSource(new MediaConstraints());
    AudioTrack localAudioTrack = peerConnectionFactory.createAudioTrack(AUDIO_TRACK_ID, audioSource);
    localAudioTrack.setEnabled(true);

    localMediaStream = peerConnectionFactory.createLocalMediaStream(LOCAL_STREAM_ID);
    localMediaStream.addTrack(localVideoTrack);
    localMediaStream.addTrack(localAudioTrack);

    GLSurfaceView videoView = (GLSurfaceView) findViewById(R.id.glview_call);
    VideoRendererGui.setView(videoView, null);
    try {
        otherPeerRenderer = VideoRendererGui.createGui(0, 0, 100, 100,
                VideoRendererGui.ScalingType.SCALE_ASPECT_FILL, true);
        VideoRenderer renderer = VideoRendererGui.createGui(50, 50, 50, 50,
                VideoRendererGui.ScalingType.SCALE_ASPECT_FILL, true);
        localVideoTrack.addRenderer(renderer);
    } catch (Exception e) {
        e.printStackTrace();
    }
}
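Note that initializeAndroidGlobals must run before the first PeerConnectionFactory is constructed, and in this era of the API it returns a boolean that the example discards. Checking it costs one line; a sketch:

// initializeAndroidGlobals returns false if global setup fails
// (e.g. hardware video acceleration could not be initialized).
if (!PeerConnectionFactory.initializeAndroidGlobals(this, true, true, true, null)) {
    Log.e("MainActivity", "Failed to initialize WebRTC Android globals");
}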
Example #7
Source File: VideoChatActivity.java From AndroidRTC with MIT License
@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_video_chat);
    getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);

    Bundle extras = getIntent().getExtras();
    if (extras == null || !extras.containsKey(Constants.USER_NAME)) {
        Intent intent = new Intent(this, MainActivity.class);
        startActivity(intent);
        Toast.makeText(this,
                "Need to pass username to VideoChatActivity in intent extras (Constants.USER_NAME).",
                Toast.LENGTH_SHORT).show();
        finish();
        return;
    }
    this.username = extras.getString(Constants.USER_NAME, "");
    this.mChatList = getListView();
    this.mChatEditText = (EditText) findViewById(R.id.chat_input);
    this.mCallStatus = (TextView) findViewById(R.id.call_status);

    // Set up the List View for chatting
    List<ChatMessage> ll = new LinkedList<ChatMessage>();
    mChatAdapter = new ChatAdapter(this, ll);
    mChatList.setAdapter(mChatAdapter);

    // First, we initiate the PeerConnectionFactory with our application context and some options.
    PeerConnectionFactory.initializeAndroidGlobals(
            this,  // Context
            true,  // Audio Enabled
            true,  // Video Enabled
            true,  // Hardware Acceleration Enabled
            null); // Render EGL Context

    PeerConnectionFactory pcFactory = new PeerConnectionFactory();
    this.pnRTCClient = new PnRTCClient(Constants.PUB_KEY, Constants.SUB_KEY, this.username);

    List<PeerConnection.IceServer> servers = getXirSysIceServers();
    if (!servers.isEmpty()) {
        this.pnRTCClient.setSignalParams(new PnSignalingParams());
    }

    // Returns the number of cams & front/back face device name
    int camNumber = VideoCapturerAndroid.getDeviceCount();
    String frontFacingCam = VideoCapturerAndroid.getNameOfFrontFacingDevice();
    String backFacingCam = VideoCapturerAndroid.getNameOfBackFacingDevice();

    // Creates a VideoCapturerAndroid instance for the device name
    VideoCapturer capturer = VideoCapturerAndroid.create(frontFacingCam);

    // First create a Video Source, then we can make a Video Track
    localVideoSource = pcFactory.createVideoSource(capturer, this.pnRTCClient.videoConstraints());
    VideoTrack localVideoTrack = pcFactory.createVideoTrack(VIDEO_TRACK_ID, localVideoSource);

    // First we create an AudioSource then we can create our AudioTrack
    AudioSource audioSource = pcFactory.createAudioSource(this.pnRTCClient.audioConstraints());
    AudioTrack localAudioTrack = pcFactory.createAudioTrack(AUDIO_TRACK_ID, audioSource);

    // To create our VideoRenderer, we can use the included VideoRendererGui for simplicity.
    // First we need to set the GLSurfaceView that it should render to.
    this.videoView = (GLSurfaceView) findViewById(R.id.gl_surface);

    // Then we set that view, and pass a Runnable to run once the surface is ready
    VideoRendererGui.setView(videoView, null);

    // Now that VideoRendererGui is ready, we can get our VideoRenderer.
    // IN THIS ORDER. Affects which is on top or bottom.
    remoteRender = VideoRendererGui.create(0, 0, 100, 100,
            VideoRendererGui.ScalingType.SCALE_ASPECT_FILL, false);
    localRender = VideoRendererGui.create(0, 0, 100, 100,
            VideoRendererGui.ScalingType.SCALE_ASPECT_FILL, true);

    // We start out with an empty MediaStream object, created with help from our PeerConnectionFactory.
    // Note that LOCAL_MEDIA_STREAM_ID can be any string.
    MediaStream mediaStream = pcFactory.createLocalMediaStream(LOCAL_MEDIA_STREAM_ID);

    // Now we can add our tracks.
    mediaStream.addTrack(localVideoTrack);
    mediaStream.addTrack(localAudioTrack);

    // First attach the RTC Listener so that callback events will be triggered
    this.pnRTCClient.attachRTCListener(new DemoRTCListener());

    // Then attach your local media stream to the PnRTCClient.
    // This will trigger the onLocalStream callback.
    this.pnRTCClient.attachLocalMediaStream(mediaStream);

    // Listen on a channel. This is your "phone number," also set the max chat users.
    this.pnRTCClient.listenOn("Kevin");
    this.pnRTCClient.setMaxConnections(1);

    // If the intent contains a number to dial, call it now that you are connected.
    // Else, remain listening for a call.
    if (extras.containsKey(Constants.CALL_USER)) {
        String callUser = extras.getString(Constants.CALL_USER, "");
        connectToUser(callUser);
    }
}
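Because the activity holds both a GLSurfaceView and a live camera source, lifecycle handling matters here. A minimal sketch of the usual pairing, assuming the videoView and localVideoSource fields from the example (not shown in the original excerpt):

@Override
protected void onPause() {
    super.onPause();
    this.videoView.onPause();      // pause GL rendering
    this.localVideoSource.stop();  // pause camera capture
}

@Override
protected void onResume() {
    super.onResume();
    this.videoView.onResume();
    this.localVideoSource.restart();
}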
Example #8
Source File: MediaResourceManager.java From webrtcpeer-android with Apache License 2.0
void createLocalMediaStream(Object renderEGLContext, final VideoRenderer.Callbacks localRender) {
    if (factory == null) {
        Log.e(TAG, "Peerconnection factory is not created");
        return;
    }
    this.localRender = localRender;

    if (videoCallEnabled) {
        factory.setVideoHwAccelerationOptions(renderEGLContext, renderEGLContext);
    }

    // Set default WebRTC tracing and INFO libjingle logging.
    // NOTE: this _must_ happen while |factory| is alive!
    Logging.enableTracing("logcat:",
            EnumSet.of(Logging.TraceLevel.TRACE_DEFAULT),
            Logging.Severity.LS_INFO);

    localMediaStream = factory.createLocalMediaStream("ARDAMS");

    // If video call is enabled and the device has camera(s)
    if (videoCallEnabled && numberOfCameras > 0) {
        String cameraDeviceName; // = CameraEnumerationAndroid.getDeviceName(0);
        String frontCameraDeviceName = CameraEnumerationAndroid.getNameOfFrontFacingDevice();
        String backCameraDeviceName = CameraEnumerationAndroid.getNameOfBackFacingDevice();

        // If current camera is set to front and the device has one
        if (currentCameraPosition == NBMCameraPosition.FRONT && frontCameraDeviceName != null) {
            cameraDeviceName = frontCameraDeviceName;
        }
        // If current camera is set to back and the device has one
        else if (currentCameraPosition == NBMCameraPosition.BACK && backCameraDeviceName != null) {
            cameraDeviceName = backCameraDeviceName;
        }
        // If current camera is set to any then we pick the first camera of the device, which
        // should be a back-facing camera according to libjingle API
        else {
            cameraDeviceName = CameraEnumerationAndroid.getDeviceName(0);
            currentCameraPosition = NBMCameraPosition.BACK;
        }

        Log.d(TAG, "Opening camera: " + cameraDeviceName);
        videoCapturer = VideoCapturerAndroid.create(cameraDeviceName, null);
        if (videoCapturer == null) {
            Log.d(TAG, "Error while opening camera");
            return;
        }
        localMediaStream.addTrack(createCapturerVideoTrack(videoCapturer));
    }

    // Create audio track
    localMediaStream.addTrack(factory.createAudioTrack(AUDIO_TRACK_ID,
            factory.createAudioSource(audioConstraints)));

    Log.d(TAG, "Local media stream created.");
}
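Since this manager tracks currentCameraPosition, a camera switch should keep that field in sync with the capturer. A hedged companion sketch (not from the webrtcpeer-android source; field and enum names are taken from the example above):

// Switch between front and back cameras and keep currentCameraPosition in sync.
public void switchCamera() {
    if (videoCapturer == null || CameraEnumerationAndroid.getDeviceCount() < 2) {
        return; // no capturer, or nothing to switch to
    }
    videoCapturer.switchCamera(new VideoCapturerAndroid.CameraSwitchHandler() {
        @Override
        public void onCameraSwitchDone(boolean isFrontCamera) {
            currentCameraPosition = isFrontCamera ? NBMCameraPosition.FRONT
                                                  : NBMCameraPosition.BACK;
        }

        @Override
        public void onCameraSwitchError(String errorDescription) {
            Log.e(TAG, "Camera switch failed: " + errorDescription);
        }
    });
}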