Java Code Examples for org.kurento.client.MediaPipeline#release()
The following examples show how to use
org.kurento.client.MediaPipeline#release().
You can vote up the ones you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example 1
Source File: FakeKmsService.java From kurento-java with Apache License 2.0 | 6 votes |
/**
 * Releases every fake media resource held by this service and resets the backing lists.
 *
 * <p>For each fake WebRTC client the monitor counter is decremented; fake browser endpoints and
 * fake media pipelines are released afterwards. Each per-client step is paced by
 * {@code timeBetweenClientMs} to avoid hammering the media server.
 *
 * @param timeBetweenClientMs delay, in milliseconds, between consecutive per-client operations
 * @param monitor system monitor whose client counter is decremented once per fake WebRTC client
 */
public void releaseAllFakePipelines(long timeBetweenClientMs, SystemMonitorManager monitor) {
  // One monitor decrement per fake WebRTC client, paced by the requested delay.
  for (int i = 0; i < fakeWebRtcList.size(); i++) {
    monitor.decrementNumClients();
    waitMs(timeBetweenClientMs);
  }
  // Release the fake browser endpoints, also paced.
  for (WebRtcEndpoint fakeBrowser : fakeBrowserList) {
    fakeBrowser.release();
    waitMs(timeBetweenClientMs);
  }
  // Release the fake pipelines (no pacing needed once clients are gone).
  for (MediaPipeline fakeMediaPipeline : fakeMediaPipelineList) {
    fakeMediaPipeline.release();
  }
  // Reset the lists with fresh instances. Fix: use the diamond operator consistently
  // (the original mixed new ArrayList<>() and new ArrayList<MediaPipeline>()).
  fakeWebRtcList = new ArrayList<>();
  fakeBrowserList = new ArrayList<>();
  fakeMediaPipelineList = new ArrayList<>();
}
Example 2
Source File: Handler.java From kurento-tutorial-java with Apache License 2.0 | 6 votes |
private void stop(final WebSocketSession session) { log.info("[Handler::stop]"); // Update the UI sendPlayEnd(session); // Remove the user session and release all resources String sessionId = session.getId(); UserSession user = users.remove(sessionId); if (user != null) { MediaPipeline mediaPipeline = user.getMediaPipeline(); if (mediaPipeline != null) { log.info("[Handler::stop] Release the Media Pipeline"); mediaPipeline.release(); } } }
Example 3
Source File: PlayerMultiplePauseTest.java From kurento-java with Apache License 2.0 | 5 votes |
/**
 * Plays a 60-second red test video over WebRTC in receive-only mode and repeatedly
 * pauses/resumes the player, asserting the expected frame color after each resume.
 * NOTE(review): kurentoClient and getPage() appear to come from the enclosing test base
 * class — confirm against kurento-java test infrastructure.
 */
@Test
public void testPlayerMultiplePause() throws Exception {
  // Test data
  final String mediaUrl = "http://" + getTestFilesHttpPath() + "/video/60sec/red.webm";
  final Color expectedColor = Color.RED;
  final int playTimeSeconds = 2;
  final int pauseTimeSeconds = 2;
  final int numPauses = 30;

  // Media Pipeline: player -> WebRTC endpoint
  MediaPipeline mp = kurentoClient.createMediaPipeline();
  PlayerEndpoint playerEp = new PlayerEndpoint.Builder(mp, mediaUrl).build();
  WebRtcEndpoint webRtcEp = new WebRtcEndpoint.Builder(mp).build();
  playerEp.connect(webRtcEp);

  // WebRTC in receive-only mode; subscribe to "playing" before starting playback
  getPage().subscribeEvents("playing");
  getPage().initWebRtc(webRtcEp, WebRtcChannel.AUDIO_AND_VIDEO, WebRtcMode.RCV_ONLY);
  playerEp.play();
  Assert.assertTrue("Not received media (timeout waiting playing event)",
      getPage().waitForEvent("playing"));

  for (int i = 0; i < numPauses; i++) {
    // Assert color while the video is playing
    Assert.assertTrue("The color of the video should be " + expectedColor,
        getPage().similarColor(expectedColor));

    // Pause and wait
    playerEp.pause();
    Thread.sleep(TimeUnit.SECONDS.toMillis(pauseTimeSeconds));

    // Resume video and let it play before the next color check
    playerEp.play();
    Thread.sleep(TimeUnit.SECONDS.toMillis(playTimeSeconds));
  }

  // Release Media Pipeline
  mp.release();
}
Example 4
Source File: MetaTest1Test.java From kurento-java with Apache License 2.0 | 5 votes |
@Test public void test() { // Media Pipeline MediaPipeline mp = kurentoClient.createMediaPipeline(); // Check page loaded WebElement element = getPage().getBrowser().getWebDriver().findElement(By.cssSelector("#testTitle")); Assert.assertThat(element.getText(), IsEqual.equalTo("WebRTC test")); // Release Media Pipeline mp.release(); }
Example 5
Source File: MetaTest2Test.java From kurento-java with Apache License 2.0 | 5 votes |
@Test public void test() { // Media Pipeline MediaPipeline mp = kurentoClient.createMediaPipeline(); // Check page loaded WebElement element = getPage().getBrowser().getWebDriver().findElement(By.cssSelector("#testTitle")); Assert.assertThat(element.getText(), IsEqual.equalTo("WebRTC test")); // Release Media Pipeline mp.release(); }
Example 6
Source File: RecorderSwitchPlayerWebRtcTest.java From kurento-java with Apache License 2.0 | 4 votes |
/**
 * Records a stream that is switched between two players (red/green videos) while a browser
 * sends WebRTC media, then stops the recorder and validates the recording's colors and codecs.
 *
 * @param mediaProfileSpecType recording profile (container/codec selection)
 * @param expectedVideoCodec video codec expected in the recording
 * @param expectedAudioCodec audio codec expected in the recording
 * @param extension file extension for the recording URL
 */
public void doTest(MediaProfileSpecType mediaProfileSpecType, String expectedVideoCodec,
    String expectedAudioCodec, String extension) throws Exception {
  // Media Pipeline #1
  MediaPipeline mp = kurentoClient.createMediaPipeline();
  WebRtcEndpoint webRtcEp = new WebRtcEndpoint.Builder(mp).build();
  PlayerEndpoint playerRed =
      new PlayerEndpoint.Builder(mp, getPlayerUrl("/video/10sec/red.webm")).build();
  playerRed.connect(webRtcEp);

  // Test execution: browser sends media; subscribe before playing so the event isn't missed
  getPage(0).subscribeLocalEvents("playing");
  getPage(0).initWebRtc(webRtcEp, WebRtcChannel.AUDIO_AND_VIDEO, WebRtcMode.SEND_ONLY);
  playerRed.play(); // red
  Assert.assertTrue("Not received media (timeout waiting playing event)",
      getPage(0).waitForEvent("playing"));

  PlayerEndpoint playerGreen =
      new PlayerEndpoint.Builder(mp, getPlayerUrl("/video/10sec/green.webm")).build();
  String recordingFile = getRecordUrl(extension);
  RecorderEndpoint recorderEp = new RecorderEndpoint.Builder(mp, recordingFile)
      .withMediaProfile(mediaProfileSpecType).build();
  playerGreen.play();
  recorderEp.record();

  // Alternate the recorder's source between the red and green players
  for (int i = 0; i < NUM_SWAPS; i++) {
    if (i % 2 == 0) {
      playerRed.connect(recorderEp);
    } else {
      playerGreen.connect(recorderEp);
    }
    Thread.sleep(TimeUnit.SECONDS.toMillis(PLAYTIME) / NUM_SWAPS);
  }

  // Release Media Pipeline #1 (after stopping the recorder cleanly via stopAndWait)
  saveGstreamerDot(mp);
  final CountDownLatch recorderLatch = new CountDownLatch(1);
  recorderEp.stopAndWait(new Continuation<Void>() {
    @Override
    public void onSuccess(Void result) throws Exception {
      recorderLatch.countDown();
    }

    @Override
    public void onError(Throwable cause) throws Exception {
      recorderLatch.countDown();
    }
  });
  Assert.assertTrue("Not stop properly",
      recorderLatch.await(getPage(0).getTimeout(), TimeUnit.SECONDS));
  mp.release();

  // Reloading browser
  getPage(0).close();

  checkRecordingFile(recordingFile, "browser1", EXPECTED_COLORS, PLAYTIME, expectedVideoCodec,
      expectedAudioCodec);
  success = true;
}
Example 7
Source File: WebRtcFourOneToManyTest.java From kurento-java with Apache License 2.0 | 4 votes |
/**
 * One-to-many WebRTC test: a presenter sends video through a single master endpoint and
 * {@code numViewers} concurrent viewers each receive it through their own endpoint while
 * latency is measured per viewer.
 *
 * <p>Fixes over the original: the per-viewer task was declared as an anonymous
 * {@code Thread} but only ever submitted to an {@code ExecutorService} (never started as a
 * thread) — it is now a plain {@code Runnable}; the executor is also shut down once all
 * viewer tasks have completed, instead of being leaked.
 */
@Test
public void testWebRtcOneToManyChrome() throws InterruptedException, IOException {
  // Media Pipeline shared by presenter and all viewers
  final MediaPipeline mp = kurentoClient.createMediaPipeline();
  final WebRtcEndpoint masterWebRtcEp = new WebRtcEndpoint.Builder(mp).build();

  // Assets for viewers
  final LatencyController[] cs = new LatencyController[numViewers];
  final WebRtcEndpoint[] viewerWebRtcEPs = new WebRtcEndpoint[numViewers];
  final CountDownLatch latch = new CountDownLatch(numViewers);

  // Presenter
  getPresenter().subscribeLocalEvents("playing");
  getPresenter().initWebRtc(masterWebRtcEp, WebRtcChannel.VIDEO_ONLY, WebRtcMode.SEND_ONLY);
  if (monitor != null) {
    monitor.addWebRtcClientAndActivateOutboundStats(getPresenter().getBrowser().getId(),
        masterWebRtcEp, getPresenter(), "webRtcPeer.peerConnection");
  }

  // Viewers: one task per viewer, run concurrently on a fixed pool
  ExecutorService exec = Executors.newFixedThreadPool(numViewers);
  for (int j = 0; j < numViewers; j++) {
    final int i = j; // effectively-final copy for the anonymous class
    Runnable viewerTask = new Runnable() {
      @Override
      public void run() {
        try {
          viewerWebRtcEPs[i] = new WebRtcEndpoint.Builder(mp).build();
          masterWebRtcEp.connect(viewerWebRtcEPs[i]);
          if (monitor != null) {
            monitor.incrementNumClients();
          }

          // Latency control
          String name = getViewer(i).getBrowser().getId();
          cs[i] = new LatencyController(name, monitor);

          // WebRTC (receive-only for viewers)
          getViewer(i).subscribeEvents("playing");
          getViewer(i).initWebRtc(viewerWebRtcEPs[i], WebRtcChannel.VIDEO_ONLY,
              WebRtcMode.RCV_ONLY);
          if (monitor != null) {
            monitor.addWebRtcClientAndActivateInboundStats(getViewer(i).getBrowser().getId(),
                viewerWebRtcEPs[i], getViewer(i), "webRtcPeer.peerConnection");
          }

          // Latency assessment
          cs[i].checkLatency(PLAYTIME, TimeUnit.SECONDS, getPresenter(), getViewer(i));
          cs[i].drawChart(getDefaultOutputFile("-" + name + "-latency.png"), 500, 270);
          cs[i].writeCsv(getDefaultOutputFile("-" + name + "-latency.csv"));
          cs[i].logLatencyErrorrs();
        } catch (Exception e) {
          e.printStackTrace();
        } finally {
          latch.countDown();
          if (monitor != null) {
            monitor.decrementNumClients();
          }
        }
      }
    };
    exec.execute(viewerTask);
  }

  // Wait to finish viewers tasks
  latch.await();
  // All tasks are done; release the pool instead of leaking it
  exec.shutdown();

  // Release Media Pipeline
  mp.release();
}
Example 8
Source File: RecorderSwitchWebRtcWebRtcAndPlayerTest.java From kurento-java with Apache License 2.0 | 4 votes |
/**
 * Records a stream switched between two WebRTC senders (red/green browsers), stops the
 * recorder, releases the pipeline, and validates the recording. A pipeline error listener
 * arms a latch; the final assertion checks the latch was never counted down (no error).
 *
 * @param mediaProfileSpecType recording profile (container/codec selection)
 * @param expectedVideoCodec video codec expected in the recording
 * @param expectedAudioCodec audio codec expected in the recording
 * @param extension file extension for the recording URL
 */
public void doTest(MediaProfileSpecType mediaProfileSpecType, String expectedVideoCodec,
    String expectedAudioCodec, String extension) throws Exception {
  // Media Pipeline #1
  MediaPipeline mp = kurentoClient.createMediaPipeline();
  final CountDownLatch errorPipelinelatch = new CountDownLatch(1);
  mp.addErrorListener(new EventListener<ErrorEvent>() {
    @Override
    public void onEvent(ErrorEvent event) {
      // Capture the error for the final assertion message
      msgError = "Description:" + event.getDescription() + "; Error code:" + event.getType();
      errorPipelinelatch.countDown();
    }
  });
  WebRtcEndpoint webRtcEpRed = new WebRtcEndpoint.Builder(mp).build();
  WebRtcEndpoint webRtcEpGreen = new WebRtcEndpoint.Builder(mp).build();
  String recordingFile = getRecordUrl(extension);
  RecorderEndpoint recorderEp = new RecorderEndpoint.Builder(mp, recordingFile)
      .withMediaProfile(mediaProfileSpecType).build();

  // Test execution: red sender first; connection time is tracked for playtime accounting
  getPage(BROWSER1).subscribeLocalEvents("playing");
  long startWebrtc = System.currentTimeMillis();
  getPage(BROWSER1).initWebRtc(webRtcEpRed, WebRtcChannel.AUDIO_AND_VIDEO, WebRtcMode.SEND_ONLY);
  webRtcEpRed.connect(recorderEp);
  recorderEp.record();
  Assert.assertTrue("Not received media (timeout waiting playing event)",
      getPage(BROWSER1).waitForEvent("playing"));
  long webrtcRedConnectionTime = System.currentTimeMillis() - startWebrtc;
  Thread.sleep(TimeUnit.SECONDS.toMillis(PLAYTIME) / N_PLAYER);

  // Switch the recorder source to the green sender
  getPage(BROWSER2).subscribeLocalEvents("playing");
  startWebrtc = System.currentTimeMillis();
  getPage(BROWSER2).initWebRtc(webRtcEpGreen, WebRtcChannel.AUDIO_AND_VIDEO,
      WebRtcMode.SEND_ONLY);
  // green
  webRtcEpGreen.connect(recorderEp);
  Assert.assertTrue("Not received media (timeout waiting playing event)",
      getPage(BROWSER2).waitForEvent("playing"));
  long webrtcGreenConnectionTime = System.currentTimeMillis() - startWebrtc;
  Thread.sleep(TimeUnit.SECONDS.toMillis(PLAYTIME) / N_PLAYER);

  // Switch back to the red sender for the final segment
  webRtcEpRed.connect(recorderEp);
  startWebrtc = System.currentTimeMillis();
  Thread.sleep(TimeUnit.SECONDS.toMillis(PLAYTIME) / N_PLAYER);

  // Release Media Pipeline #1 (stop the recorder cleanly first)
  saveGstreamerDot(mp);
  final CountDownLatch recorderLatch = new CountDownLatch(1);
  recorderEp.stopAndWait(new Continuation<Void>() {
    @Override
    public void onSuccess(Void result) throws Exception {
      recorderLatch.countDown();
    }

    @Override
    public void onError(Throwable cause) throws Exception {
      recorderLatch.countDown();
    }
  });
  Assert.assertTrue("Not stop properly",
      recorderLatch.await(getPage(BROWSER2).getTimeout(), TimeUnit.SECONDS));
  mp.release();

  // Latch still at 1 means the error listener never fired
  Assert.assertTrue(msgError, errorPipelinelatch.getCount() == 1);

  // Expected playtime accounts for the WebRTC connection setup delays
  final long playtime = PLAYTIME + TimeUnit.MILLISECONDS
      .toSeconds((2 * webrtcRedConnectionTime) + webrtcGreenConnectionTime);

  checkRecordingFile(recordingFile, BROWSER3, EXPECTED_COLORS, playtime, expectedVideoCodec,
      expectedAudioCodec);
  success = true;
}
Example 9
Source File: WebRtcFakeMediaTest.java From kurento-java with Apache License 2.0 | 4 votes |
/**
 * WebRTC loopback test with fake media: a single endpoint is connected to itself; the test
 * waits for media to flow in, for the browser "playing" event, and then verifies the
 * reported playback time after a guard period.
 */
@Test
public void testWebRtcLoopback() throws Exception {
  // Media Pipeline: endpoint looped back to itself
  MediaPipeline mp = kurentoClient.createMediaPipeline();
  WebRtcEndpoint webRtcEndpoint = new WebRtcEndpoint.Builder(mp).build();
  webRtcEndpoint.connect(webRtcEndpoint);

  // Latch released when media starts flowing into the endpoint
  final CountDownLatch flowingLatch = new CountDownLatch(1);
  webRtcEndpoint
      .addMediaFlowInStateChangeListener(new EventListener<MediaFlowInStateChangeEvent>() {
        @Override
        public void onEvent(MediaFlowInStateChangeEvent event) {
          if (event.getState().equals(MediaFlowState.FLOWING)) {
            flowingLatch.countDown();
          }
        }
      });

  // Start WebRTC and wait for playing event
  getPage().subscribeEvents("playing");
  getPage().initWebRtc(webRtcEndpoint, WebRtcChannel.AUDIO_AND_VIDEO, WebRtcMode.SEND_RCV);

  Assert.assertTrue("Not received FLOWING IN event in webRtcEp: " + WebRtcChannel.AUDIO_AND_VIDEO,
      flowingLatch.await(getPage().getTimeout(), TimeUnit.SECONDS));

  Assert.assertTrue(
      "Not received media (timeout waiting playing event): " + WebRtcChannel.AUDIO_AND_VIDEO,
      getPage().waitForEvent("playing"));

  // Guard time to play the video
  waitSeconds(PLAYTIME);

  // Assertions: reported playback time must match the guard time within tolerance
  double currentTime = getPage().getCurrentTime();
  Assert.assertTrue(
      "Error in play time (expected: " + PLAYTIME + " sec, real: " + currentTime + " sec)",
      getPage().compare(PLAYTIME, currentTime));

  // Release Media Pipeline
  mp.release();
}
Example 10
Source File: RecorderPlayerDisconnectTest.java From kurento-java with Apache License 2.0 | 4 votes |
/**
 * Records a player stream that is repeatedly connected/disconnected from the recorder, then
 * plays the resulting file back in a second pipeline and asserts colors, EOS, duration and
 * codecs of the recording.
 *
 * @param mediaProfileSpecType recording profile (container/codec selection)
 * @param expectedVideoCodec video codec expected in the recording
 * @param expectedAudioCodec audio codec expected in the recording
 * @param extension file extension for the recording URL
 */
public void doTest(MediaProfileSpecType mediaProfileSpecType, String expectedVideoCodec,
    String expectedAudioCodec, String extension) throws Exception {
  final CountDownLatch recorderLatch = new CountDownLatch(1);

  // Media Pipeline #1: player -> recorder
  MediaPipeline mp = kurentoClient.createMediaPipeline();
  PlayerEndpoint playerGreen =
      new PlayerEndpoint.Builder(mp, getPlayerUrl("/video/10sec/green.webm")).build();
  String recordingFile = getRecordUrl(extension);
  RecorderEndpoint recorderEp = new RecorderEndpoint.Builder(mp, recordingFile)
      .withMediaProfile(mediaProfileSpecType).build();
  playerGreen.play();
  recorderEp.record();

  // Alternate between connecting and disconnecting the player from the recorder
  for (int i = 0; i < NUM_SWAPS; i++) {
    if (i % 2 == 0) {
      playerGreen.connect(recorderEp);
    } else {
      playerGreen.disconnect(recorderEp);
    }
    Thread.sleep(TimeUnit.SECONDS.toMillis(PLAYTIME) / NUM_SWAPS);
  }

  // Release Media Pipeline #1 (stop recorder cleanly first)
  saveGstreamerDot(mp);
  recorderEp.stopAndWait(new Continuation<Void>() {
    @Override
    public void onSuccess(Void result) throws Exception {
      recorderLatch.countDown();
    }

    @Override
    public void onError(Throwable cause) throws Exception {
      recorderLatch.countDown();
    }
  });
  Assert.assertTrue("Not stop properly",
      recorderLatch.await(getPage().getTimeout(), TimeUnit.SECONDS));
  mp.release();

  // Wait until file exists
  waitForFileExists(recordingFile);

  // Reloading browser
  getPage().reload();

  // Media Pipeline #2: play the recording back over WebRTC
  MediaPipeline mp2 = kurentoClient.createMediaPipeline();
  PlayerEndpoint playerEp2 = new PlayerEndpoint.Builder(mp2, recordingFile).build();
  WebRtcEndpoint webRtcEp2 = new WebRtcEndpoint.Builder(mp2).build();
  playerEp2.connect(webRtcEp2);

  // Playing the recording
  getPage().subscribeEvents("playing");
  getPage().initWebRtc(webRtcEp2, WebRtcChannel.AUDIO_AND_VIDEO, WebRtcMode.RCV_ONLY);
  final CountDownLatch eosLatch = new CountDownLatch(1);
  playerEp2.addEndOfStreamListener(new EventListener<EndOfStreamEvent>() {
    @Override
    public void onEvent(EndOfStreamEvent event) {
      eosLatch.countDown();
    }
  });
  playerEp2.play();

  // Assertions in recording
  final String messageAppend = "[played file with media pipeline]";
  final int playtime = PLAYTIME;

  Assert.assertTrue(
      "Not received media in the recording (timeout waiting playing event) " + messageAppend,
      getPage().waitForEvent("playing"));
  for (Color color : EXPECTED_COLORS) {
    Assert.assertTrue("The color of the recorded video should be " + color + " " + messageAppend,
        getPage().similarColor(color));
  }
  Assert.assertTrue("Not received EOS event in player",
      eosLatch.await(getPage().getTimeout(), TimeUnit.SECONDS));
  double currentTime = getPage().getCurrentTime();
  Assert.assertTrue("Error in play time in the recorded video (expected: " + playtime
      + " sec, real: " + currentTime + " sec) " + messageAppend,
      getPage().compare(playtime, currentTime));
  AssertMedia.assertCodecs(recordingFile, expectedVideoCodec, expectedAudioCodec);
  AssertMedia.assertDuration(recordingFile, TimeUnit.SECONDS.toMillis(playtime),
      TimeUnit.SECONDS.toMillis(getPage().getThresholdTime()));

  // Release Media Pipeline #2
  mp2.release();

  success = true;
}
Example 11
Source File: LongStabilityRecorderS3Test.java From kurento-java with Apache License 2.0 | 4 votes |
/**
 * Long-stability recording test: a browser sends audio-only WebRTC media to a recorder for a
 * configurable duration, then the recorder is stopped and the recording's duration asserted.
 * A pipeline error listener arms a latch; the final latch check asserts no error occurred.
 *
 * @param mediaProfileSpecType recording profile (container/codec selection)
 * @param expectedAudioCodec audio codec expected in the recording (unused here beyond docs —
 *        NOTE(review): the parameter is not referenced in this body; confirm intent)
 * @param extension file extension for the recording URL
 */
public void doTest(final MediaProfileSpecType mediaProfileSpecType, String expectedAudioCodec,
    final String extension) throws Exception {
  // Test duration is configurable via system property, with a default
  long testDurationMillis =
      PropertiesManager.getProperty(TEST_DURATION_PROPERTY, DEFAULT_TEST_DURATION);

  MediaPipeline mp = kurentoClient.createMediaPipeline();

  final CountDownLatch errorPipelinelatch = new CountDownLatch(1);
  mp.addErrorListener(new EventListener<ErrorEvent>() {
    @Override
    public void onEvent(ErrorEvent event) {
      msgError = "Description:" + event.getDescription() + "; Error code:" + event.getType();
      log.error(msgError);
      errorPipelinelatch.countDown();
    }
  });
  final WebRtcEndpoint webRtcSender = new WebRtcEndpoint.Builder(mp).build();

  // WebRTC sender negotiation (audio only, send only)
  getPage().subscribeLocalEvents("playing");
  getPage().initWebRtc(webRtcSender, WebRtcChannel.AUDIO_ONLY, WebRtcMode.SEND_ONLY);
  Assert.assertTrue("Not received media in sender webrtc", getPage().waitForEvent("playing"));

  // Recorder
  String recordingFile = getRecordUrl(extension);
  RecorderEndpoint recorder = new RecorderEndpoint.Builder(mp, recordingFile)
      .withMediaProfile(mediaProfileSpecType).build();
  webRtcSender.connect(recorder);

  // Start recorder
  recorder.record();

  // Wait recording time
  Thread.sleep(testDurationMillis);

  // Stop recorder and wait for confirmation via latch
  final CountDownLatch recorderLatch = new CountDownLatch(1);
  recorder.stopAndWait(new Continuation<Void>() {
    @Override
    public void onSuccess(Void result) throws Exception {
      recorderLatch.countDown();
    }

    @Override
    public void onError(Throwable cause) throws Exception {
      recorderLatch.countDown();
    }
  });

  // Release Media Pipeline
  Assert.assertTrue("Not stop properly",
      recorderLatch.await(getPage().getTimeout(), TimeUnit.SECONDS));
  if (mp != null) {
    mp.release();
  }

  // Latch still at 1 means the error listener never fired
  Assert.assertTrue(msgError, errorPipelinelatch.getCount() == 1);

  waitForFileExists(recordingFile);

  // Assessments
  AssertMedia.assertDuration(recordingFile, testDurationMillis, THRESHOLD_MS);
}
Example 12
Source File: WebRtcThreeSwitchTest.java From kurento-java with Apache License 2.0 | 4 votes |
/**
 * Starts NUM_BROWSERS WebRTC loopbacks, then performs two rounds of switching each browser's
 * incoming stream (to the next, then the previous endpoint), asserting the expected video
 * color after each switch.
 */
@Test
public void testWebRtcSwitch() throws InterruptedException {
  // Media Pipeline: one endpoint per browser, initially looped back to itself
  MediaPipeline mp = kurentoClient.createMediaPipeline();
  WebRtcEndpoint[] webRtcEndpoints = new WebRtcEndpoint[NUM_BROWSERS];
  for (int i = 0; i < NUM_BROWSERS; i++) {
    webRtcEndpoints[i] = new WebRtcEndpoint.Builder(mp).build();
    webRtcEndpoints[i].connect(webRtcEndpoints[i]);

    // Start WebRTC in loopback in each browser
    getPage(i).subscribeEvents("playing");
    getPage(i).initWebRtc(webRtcEndpoints[i], WebRtcChannel.AUDIO_AND_VIDEO, WebRtcMode.SEND_RCV);

    // Delay time (to avoid the same timing in videos)
    waitSeconds(1);

    // Wait until event playing in the remote streams
    Assert.assertTrue("Not received media #1 (timeout waiting playing event)",
        getPage(i).waitForEvent("playing"));

    // Assert color
    assertColor(i);
  }

  // Guard time to see switching #0
  waitSeconds(PLAYTIME);

  // Switching (round #1): each endpoint receives from the next one (wrapping around)
  for (int i = 0; i < NUM_BROWSERS; i++) {
    int next = i + 1 >= NUM_BROWSERS ? 0 : i + 1;
    webRtcEndpoints[i].connect(webRtcEndpoints[next]);
    getPage(i).consoleLog(ConsoleLogLevel.INFO,
        "Switch #1: webRtcEndpoint" + i + " -> webRtcEndpoint" + next);

    // Assert color
    assertColor(i);
  }

  // Guard time to see switching #1
  waitSeconds(PLAYTIME);

  // Switching (round #2): each endpoint receives from the previous one (wrapping around)
  for (int i = 0; i < NUM_BROWSERS; i++) {
    int previous = i - 1 < 0 ? NUM_BROWSERS - 1 : i - 1;
    webRtcEndpoints[i].connect(webRtcEndpoints[previous]);
    getPage(i).consoleLog(ConsoleLogLevel.INFO,
        "Switch #2: webRtcEndpoint" + i + " -> webRtcEndpoint" + previous);

    // Assert color
    assertColor(i);
  }

  // Guard time to see switching #2
  waitSeconds(PLAYTIME);

  // Release Media Pipeline
  mp.release();
}
Example 13
Source File: RecorderPlayerSwitchSequentialTest.java From kurento-java with Apache License 2.0 | 4 votes |
public void doTest(MediaProfileSpecType mediaProfileSpecType, String expectedVideoCodec, String expectedAudioCodec, String extension) throws Exception { final CountDownLatch recorderLatch = new CountDownLatch(1); MediaPipeline mp = null; // Media Pipeline mp = kurentoClient.createMediaPipeline(); PlayerEndpoint playerEp1 = new PlayerEndpoint.Builder(mp, getPlayerUrl("/video/60sec/ball.webm")).build(); PlayerEndpoint playerEp2 = new PlayerEndpoint.Builder(mp, getPlayerUrl("/video/60sec/smpte.webm")).build(); String recordingFile = getRecordUrl(extension); RecorderEndpoint recorderEp = new RecorderEndpoint.Builder(mp, recordingFile) .withMediaProfile(mediaProfileSpecType).build(); // Start play and record playerEp1.play(); playerEp2.play(); recorderEp.record(); // Switch players for (int i = 0; i < SWITCH_TIMES; i++) { if (i % 2 == 0) { playerEp1.connect(recorderEp); } else { playerEp2.connect(recorderEp); } Thread.sleep(SWITCH_RATE_MS); } // Stop play and record playerEp1.stop(); playerEp2.stop(); recorderEp.stop(new Continuation<Void>() { @Override public void onSuccess(Void result) throws Exception { recorderLatch.countDown(); } @Override public void onError(Throwable cause) throws Exception { recorderLatch.countDown(); } }); Assert.assertTrue("Not stop properly", recorderLatch.await(TIMEOUT, TimeUnit.SECONDS)); // Assessments long expectedTimeMs = SWITCH_TIMES * SWITCH_RATE_MS; AssertMedia.assertCodecs(recordingFile, expectedVideoCodec, expectedAudioCodec); AssertMedia.assertDuration(recordingFile, expectedTimeMs, THRESHOLD_MS); // Release Media Pipeline if (mp != null) { mp.release(); } }
Example 14
Source File: RecorderSwitchWebrtcTest.java From kurento-java with Apache License 2.0 | 4 votes |
/**
 * Records a stream switched across three WebRTC senders (red, green, blue browsers), stops
 * the recorder, releases the pipeline, and validates the recording's colors and codecs.
 *
 * @param mediaProfileSpecType recording profile (container/codec selection)
 * @param expectedVideoCodec video codec expected in the recording
 * @param expectedAudioCodec audio codec expected in the recording
 * @param extension file extension for the recording URL
 */
public void doTest(MediaProfileSpecType mediaProfileSpecType, String expectedVideoCodec,
    String expectedAudioCodec, String extension) throws Exception {
  // Media Pipeline #1: three senders and one recorder
  MediaPipeline mp = kurentoClient.createMediaPipeline();
  WebRtcEndpoint webRtcEpRed = new WebRtcEndpoint.Builder(mp).build();
  WebRtcEndpoint webRtcEpGreen = new WebRtcEndpoint.Builder(mp).build();
  WebRtcEndpoint webRtcEpBlue = new WebRtcEndpoint.Builder(mp).build();
  String recordingFile = getRecordUrl(extension);
  RecorderEndpoint recorderEp = new RecorderEndpoint.Builder(mp, recordingFile)
      .withMediaProfile(mediaProfileSpecType).build();

  // Test execution: negotiate all three senders, then record from red first
  getPage(BROWSER1).subscribeLocalEvents("playing");
  long startWebrtc = System.currentTimeMillis();
  getPage(BROWSER1).initWebRtc(webRtcEpRed, WebRtcChannel.AUDIO_AND_VIDEO, WebRtcMode.SEND_ONLY);

  getPage(BROWSER2).subscribeLocalEvents("playing");
  getPage(BROWSER2).initWebRtc(webRtcEpGreen, WebRtcChannel.AUDIO_AND_VIDEO,
      WebRtcMode.SEND_ONLY);

  getPage(BROWSER3).subscribeLocalEvents("playing");
  getPage(BROWSER3).initWebRtc(webRtcEpBlue, WebRtcChannel.AUDIO_AND_VIDEO, WebRtcMode.SEND_ONLY);

  webRtcEpRed.connect(recorderEp);
  recorderEp.record();

  Assert.assertTrue("Not received media (timeout waiting playing event)",
      getPage(BROWSER1).waitForEvent("playing"));
  long webrtcRedConnectionTime = System.currentTimeMillis() - startWebrtc;
  Thread.sleep(TimeUnit.SECONDS.toMillis(PLAYTIME) / N_PLAYER);

  startWebrtc = System.currentTimeMillis();

  // green
  webRtcEpGreen.connect(recorderEp);
  Assert.assertTrue("Not received media (timeout waiting playing event)",
      getPage(BROWSER2).waitForEvent("playing"));
  long webrtcGreenConnectionTime = System.currentTimeMillis() - startWebrtc;
  Thread.sleep(TimeUnit.SECONDS.toMillis(PLAYTIME) / N_PLAYER);

  startWebrtc = System.currentTimeMillis();

  // blue
  webRtcEpBlue.connect(recorderEp);
  Assert.assertTrue("Not received media (timeout waiting playing event)",
      getPage(BROWSER3).waitForEvent("playing"));
  long webrtcBlueConnectionTime = System.currentTimeMillis() - startWebrtc;
  Thread.sleep(TimeUnit.SECONDS.toMillis(PLAYTIME) / N_PLAYER);

  // Release Media Pipeline #1 (stop recorder cleanly first)
  saveGstreamerDot(mp);
  final CountDownLatch recorderLatch = new CountDownLatch(1);
  recorderEp.stopAndWait(new Continuation<Void>() {
    @Override
    public void onSuccess(Void result) throws Exception {
      recorderLatch.countDown();
    }

    @Override
    public void onError(Throwable cause) throws Exception {
      recorderLatch.countDown();
    }
  });
  Assert.assertTrue("Not stop properly",
      recorderLatch.await(getPage(BROWSER3).getTimeout(), TimeUnit.SECONDS));
  mp.release();

  // Reloading browser
  getPage(BROWSER3).close();

  // Expected playtime accounts for the three connection setup delays
  long playtime = PLAYTIME + TimeUnit.MILLISECONDS
      .toSeconds(webrtcRedConnectionTime + webrtcGreenConnectionTime + webrtcBlueConnectionTime);
  checkRecordingFile(recordingFile, BROWSER4, EXPECTED_COLORS, playtime, expectedVideoCodec,
      expectedAudioCodec);
  success = true;
}
Example 15
Source File: WebRtcOneLoopbackTest.java From kurento-java with Apache License 2.0 | 4 votes |
/**
 * WebRTC loopback test: a single endpoint is connected to itself; the test waits for media
 * flowing in and the browser "playing" event, verifies playback time after a guard period,
 * and asserts the expected Chrome test-video color.
 */
@Test
public void testWebRtcLoopback() throws Exception {
  // Media Pipeline: endpoint looped back to itself
  MediaPipeline mp = kurentoClient.createMediaPipeline();
  WebRtcEndpoint webRtcEndpoint = new WebRtcEndpoint.Builder(mp).build();
  webRtcEndpoint.connect(webRtcEndpoint);

  // Latch released when media starts flowing into the endpoint
  final CountDownLatch flowingLatch = new CountDownLatch(1);
  webRtcEndpoint
      .addMediaFlowInStateChangeListener(new EventListener<MediaFlowInStateChangeEvent>() {
        @Override
        public void onEvent(MediaFlowInStateChangeEvent event) {
          if (event.getState().equals(MediaFlowState.FLOWING)) {
            flowingLatch.countDown();
          }
        }
      });

  // Start WebRTC and wait for playing event
  getPage().subscribeEvents("playing");
  getPage().initWebRtc(webRtcEndpoint, WebRtcChannel.AUDIO_AND_VIDEO, WebRtcMode.SEND_RCV);

  Assert.assertTrue("Not received FLOWING IN event in webRtcEp: " + WebRtcChannel.AUDIO_AND_VIDEO,
      flowingLatch.await(getPage().getTimeout(), TimeUnit.SECONDS));

  Assert.assertTrue(
      "Not received media (timeout waiting playing event): " + WebRtcChannel.AUDIO_AND_VIDEO,
      getPage().waitForEvent("playing"));

  // Guard time to play the video
  waitSeconds(PLAYTIME);

  // Assertions: playback time within tolerance and expected video color
  double currentTime = getPage().getCurrentTime();
  Assert.assertTrue(
      "Error in play time (expected: " + PLAYTIME + " sec, real: " + currentTime + " sec)",
      getPage().compare(PLAYTIME, currentTime));
  Assert.assertTrue("The color of the video should be green",
      getPage().similarColor(CHROME_VIDEOTEST_COLOR));

  // Release Media Pipeline
  mp.release();
}
Example 16
Source File: PlayerFaceOverlayTest.java From kurento-java with Apache License 2.0 | 4 votes |
/**
 * Plays a video through a FaceOverlayFilter that stamps a red square over detected faces,
 * then asserts the overlay color at a known coordinate, the EOS event, and the play time.
 */
@Test
public void testPlayerFaceOverlay() throws Exception {
  // Test data
  final int playTimeSeconds = 30;
  final String mediaUrl = "http://" + getTestFilesHttpPath() + "/video/filter/fiwarecut.mp4";
  final Color expectedColor = Color.RED;
  final int xExpectedColor = 420;
  final int yExpectedColor = 45;
  final String imgOverlayUrl = "http://" + getTestFilesHttpPath() + "/img/red-square.png";
  // Overlay placement relative to the detected face (percent offsets and scaling)
  final float offsetXPercent = -0.2F;
  final float offsetYPercent = -1.2F;
  final float widthPercent = 1.6F;
  final float heightPercent = 1.6F;

  // Media Pipeline: player -> face overlay filter -> WebRTC endpoint
  MediaPipeline mp = kurentoClient.createMediaPipeline();
  PlayerEndpoint playerEp = new PlayerEndpoint.Builder(mp, mediaUrl).build();
  WebRtcEndpoint webRtcEp = new WebRtcEndpoint.Builder(mp).build();
  FaceOverlayFilter filter = new FaceOverlayFilter.Builder(mp).build();
  filter.setOverlayedImage(imgOverlayUrl, offsetXPercent, offsetYPercent, widthPercent,
      heightPercent);
  playerEp.connect(filter);
  filter.connect(webRtcEp);

  // Latch released on end-of-stream from the player
  final CountDownLatch eosLatch = new CountDownLatch(1);
  playerEp.addEndOfStreamListener(new EventListener<EndOfStreamEvent>() {
    @Override
    public void onEvent(EndOfStreamEvent event) {
      eosLatch.countDown();
    }
  });

  // Test execution
  getPage().subscribeEvents("playing");
  getPage().initWebRtc(webRtcEp, WebRtcChannel.AUDIO_AND_VIDEO, WebRtcMode.RCV_ONLY);
  playerEp.play();

  // Assertions
  Assert.assertTrue("Not received media (timeout waiting playing event)",
      getPage().waitForEvent("playing"));
  Assert.assertTrue(
      "Color at coordinates " + xExpectedColor + "," + yExpectedColor + " must be "
          + expectedColor,
      getPage().similarColorAt(expectedColor, xExpectedColor, yExpectedColor));
  Assert.assertTrue("Not received EOS event in player",
      eosLatch.await(getPage().getTimeout(), TimeUnit.SECONDS));
  double currentTime = getPage().getCurrentTime();
  Assert.assertTrue(
      "Error in play time (expected: " + playTimeSeconds + " sec, real: " + currentTime
          + " sec)",
      getPage().compare(playTimeSeconds, currentTime));

  // Release Media Pipeline
  mp.release();
}
Example 17
Source File: PlayerEndTest.java From kurento-java with Apache License 2.0 | 4 votes |
public void doTest(PlayerOperation playerOperation) throws Exception { // Test data final String mediaUrl = "http://" + getTestFilesHttpPath() + "/video/format/small.webm"; final int guardTimeSeconds = 10; // Media Pipeline MediaPipeline mp = kurentoClient.createMediaPipeline(); PlayerEndpoint playerEp = new PlayerEndpoint.Builder(mp, mediaUrl).build(); WebRtcEndpoint webRtcEp = new WebRtcEndpoint.Builder(mp).build(); playerEp.connect(webRtcEp); // Subscription to EOS event final boolean[] eos = new boolean[1]; eos[0] = false; playerEp.addEndOfStreamListener(new EventListener<EndOfStreamEvent>() { @Override public void onEvent(EndOfStreamEvent event) { log.error("EOS event received: {} {}", event.getType(), event.getTimestamp()); eos[0] = true; } }); // WebRTC in receive-only mode getPage().subscribeEvents("playing"); getPage().initWebRtc(webRtcEp, WebRtcChannel.AUDIO_AND_VIDEO, WebRtcMode.RCV_ONLY); playerEp.play(); Assert.assertTrue("Not received media (timeout waiting playing event)", getPage().waitForEvent("playing")); // Stop/release stream and wait x seconds switch (playerOperation) { case STOP: playerEp.stop(); break; case RELEASE: playerEp.release(); break; } Thread.sleep(TimeUnit.SECONDS.toMillis(guardTimeSeconds)); // Verify that EOS event has not being received Assert.assertFalse("EOS event has been received. " + "This should not be happenning because the stream has been stopped", eos[0]); // Release Media Pipeline mp.release(); }
Example 18
Source File: RecorderPipelineDestroyTest.java From kurento-java with Apache License 2.0 | 4 votes |
/**
 * Destroys the recording pipeline halfway through playback (via a scheduled release) and
 * then verifies that the partial recording is still playable in a second pipeline.
 *
 * @param mediaProfileSpecType recording profile (container/codec selection)
 * @param expectedVideoCodec video codec expected in the recording
 * @param expectedAudioCodec audio codec expected in the recording
 * @param extension file extension for the recording URL
 */
public void doTest(MediaProfileSpecType mediaProfileSpecType, String expectedVideoCodec,
    String expectedAudioCodec, String extension) throws Exception {
  // Media Pipeline #1: player feeding both a WebRTC endpoint and a recorder
  final MediaPipeline mp = kurentoClient.createMediaPipeline();
  PlayerEndpoint playerEp =
      new PlayerEndpoint.Builder(mp, getPlayerUrl("/video/10sec/green.webm")).build();
  WebRtcEndpoint webRtcEp1 = new WebRtcEndpoint.Builder(mp).build();
  String recordingFile = getRecordUrl(extension);
  final RecorderEndpoint recorderEp = new RecorderEndpoint.Builder(mp, recordingFile)
      .withMediaProfile(mediaProfileSpecType).build();
  playerEp.connect(webRtcEp1);
  playerEp.connect(recorderEp);

  // Test execution #1. Play the video while it is recorded
  launchBrowser(mp, webRtcEp1, playerEp, recorderEp, expectedVideoCodec, expectedAudioCodec,
      recordingFile, EXPECTED_COLOR, 0, 0, PLAYTIME);

  // Schedule the pipeline release at half playtime, destroying the recorder mid-recording
  final CountDownLatch latch = new CountDownLatch(1);
  ScheduledExecutorService executor = Executors.newSingleThreadScheduledExecutor();
  executor.schedule(new Runnable() {
    @Override
    public void run() {
      // Release Media Pipeline #1
      mp.release();
      latch.countDown();
    }
  }, PLAYTIME / 2, TimeUnit.SECONDS);

  latch.await(getPage().getTimeout(), TimeUnit.SECONDS);

  // Reloading browser
  getPage().reload();

  // Media Pipeline #2: play the partial recording back
  MediaPipeline mp2 = kurentoClient.createMediaPipeline();
  PlayerEndpoint playerEp2 = new PlayerEndpoint.Builder(mp2, recordingFile).build();
  WebRtcEndpoint webRtcEp2 = new WebRtcEndpoint.Builder(mp2).build();
  playerEp2.connect(webRtcEp2);

  // Playing the recording (expected duration is only half the original playtime)
  launchBrowser(null, webRtcEp2, playerEp2, null, expectedVideoCodec, expectedAudioCodec,
      recordingFile, EXPECTED_COLOR, 0, 0, PLAYTIME / 2);

  // Release Media Pipeline #2
  mp2.release();

  executor.shutdown();

  success = true;
}
Example 19
Source File: CompositeWebRtcTest.java From kurento-java with Apache License 2.0 | 4 votes |
/**
 * Composite (mixer) test: four colored WebRTC senders are mixed into one composite stream
 * received by a fifth browser. Asserts each quadrant's color, then inserts a black-and-white
 * GStreamer filter in front of the red input and asserts that quadrant turns gray.
 */
@Test
public void testCompositeWebRtc() throws Exception {
  // Media Pipeline: three senders into a Composite hub
  MediaPipeline mp = kurentoClient.createMediaPipeline();
  WebRtcEndpoint webRtcEpRed = new WebRtcEndpoint.Builder(mp).build();
  WebRtcEndpoint webRtcEpGreen = new WebRtcEndpoint.Builder(mp).build();
  WebRtcEndpoint webRtcEpBlue = new WebRtcEndpoint.Builder(mp).build();

  Composite composite = new Composite.Builder(mp).build();
  HubPort hubPort1 = new HubPort.Builder(composite).build();
  HubPort hubPort2 = new HubPort.Builder(composite).build();
  HubPort hubPort3 = new HubPort.Builder(composite).build();

  webRtcEpRed.connect(hubPort1);
  webRtcEpGreen.connect(hubPort2);
  webRtcEpBlue.connect(hubPort3);

  // Fourth sender (white) added to the composite
  WebRtcEndpoint webRtcEpWhite = new WebRtcEndpoint.Builder(mp).build();
  HubPort hubPort4 = new HubPort.Builder(composite).build();
  webRtcEpWhite.connect(hubPort4);

  // Output port: the mixed stream feeds the receiving endpoint
  WebRtcEndpoint webRtcEpComposite = new WebRtcEndpoint.Builder(mp).build();
  HubPort hubPort5 = new HubPort.Builder(composite).build();
  hubPort5.connect(webRtcEpComposite);

  // WebRTC browsers: four senders, one receiver
  getPage(BROWSER2).initWebRtc(webRtcEpRed, WebRtcChannel.AUDIO_AND_VIDEO, WebRtcMode.SEND_ONLY);
  getPage(BROWSER3).initWebRtc(webRtcEpGreen, WebRtcChannel.AUDIO_AND_VIDEO,
      WebRtcMode.SEND_ONLY);
  getPage(BROWSER4).initWebRtc(webRtcEpBlue, WebRtcChannel.AUDIO_AND_VIDEO, WebRtcMode.SEND_ONLY);
  getPage(BROWSER5).initWebRtc(webRtcEpWhite, WebRtcChannel.AUDIO_AND_VIDEO,
      WebRtcMode.SEND_ONLY);

  getPage(BROWSER1).subscribeEvents("playing");
  getPage(BROWSER1).initWebRtc(webRtcEpComposite, WebRtcChannel.AUDIO_AND_VIDEO,
      WebRtcMode.RCV_ONLY);

  // Assertions: each quadrant of the composite shows one input's color
  Assert.assertTrue("Not received media (timeout waiting playing event)",
      getPage(BROWSER1).waitForEvent("playing"));
  Assert.assertTrue("Upper left part of the video must be red",
      getPage(BROWSER1).similarColorAt(Color.RED, 0, 0));
  Assert.assertTrue("Upper right part of the video must be green",
      getPage(BROWSER1).similarColorAt(Color.GREEN, 450, 0));
  Assert.assertTrue("Lower left part of the video must be blue",
      getPage(BROWSER1).similarColorAt(Color.BLUE, 0, 450));
  Assert.assertTrue("Lower right part of the video must be white",
      getPage(BROWSER1).similarColorAt(Color.WHITE, 450, 450));

  // Finally, a black & white filter is connected to one WebRTC
  GStreamerFilter bwFilter =
      new GStreamerFilter.Builder(mp, "videobalance saturation=0.0").build();
  webRtcEpRed.connect(bwFilter);
  bwFilter.connect(hubPort1);

  Thread.sleep(TimeUnit.SECONDS.toMillis(PLAYTIME));

  Assert.assertTrue("When connecting the filter, the upper left part of the video must be gray",
      getPage(BROWSER1).similarColorAt(new Color(75, 75, 75), 0, 0));

  // Release Media Pipeline
  mp.release();
}
Example 20
Source File: RecorderWebRtcSwitchSequentialTest.java From kurento-java with Apache License 2.0 | 4 votes |
public void doTest(MediaProfileSpecType mediaProfileSpecType, String expectedVideoCodec, String expectedAudioCodec, String extension) throws Exception { final CountDownLatch recorderLatch = new CountDownLatch(1); MediaPipeline mp = null; // Media Pipeline mp = kurentoClient.createMediaPipeline(); WebRtcEndpoint webRtcEp1 = new WebRtcEndpoint.Builder(mp).build(); WebRtcEndpoint webRtcEp2 = new WebRtcEndpoint.Builder(mp).build(); // WebRTC negotiation getPage(0).subscribeLocalEvents("playing"); getPage(0).initWebRtc(webRtcEp1, WebRtcChannel.AUDIO_AND_VIDEO, WebRtcMode.SEND_ONLY); getPage(1).subscribeLocalEvents("playing"); getPage(1).initWebRtc(webRtcEp2, WebRtcChannel.AUDIO_AND_VIDEO, WebRtcMode.SEND_ONLY); // Start record String recordingFile = getRecordUrl(extension); RecorderEndpoint recorderEp = new RecorderEndpoint.Builder(mp, recordingFile) .withMediaProfile(mediaProfileSpecType).build(); recorderEp.record(); // Switch webrtcs for (int i = 0; i < SWITCH_TIMES; i++) { if (i % 2 == 0) { webRtcEp1.connect(recorderEp); } else { webRtcEp2.connect(recorderEp); } Thread.sleep(SWITCH_RATE_MS); } // Stop record recorderEp.stopAndWait(new Continuation<Void>() { @Override public void onSuccess(Void result) throws Exception { recorderLatch.countDown(); } @Override public void onError(Throwable cause) throws Exception { recorderLatch.countDown(); } }); // Assessment Assert.assertTrue("Not received media in browser 1", getPage(0).waitForEvent("playing")); Assert.assertTrue("Not received media in browser 2", getPage(1).waitForEvent("playing")); Assert.assertTrue("Not stop properly", recorderLatch.await(getPage().getTimeout(), TimeUnit.SECONDS)); long expectedTimeMs = SWITCH_TIMES * SWITCH_RATE_MS; AssertMedia.assertCodecs(recordingFile, expectedVideoCodec, expectedAudioCodec); AssertMedia.assertDuration(recordingFile, expectedTimeMs, THRESHOLD_MS); // Release Media Pipeline if (mp != null) { mp.release(); } }