Java Code Examples for org.apache.hadoop.hdfs.DFSTestUtil#urlGet()
The following examples show how to use org.apache.hadoop.hdfs.DFSTestUtil#urlGet().
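Judging from the examples below, DFSTestUtil.urlGet(URL) issues an HTTP GET against the given URL and returns the response body as a String (urlGetBytes returns the raw bytes). The following is a minimal sketch of that pattern, not one of the original examples: the class name, address, and expected JMX string are placeholders, and in the real tests the URL root comes from the daemon under test (e.g. nn.getHttpAddress() or jn.getHttpServerURI()).

import java.net.URL;
import org.apache.hadoop.hdfs.DFSTestUtil;
import org.junit.Test;
import static org.junit.Assert.assertTrue;

public class TestUrlGetSketch {
  @Test
  public void testJmxServlet() throws Exception {
    // Placeholder address: a real test derives the URL root from the daemon
    // under test (e.g. nn.getHttpAddress() or jn.getHttpServerURI()).
    URL jmxUrl = new URL("http://127.0.0.1:50070/jmx");
    // urlGet issues an HTTP GET and returns the response body as a String.
    String pageContents = DFSTestUtil.urlGet(jmxUrl);
    assertTrue("Bad contents: " + pageContents,
        pageContents.contains("java.lang:type="));
  }
}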
Example 1
Source File: TestNfs3HttpServer.java (from hadoop, Apache License 2.0)
@Test
public void testHttpServer() throws Exception {
  Nfs3 nfs = new Nfs3(conf);
  nfs.startServiceInternal(false);
  RpcProgramNfs3 nfsd = (RpcProgramNfs3) nfs.getRpcProgram();

  Nfs3HttpServer infoServer = nfsd.getInfoServer();
  String urlRoot = infoServer.getServerURI().toString();

  // Check default servlets.
  String pageContents = DFSTestUtil.urlGet(new URL(urlRoot + "/jmx"));
  assertTrue("Bad contents: " + pageContents,
      pageContents.contains("java.lang:type="));
  System.out.println("pc:" + pageContents);

  int port = infoServer.getSecurePort();
  assertTrue("Can't get https port", port > 0);
}
Example 2
Source File: TestNfs3HttpServer.java (from big-c, Apache License 2.0)
@Test
public void testHttpServer() throws Exception {
  Nfs3 nfs = new Nfs3(conf);
  nfs.startServiceInternal(false);
  RpcProgramNfs3 nfsd = (RpcProgramNfs3) nfs.getRpcProgram();

  Nfs3HttpServer infoServer = nfsd.getInfoServer();
  String urlRoot = infoServer.getServerURI().toString();

  // Check default servlets.
  String pageContents = DFSTestUtil.urlGet(new URL(urlRoot + "/jmx"));
  assertTrue("Bad contents: " + pageContents,
      pageContents.contains("java.lang:type="));
  System.out.println("pc:" + pageContents);

  int port = infoServer.getSecurePort();
  assertTrue("Can't get https port", port > 0);
}
Example 3
Source File: TestStandbyCheckpoints.java (from hadoop, Apache License 2.0)
@Test(timeout=300000)
public void testReadsAllowedDuringCheckpoint() throws Exception {
  // Set it up so that we know when the SBN checkpoint starts and ends.
  FSImage spyImage1 = NameNodeAdapter.spyOnFsImage(nn1);
  DelayAnswer answerer = new DelayAnswer(LOG);
  Mockito.doAnswer(answerer).when(spyImage1)
      .saveNamespace(Mockito.any(FSNamesystem.class),
          Mockito.any(NameNodeFile.class),
          Mockito.any(Canceler.class));

  // Perform some edits and wait for a checkpoint to start on the SBN.
  doEdits(0, 1000);
  nn0.getRpcServer().rollEditLog();
  answerer.waitForCall();
  assertTrue("SBN is not performing checkpoint but it should be.",
      answerer.getFireCount() == 1 && answerer.getResultCount() == 0);

  // Make sure that the lock has actually been taken by the checkpointing
  // thread.
  ThreadUtil.sleepAtLeastIgnoreInterrupts(1000);

  // Perform an RPC that needs to take the write lock.
  Thread t = new Thread() {
    @Override
    public void run() {
      try {
        nn1.getRpcServer().restoreFailedStorage("false");
      } catch (IOException e) {
        e.printStackTrace();
      }
    }
  };
  t.start();

  // Make sure that our thread is waiting for the lock.
  ThreadUtil.sleepAtLeastIgnoreInterrupts(1000);

  assertFalse(nn1.getNamesystem().getFsLockForTests().hasQueuedThreads());
  assertFalse(nn1.getNamesystem().getFsLockForTests().isWriteLocked());
  assertTrue(nn1.getNamesystem().getCpLockForTests().hasQueuedThreads());

  // Get /jmx of the standby NN web UI, which will cause the FSNS read lock to
  // be taken.
  String pageContents = DFSTestUtil.urlGet(new URL("http://" +
      nn1.getHttpAddress().getHostName() + ":" +
      nn1.getHttpAddress().getPort() + "/jmx"));
  assertTrue(pageContents.contains("NumLiveDataNodes"));

  // Make sure that the checkpoint is still going on, implying that the client
  // RPC to the SBN happened during the checkpoint.
  assertTrue("SBN should have still been checkpointing.",
      answerer.getFireCount() == 1 && answerer.getResultCount() == 0);
  answerer.proceed();
  answerer.waitForResult();
  assertTrue("SBN should have finished checkpointing.",
      answerer.getFireCount() == 1 && answerer.getResultCount() == 1);
  t.join();
}
Example 4
Source File: TestJournalNode.java (from hadoop, Apache License 2.0)
@Test(timeout=100000)
public void testHttpServer() throws Exception {
  String urlRoot = jn.getHttpServerURI();

  // Check default servlets.
  String pageContents = DFSTestUtil.urlGet(new URL(urlRoot + "/jmx"));
  assertTrue("Bad contents: " + pageContents,
      pageContents.contains("Hadoop:service=JournalNode,name=JvmMetrics"));

  // Create some edits on server side
  byte[] EDITS_DATA = QJMTestUtil.createTxnData(1, 3);
  IPCLoggerChannel ch = new IPCLoggerChannel(
      conf, FAKE_NSINFO, journalId, jn.getBoundIpcAddress());
  ch.newEpoch(1).get();
  ch.setEpoch(1);
  ch.startLogSegment(1, NameNodeLayoutVersion.CURRENT_LAYOUT_VERSION).get();
  ch.sendEdits(1L, 1, 3, EDITS_DATA).get();
  ch.finalizeLogSegment(1, 3).get();

  // Attempt to retrieve via HTTP, ensure we get the data back
  // including the header we expected
  byte[] retrievedViaHttp = DFSTestUtil.urlGetBytes(new URL(urlRoot +
      "/getJournal?segmentTxId=1&jid=" + journalId));
  byte[] expected = Bytes.concat(
      Ints.toByteArray(HdfsConstants.NAMENODE_LAYOUT_VERSION),
      (new byte[] { 0, 0, 0, 0 }), // layout flags section
      EDITS_DATA);
  assertArrayEquals(expected, retrievedViaHttp);

  // Attempt to fetch a non-existent file, check that we get an
  // error status code
  URL badUrl = new URL(urlRoot +
      "/getJournal?segmentTxId=12345&jid=" + journalId);
  HttpURLConnection connection = (HttpURLConnection) badUrl.openConnection();
  try {
    assertEquals(404, connection.getResponseCode());
  } finally {
    connection.disconnect();
  }
}
Example 5
Source File: TestStandbyCheckpoints.java (from big-c, Apache License 2.0)
@Test(timeout=300000)
public void testReadsAllowedDuringCheckpoint() throws Exception {
  // Set it up so that we know when the SBN checkpoint starts and ends.
  FSImage spyImage1 = NameNodeAdapter.spyOnFsImage(nn1);
  DelayAnswer answerer = new DelayAnswer(LOG);
  Mockito.doAnswer(answerer).when(spyImage1)
      .saveNamespace(Mockito.any(FSNamesystem.class),
          Mockito.any(NameNodeFile.class),
          Mockito.any(Canceler.class));

  // Perform some edits and wait for a checkpoint to start on the SBN.
  doEdits(0, 1000);
  nn0.getRpcServer().rollEditLog();
  answerer.waitForCall();
  assertTrue("SBN is not performing checkpoint but it should be.",
      answerer.getFireCount() == 1 && answerer.getResultCount() == 0);

  // Make sure that the lock has actually been taken by the checkpointing
  // thread.
  ThreadUtil.sleepAtLeastIgnoreInterrupts(1000);

  // Perform an RPC that needs to take the write lock.
  Thread t = new Thread() {
    @Override
    public void run() {
      try {
        nn1.getRpcServer().restoreFailedStorage("false");
      } catch (IOException e) {
        e.printStackTrace();
      }
    }
  };
  t.start();

  // Make sure that our thread is waiting for the lock.
  ThreadUtil.sleepAtLeastIgnoreInterrupts(1000);

  assertFalse(nn1.getNamesystem().getFsLockForTests().hasQueuedThreads());
  assertFalse(nn1.getNamesystem().getFsLockForTests().isWriteLocked());
  assertTrue(nn1.getNamesystem().getCpLockForTests().hasQueuedThreads());

  // Get /jmx of the standby NN web UI, which will cause the FSNS read lock to
  // be taken.
  String pageContents = DFSTestUtil.urlGet(new URL("http://" +
      nn1.getHttpAddress().getHostName() + ":" +
      nn1.getHttpAddress().getPort() + "/jmx"));
  assertTrue(pageContents.contains("NumLiveDataNodes"));

  // Make sure that the checkpoint is still going on, implying that the client
  // RPC to the SBN happened during the checkpoint.
  assertTrue("SBN should have still been checkpointing.",
      answerer.getFireCount() == 1 && answerer.getResultCount() == 0);
  answerer.proceed();
  answerer.waitForResult();
  assertTrue("SBN should have finished checkpointing.",
      answerer.getFireCount() == 1 && answerer.getResultCount() == 1);
  t.join();
}
Example 6
Source File: TestJournalNode.java (from big-c, Apache License 2.0)
@Test(timeout=100000)
public void testHttpServer() throws Exception {
  String urlRoot = jn.getHttpServerURI();

  // Check default servlets.
  String pageContents = DFSTestUtil.urlGet(new URL(urlRoot + "/jmx"));
  assertTrue("Bad contents: " + pageContents,
      pageContents.contains("Hadoop:service=JournalNode,name=JvmMetrics"));

  // Create some edits on server side
  byte[] EDITS_DATA = QJMTestUtil.createTxnData(1, 3);
  IPCLoggerChannel ch = new IPCLoggerChannel(
      conf, FAKE_NSINFO, journalId, jn.getBoundIpcAddress());
  ch.newEpoch(1).get();
  ch.setEpoch(1);
  ch.startLogSegment(1, NameNodeLayoutVersion.CURRENT_LAYOUT_VERSION).get();
  ch.sendEdits(1L, 1, 3, EDITS_DATA).get();
  ch.finalizeLogSegment(1, 3).get();

  // Attempt to retrieve via HTTP, ensure we get the data back
  // including the header we expected
  byte[] retrievedViaHttp = DFSTestUtil.urlGetBytes(new URL(urlRoot +
      "/getJournal?segmentTxId=1&jid=" + journalId));
  byte[] expected = Bytes.concat(
      Ints.toByteArray(HdfsConstants.NAMENODE_LAYOUT_VERSION),
      (new byte[] { 0, 0, 0, 0 }), // layout flags section
      EDITS_DATA);
  assertArrayEquals(expected, retrievedViaHttp);

  // Attempt to fetch a non-existent file, check that we get an
  // error status code
  URL badUrl = new URL(urlRoot +
      "/getJournal?segmentTxId=12345&jid=" + journalId);
  HttpURLConnection connection = (HttpURLConnection) badUrl.openConnection();
  try {
    assertEquals(404, connection.getResponseCode());
  } finally {
    connection.disconnect();
  }
}