org.apache.hadoop.mapreduce.v2.app.job.Job Java Examples
The following examples show how to use org.apache.hadoop.mapreduce.v2.app.job.Job, the read-only interface through which the MapReduce ApplicationMaster (and the JobHistory server) exposes a job's state, tasks, attempts, reports, counters, and configuration. Each example notes the source file, the project it comes from, and its license.
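Most of the examples below obtain Job instances from an AppContext (the live ApplicationMaster context, or a mocked one in tests) and then walk the job's tasks and attempts. The following minimal sketch illustrates that common pattern; it is not taken from any of the projects above, and the class and method names are illustrative only.

import java.util.Map;

import org.apache.hadoop.mapreduce.v2.api.records.JobId;
import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
import org.apache.hadoop.mapreduce.v2.app.AppContext;
import org.apache.hadoop.mapreduce.v2.app.job.Job;
import org.apache.hadoop.mapreduce.v2.app.job.Task;

public class JobSummaryPrinter {
  // Walk every Job known to the given AppContext and print a short summary.
  // The AppContext is assumed to be supplied by the caller, for example the
  // running MRAppMaster's context or a mocked context in a test.
  public static void printJobSummaries(AppContext appContext) {
    Map<JobId, Job> jobs = appContext.getAllJobs();
    for (Job job : jobs.values()) {
      System.out.println("Job " + job.getID()
          + " state=" + job.getState()
          + " maps=" + job.getCompletedMaps() + "/" + job.getTotalMaps()
          + " reduces=" + job.getCompletedReduces() + "/" + job.getTotalReduces());
      for (Map.Entry<TaskId, Task> entry : job.getTasks().entrySet()) {
        Task task = entry.getValue();
        System.out.println("  Task " + entry.getKey()
            + " finished=" + task.isFinished()
            + " attempts=" + task.getAttempts().size());
      }
    }
  }
}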
Example #1
Source File: TestRMContainerAllocator.java (from hadoop, Apache License 2.0)

private static AppContext createAppContext(
    ApplicationAttemptId appAttemptId, Job job) {
  AppContext context = mock(AppContext.class);
  ApplicationId appId = appAttemptId.getApplicationId();
  when(context.getApplicationID()).thenReturn(appId);
  when(context.getApplicationAttemptId()).thenReturn(appAttemptId);
  when(context.getJob(isA(JobId.class))).thenReturn(job);
  when(context.getClusterInfo()).thenReturn(
      new ClusterInfo(Resource.newInstance(10240, 1)));
  when(context.getEventHandler()).thenReturn(new EventHandler() {
    @Override
    public void handle(Event event) {
      // Only capture interesting events.
      if (event instanceof TaskAttemptContainerAssignedEvent) {
        events.add((TaskAttemptContainerAssignedEvent) event);
      } else if (event instanceof TaskAttemptKillEvent) {
        taskAttemptKillEvents.add((TaskAttemptKillEvent) event);
      } else if (event instanceof JobUpdatedNodesEvent) {
        jobUpdatedNodeEvents.add((JobUpdatedNodesEvent) event);
      } else if (event instanceof JobEvent) {
        jobEvents.add((JobEvent) event);
      }
    }
  });
  return context;
}
Example #2
Source File: HsAttemptsPage.java (from big-c, Apache License 2.0)

@Override
protected Collection<TaskAttempt> getTaskAttempts() {
  List<TaskAttempt> fewTaskAttemps = new ArrayList<TaskAttempt>();
  String taskTypeStr = $(TASK_TYPE);
  TaskType taskType = MRApps.taskType(taskTypeStr);
  String attemptStateStr = $(ATTEMPT_STATE);
  TaskAttemptStateUI neededState = MRApps
      .taskAttemptState(attemptStateStr);
  Job j = app.getJob();
  Map<TaskId, Task> tasks = j.getTasks(taskType);
  for (Task task : tasks.values()) {
    Map<TaskAttemptId, TaskAttempt> attempts = task.getAttempts();
    for (TaskAttempt attempt : attempts.values()) {
      if (neededState.correspondsTo(attempt.getState())) {
        fewTaskAttemps.add(attempt);
      }
    }
  }
  return fewTaskAttemps;
}
Example #3
Source File: TestAMWebServicesAttempts.java (from hadoop, Apache License 2.0)

@Test
public void testTaskAttemptsDefault() throws JSONException, Exception {
  WebResource r = resource();
  Map<JobId, Job> jobsMap = appContext.getAllJobs();
  for (JobId id : jobsMap.keySet()) {
    String jobId = MRApps.toString(id);
    for (Task task : jobsMap.get(id).getTasks().values()) {
      String tid = MRApps.toString(task.getID());
      ClientResponse response = r.path("ws").path("v1").path("mapreduce")
          .path("jobs").path(jobId).path("tasks").path(tid).path("attempts")
          .get(ClientResponse.class);
      assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
      JSONObject json = response.getEntity(JSONObject.class);
      verifyAMTaskAttempts(json, task);
    }
  }
}
Example #4
Source File: MRApp.java (from hadoop, Apache License 2.0)

public void verifyCompleted() {
  for (Job job : getContext().getAllJobs().values()) {
    JobReport jobReport = job.getReport();
    System.out.println("Job start time :" + jobReport.getStartTime());
    System.out.println("Job finish time :" + jobReport.getFinishTime());
    Assert.assertTrue("Job start time is not less than finish time",
        jobReport.getStartTime() <= jobReport.getFinishTime());
    Assert.assertTrue("Job finish time is in future",
        jobReport.getFinishTime() <= System.currentTimeMillis());
    for (Task task : job.getTasks().values()) {
      TaskReport taskReport = task.getReport();
      System.out.println("Task start time : " + taskReport.getStartTime());
      System.out.println("Task finish time : " + taskReport.getFinishTime());
      Assert.assertTrue("Task start time is not less than finish time",
          taskReport.getStartTime() <= taskReport.getFinishTime());
      for (TaskAttempt attempt : task.getAttempts().values()) {
        TaskAttemptReport attemptReport = attempt.getReport();
        Assert.assertTrue("Attempt start time is not less than finish time",
            attemptReport.getStartTime() <= attemptReport.getFinishTime());
      }
    }
  }
}
Example #5
Source File: TestHsWebServicesTasks.java (from big-c, Apache License 2.0)

@Test
public void testTaskIdCountersDefault() throws JSONException, Exception {
  WebResource r = resource();
  Map<JobId, Job> jobsMap = appContext.getAllJobs();
  for (JobId id : jobsMap.keySet()) {
    String jobId = MRApps.toString(id);
    for (Task task : jobsMap.get(id).getTasks().values()) {
      String tid = MRApps.toString(task.getID());
      ClientResponse response = r.path("ws").path("v1").path("history")
          .path("mapreduce").path("jobs").path(jobId).path("tasks").path(tid)
          .path("counters").get(ClientResponse.class);
      assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
      JSONObject json = response.getEntity(JSONObject.class);
      assertEquals("incorrect number of elements", 1, json.length());
      JSONObject info = json.getJSONObject("jobTaskCounters");
      verifyHsJobTaskCounters(info, task);
    }
  }
}
Example #6
Source File: TestHsWebServicesJobConf.java (from big-c, Apache License 2.0)

public void verifyHsJobConfXML(NodeList nodes, Job job) {
  assertEquals("incorrect number of elements", 1, nodes.getLength());
  for (int i = 0; i < nodes.getLength(); i++) {
    Element element = (Element) nodes.item(i);
    WebServicesTestUtils.checkStringMatch("path", job.getConfFile()
        .toString(), WebServicesTestUtils.getXmlString(element, "path"));
    // just do simple verification of the fields - not that the data
    // in the fields is correct
    NodeList properties = element.getElementsByTagName("property");
    for (int j = 0; j < properties.getLength(); j++) {
      Element property = (Element) properties.item(j);
      assertNotNull("should have counters in the web service info", property);
      String name = WebServicesTestUtils.getXmlString(property, "name");
      String value = WebServicesTestUtils.getXmlString(property, "value");
      assertTrue("name not set", (name != null && !name.isEmpty()));
      assertTrue("value not set", (value != null && !value.isEmpty()));
    }
  }
}
Example #7
Source File: TestHsWebServicesJobsQuery.java (from big-c, Apache License 2.0)

@Test
public void testJobsQueryState() throws JSONException, Exception {
  WebResource r = resource();
  // we only create 3 jobs and it cycles through states so we should have
  // 3 unique states
  Map<JobId, Job> jobsMap = appContext.getAllJobs();
  String queryState = "BOGUS";
  JobId jid = null;
  for (Map.Entry<JobId, Job> entry : jobsMap.entrySet()) {
    jid = entry.getValue().getID();
    queryState = entry.getValue().getState().toString();
    break;
  }
  ClientResponse response = r.path("ws").path("v1").path("history")
      .path("mapreduce").path("jobs").queryParam("state", queryState)
      .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
  assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
  JSONObject json = response.getEntity(JSONObject.class);
  assertEquals("incorrect number of elements", 1, json.length());
  JSONObject jobs = json.getJSONObject("jobs");
  JSONArray arr = jobs.getJSONArray("job");
  assertEquals("incorrect number of elements", 1, arr.length());
  JSONObject info = arr.getJSONObject(0);
  Job job = appContext.getPartialJob(jid);
  VerifyJobsUtils.verifyHsJobPartial(info, job);
}
Example #8
Source File: TestAMWebServicesJobConf.java (from hadoop, Apache License 2.0)

@Test
public void testJobConfXML() throws JSONException, Exception {
  WebResource r = resource();
  Map<JobId, Job> jobsMap = appContext.getAllJobs();
  for (JobId id : jobsMap.keySet()) {
    String jobId = MRApps.toString(id);
    ClientResponse response = r.path("ws").path("v1").path("mapreduce")
        .path("jobs").path(jobId).path("conf")
        .accept(MediaType.APPLICATION_XML).get(ClientResponse.class);
    assertEquals(MediaType.APPLICATION_XML_TYPE, response.getType());
    String xml = response.getEntity(String.class);
    DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
    DocumentBuilder db = dbf.newDocumentBuilder();
    InputSource is = new InputSource();
    is.setCharacterStream(new StringReader(xml));
    Document dom = db.parse(is);
    NodeList info = dom.getElementsByTagName("conf");
    verifyAMJobConfXML(info, jobsMap.get(id));
  }
}
Example #9
Source File: AppController.java (from hadoop, Apache License 2.0)

/**
 * Ensure that a JOB_ID was passed into the page.
 */
public void requireJob() {
  if ($(JOB_ID).isEmpty()) {
    badRequest("missing job ID");
    throw new RuntimeException("Bad Request: Missing job ID");
  }

  JobId jobID = MRApps.toJobID($(JOB_ID));
  app.setJob(app.context.getJob(jobID));
  if (app.getJob() == null) {
    notFound($(JOB_ID));
    throw new RuntimeException("Not Found: " + $(JOB_ID));
  }

  /* check for acl access */
  Job job = app.context.getJob(jobID);
  if (!checkAccess(job)) {
    accessDenied("User " + request().getRemoteUser() + " does not have "
        + " permission to view job " + $(JOB_ID));
    throw new RuntimeException("Access denied: User "
        + request().getRemoteUser() + " does not have permission to view job "
        + $(JOB_ID));
  }
}
Example #10
Source File: TestRuntimeEstimators.java (from big-c, Apache License 2.0)

private float getReduceProgress() {
  Job job = myAppContext.getJob(myAttemptID.getTaskId().getJobId());
  float runtime = getCodeRuntime();

  Collection<Task> allMapTasks = job.getTasks(TaskType.MAP).values();
  int numberMaps = allMapTasks.size();
  int numberDoneMaps = 0;

  for (Task mapTask : allMapTasks) {
    if (mapTask.isFinished()) {
      ++numberDoneMaps;
    }
  }

  if (numberMaps == numberDoneMaps) {
    shuffleCompletedTime = Math.min(shuffleCompletedTime, clock.getTime());

    return Math.min(
        (float) (clock.getTime() - shuffleCompletedTime)
            / (runtime * 2000.0F) + 0.5F,
        1.0F);
  } else {
    return ((float) numberDoneMaps) / numberMaps * 0.5F;
  }
}
Example #11
Source File: TestStagingCleanup.java (from big-c, Apache License 2.0)

@Override
protected Job createJob(Configuration conf, JobStateInternal forcedState,
    String diagnostic) {
  UserGroupInformation currentUser = null;
  try {
    currentUser = UserGroupInformation.getCurrentUser();
  } catch (IOException e) {
    throw new YarnRuntimeException(e);
  }
  Job newJob = new TestJob(getJobId(), getAttemptID(), conf,
      getDispatcher().getEventHandler(),
      getTaskAttemptListener(), getContext().getClock(),
      getCommitter(), isNewApiCommitter(),
      currentUser.getUserName(), getContext(),
      forcedState, diagnostic);
  ((AppContext) getContext()).getAllJobs().put(newJob.getID(), newJob);

  getDispatcher().register(JobFinishEvent.Type.class,
      createJobFinishEventHandler());

  return newJob;
}
Example #12
Source File: TestBlocks.java (from hadoop, Apache License 2.0)

private Job getJob() {
  Job job = mock(Job.class);

  JobId jobId = new JobIdPBImpl();
  ApplicationId appId = ApplicationIdPBImpl.newInstance(
      System.currentTimeMillis(), 4);
  jobId.setAppId(appId);
  jobId.setId(1);
  when(job.getID()).thenReturn(jobId);

  JobReport report = mock(JobReport.class);
  when(report.getStartTime()).thenReturn(100010L);
  when(report.getFinishTime()).thenReturn(100015L);

  when(job.getReport()).thenReturn(report);
  when(job.getName()).thenReturn("JobName");
  when(job.getUserName()).thenReturn("UserName");
  when(job.getQueueName()).thenReturn("QueueName");
  when(job.getState()).thenReturn(JobState.SUCCEEDED);
  when(job.getTotalMaps()).thenReturn(3);
  when(job.getCompletedMaps()).thenReturn(2);
  when(job.getTotalReduces()).thenReturn(2);
  when(job.getCompletedReduces()).thenReturn(1);
  return job;
}
Example #13
Source File: AMWebServices.java (from hadoop, Apache License 2.0)

@GET
@Path("/jobs")
@Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
public JobsInfo getJobs(@Context HttpServletRequest hsr) {
  init();
  JobsInfo allJobs = new JobsInfo();
  for (Job job : appCtx.getAllJobs().values()) {
    // getAllJobs only gives you a partial, we want a full
    Job fullJob = appCtx.getJob(job.getID());
    if (fullJob == null) {
      continue;
    }
    allJobs.add(new JobInfo(fullJob, hasAccess(fullJob, hsr)));
  }
  return allJobs;
}
Example #14
Source File: LegacyTaskRuntimeEstimator.java (from big-c, Apache License 2.0)

private long storedPerAttemptValue(
    Map<TaskAttempt, AtomicLong> data, TaskAttemptId attemptID) {
  TaskId taskID = attemptID.getTaskId();
  JobId jobID = taskID.getJobId();
  Job job = context.getJob(jobID);

  Task task = job.getTask(taskID);

  if (task == null) {
    return -1L;
  }

  TaskAttempt taskAttempt = task.getAttempt(attemptID);

  if (taskAttempt == null) {
    return -1L;
  }

  AtomicLong estimate = data.get(taskAttempt);

  return estimate == null ? -1L : estimate.get();
}
Example #15
Source File: TestMRApp.java (from hadoop, Apache License 2.0)

@Test
public void testJobRebootOnLastRetryOnUnregistrationFailure()
    throws Exception {
  // make startCount 2 since this is the last retry, which equals
  // DEFAULT_MAX_AM_RETRY
  // The last param mocks the unregistration failure
  MRApp app = new MRApp(1, 0, false, this.getClass().getName(), true, 2, false);

  Configuration conf = new Configuration();
  Job job = app.submit(conf);
  app.waitForState(job, JobState.RUNNING);
  Assert.assertEquals("Num tasks not correct", 1, job.getTasks().size());
  Iterator<Task> it = job.getTasks().values().iterator();
  Task task = it.next();
  app.waitForState(task, TaskState.RUNNING);

  // send a reboot event
  app.getContext().getEventHandler().handle(new JobEvent(job.getID(),
      JobEventType.JOB_AM_REBOOT));

  app.waitForInternalState((JobImpl) job, JobStateInternal.REBOOT);
  // return external state as RUNNING if this is the last retry while
  // unregistration fails
  app.waitForState(job, JobState.RUNNING);
}
Example #16
Source File: TestMRApp.java (from hadoop, Apache License 2.0)

@Test
public void testJobError() throws Exception {
  MRApp app = new MRApp(1, 0, false, this.getClass().getName(), true);
  Job job = app.submit(new Configuration());
  app.waitForState(job, JobState.RUNNING);
  Assert.assertEquals("Num tasks not correct", 1, job.getTasks().size());
  Iterator<Task> it = job.getTasks().values().iterator();
  Task task = it.next();
  app.waitForState(task, TaskState.RUNNING);

  // send an invalid event on task at current state
  app.getContext().getEventHandler().handle(
      new TaskEvent(task.getID(), TaskEventType.T_SCHEDULE));

  // this must lead to job error
  app.waitForState(job, JobState.ERROR);
}
Example #17
Source File: TestStagingCleanup.java (from hadoop, Apache License 2.0)

@Override
protected Job createJob(Configuration conf, JobStateInternal forcedState,
    String diagnostic) {
  UserGroupInformation currentUser = null;
  try {
    currentUser = UserGroupInformation.getCurrentUser();
  } catch (IOException e) {
    throw new YarnRuntimeException(e);
  }
  Job newJob = new TestJob(getJobId(), getAttemptID(), conf,
      getDispatcher().getEventHandler(),
      getTaskAttemptListener(), getContext().getClock(),
      getCommitter(), isNewApiCommitter(),
      currentUser.getUserName(), getContext(),
      forcedState, diagnostic);
  ((AppContext) getContext()).getAllJobs().put(newJob.getID(), newJob);

  getDispatcher().register(JobFinishEvent.Type.class,
      createJobFinishEventHandler());

  return newJob;
}
Example #18
Source File: TestHsWebServicesJobs.java (from big-c, Apache License 2.0)

@Test
public void testJobAttempts() throws JSONException, Exception {
  WebResource r = resource();
  Map<JobId, Job> jobsMap = appContext.getAllJobs();
  for (JobId id : jobsMap.keySet()) {
    String jobId = MRApps.toString(id);
    ClientResponse response = r.path("ws").path("v1").path("history")
        .path("mapreduce").path("jobs").path(jobId).path("jobattempts")
        .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
    JSONObject json = response.getEntity(JSONObject.class);
    assertEquals("incorrect number of elements", 1, json.length());
    JSONObject info = json.getJSONObject("jobAttempts");
    verifyHsJobAttempts(info, appContext.getJob(id));
  }
}
Example #19
Source File: TestAMWebServicesJobs.java (from big-c, Apache License 2.0)

@Test
public void testJobCountersXML() throws Exception {
  WebResource r = resource();
  Map<JobId, Job> jobsMap = appContext.getAllJobs();
  for (JobId id : jobsMap.keySet()) {
    String jobId = MRApps.toString(id);
    ClientResponse response = r.path("ws").path("v1").path("mapreduce")
        .path("jobs").path(jobId).path("counters")
        .accept(MediaType.APPLICATION_XML).get(ClientResponse.class);
    assertEquals(MediaType.APPLICATION_XML_TYPE, response.getType());
    String xml = response.getEntity(String.class);
    DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
    DocumentBuilder db = dbf.newDocumentBuilder();
    InputSource is = new InputSource();
    is.setCharacterStream(new StringReader(xml));
    Document dom = db.parse(is);
    NodeList info = dom.getElementsByTagName("jobCounters");
    verifyAMJobCountersXML(info, jobsMap.get(id));
  }
}
Example #20
Source File: TestHsWebServicesTasks.java (from hadoop, Apache License 2.0)

@Test
public void testTaskIdCountersDefault() throws JSONException, Exception {
  WebResource r = resource();
  Map<JobId, Job> jobsMap = appContext.getAllJobs();
  for (JobId id : jobsMap.keySet()) {
    String jobId = MRApps.toString(id);
    for (Task task : jobsMap.get(id).getTasks().values()) {
      String tid = MRApps.toString(task.getID());
      ClientResponse response = r.path("ws").path("v1").path("history")
          .path("mapreduce").path("jobs").path(jobId).path("tasks").path(tid)
          .path("counters").get(ClientResponse.class);
      assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
      JSONObject json = response.getEntity(JSONObject.class);
      assertEquals("incorrect number of elements", 1, json.length());
      JSONObject info = json.getJSONObject("jobTaskCounters");
      verifyHsJobTaskCounters(info, task);
    }
  }
}
Example #21
Source File: TestHsWebServicesJobs.java (from big-c, Apache License 2.0)

@Test
public void testJobAttemptsXML() throws Exception {
  WebResource r = resource();
  Map<JobId, Job> jobsMap = appContext.getAllJobs();
  for (JobId id : jobsMap.keySet()) {
    String jobId = MRApps.toString(id);
    ClientResponse response = r.path("ws").path("v1").path("history")
        .path("mapreduce").path("jobs").path(jobId).path("jobattempts")
        .accept(MediaType.APPLICATION_XML).get(ClientResponse.class);
    assertEquals(MediaType.APPLICATION_XML_TYPE, response.getType());
    String xml = response.getEntity(String.class);
    DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
    DocumentBuilder db = dbf.newDocumentBuilder();
    InputSource is = new InputSource();
    is.setCharacterStream(new StringReader(xml));
    Document dom = db.parse(is);
    NodeList attempts = dom.getElementsByTagName("jobAttempts");
    assertEquals("incorrect number of elements", 1, attempts.getLength());
    NodeList info = dom.getElementsByTagName("jobAttempt");
    verifyHsJobAttemptsXML(info, appContext.getJob(id));
  }
}
Example #22
Source File: TestAMWebServicesTasks.java (from hadoop, Apache License 2.0)

@Test
public void testTaskIdCountersDefault() throws JSONException, Exception {
  WebResource r = resource();
  Map<JobId, Job> jobsMap = appContext.getAllJobs();
  for (JobId id : jobsMap.keySet()) {
    String jobId = MRApps.toString(id);
    for (Task task : jobsMap.get(id).getTasks().values()) {
      String tid = MRApps.toString(task.getID());
      ClientResponse response = r.path("ws").path("v1").path("mapreduce")
          .path("jobs").path(jobId).path("tasks").path(tid).path("counters")
          .get(ClientResponse.class);
      assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
      JSONObject json = response.getEntity(JSONObject.class);
      assertEquals("incorrect number of elements", 1, json.length());
      JSONObject info = json.getJSONObject("jobTaskCounters");
      verifyAMJobTaskCounters(info, task);
    }
  }
}
Example #23
Source File: TestAMWebServicesJobs.java (from big-c, Apache License 2.0)

@Test
public void testJobAttemptsDefault() throws JSONException, Exception {
  WebResource r = resource();
  Map<JobId, Job> jobsMap = appContext.getAllJobs();
  for (JobId id : jobsMap.keySet()) {
    String jobId = MRApps.toString(id);
    ClientResponse response = r.path("ws").path("v1")
        .path("mapreduce").path("jobs").path(jobId).path("jobattempts")
        .get(ClientResponse.class);
    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
    JSONObject json = response.getEntity(JSONObject.class);
    assertEquals("incorrect number of elements", 1, json.length());
    JSONObject info = json.getJSONObject("jobAttempts");
    verifyJobAttempts(info, jobsMap.get(id));
  }
}
Example #24
Source File: TestHsWebServicesJobConf.java (from hadoop, Apache License 2.0)

public void verifyHsJobConf(JSONObject info, Job job) throws JSONException {
  assertEquals("incorrect number of elements", 2, info.length());

  WebServicesTestUtils.checkStringMatch("path", job.getConfFile().toString(),
      info.getString("path"));

  // just do simple verification of the fields - not that the data
  // in the fields is correct
  JSONArray properties = info.getJSONArray("property");
  for (int i = 0; i < properties.length(); i++) {
    JSONObject prop = properties.getJSONObject(i);
    String name = prop.getString("name");
    String value = prop.getString("value");
    assertTrue("name not set", (name != null && !name.isEmpty()));
    assertTrue("value not set", (value != null && !value.isEmpty()));
  }
}
Example #25
Source File: TestAMWebServicesTasks.java (from big-c, Apache License 2.0)

@Test
public void testTasks() throws JSONException, Exception {
  WebResource r = resource();
  Map<JobId, Job> jobsMap = appContext.getAllJobs();
  for (JobId id : jobsMap.keySet()) {
    String jobId = MRApps.toString(id);
    ClientResponse response = r.path("ws").path("v1").path("mapreduce")
        .path("jobs").path(jobId).path("tasks")
        .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
    JSONObject json = response.getEntity(JSONObject.class);
    assertEquals("incorrect number of elements", 1, json.length());
    JSONObject tasks = json.getJSONObject("tasks");
    JSONArray arr = tasks.getJSONArray("task");
    assertEquals("incorrect number of elements", 2, arr.length());
    verifyAMTask(arr, jobsMap.get(id), null);
  }
}
Example #26
Source File: TestAMWebServicesJobs.java (from big-c, Apache License 2.0)

@Test
public void testJobIdXML() throws Exception {
  WebResource r = resource();
  Map<JobId, Job> jobsMap = appContext.getAllJobs();
  for (JobId id : jobsMap.keySet()) {
    String jobId = MRApps.toString(id);
    ClientResponse response = r.path("ws").path("v1").path("mapreduce")
        .path("jobs").path(jobId).accept(MediaType.APPLICATION_XML)
        .get(ClientResponse.class);
    assertEquals(MediaType.APPLICATION_XML_TYPE, response.getType());
    String xml = response.getEntity(String.class);
    DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
    DocumentBuilder db = dbf.newDocumentBuilder();
    InputSource is = new InputSource();
    is.setCharacterStream(new StringReader(xml));
    Document dom = db.parse(is);
    NodeList job = dom.getElementsByTagName("job");
    verifyAMJobXML(job, appContext);
  }
}
Example #27
Source File: TestRMContainerAllocator.java (from hadoop, Apache License 2.0)

public MyContainerAllocator(MyResourceManager rm, Configuration conf,
    ApplicationAttemptId appAttemptId, Job job) {
  super(createMockClientService(), createAppContext(appAttemptId, job));
  this.rm = rm;
  super.init(conf);
  super.start();
}
Example #28
Source File: TestHsWebServicesJobsQuery.java (from big-c, Apache License 2.0)

@Test
public void testJobsQueryFinishTimeBeginEnd() throws JSONException, Exception {
  WebResource r = resource();

  Map<JobId, Job> jobsMap = appContext.getAllJobs();
  int size = jobsMap.size();
  // figure out the mid finish time - we expect at least 3 jobs
  ArrayList<Long> finishTime = new ArrayList<Long>(size);
  for (Map.Entry<JobId, Job> entry : jobsMap.entrySet()) {
    finishTime.add(entry.getValue().getReport().getFinishTime());
  }
  Collections.sort(finishTime);

  assertTrue("Error we must have at least 3 jobs", size >= 3);
  long midFinishTime = finishTime.get(size - 2);

  ClientResponse response = r.path("ws").path("v1").path("history")
      .path("mapreduce").path("jobs")
      .queryParam("finishedTimeBegin", String.valueOf(40000))
      .queryParam("finishedTimeEnd", String.valueOf(midFinishTime))
      .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
  assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
  JSONObject json = response.getEntity(JSONObject.class);
  assertEquals("incorrect number of elements", 1, json.length());
  JSONObject jobs = json.getJSONObject("jobs");
  JSONArray arr = jobs.getJSONArray("job");
  assertEquals("incorrect number of elements", size - 1, arr.length());
}
Example #29
Source File: TestHsWebServicesAttempts.java (from hadoop, Apache License 2.0)

private void testTaskAttemptIdErrorGeneric(String attid, String error)
    throws JSONException, Exception {
  WebResource r = resource();
  Map<JobId, Job> jobsMap = appContext.getAllJobs();
  for (JobId id : jobsMap.keySet()) {
    String jobId = MRApps.toString(id);
    for (Task task : jobsMap.get(id).getTasks().values()) {
      String tid = MRApps.toString(task.getID());
      try {
        r.path("ws").path("v1").path("history").path("mapreduce")
            .path("jobs").path(jobId).path("tasks").path(tid)
            .path("attempts").path(attid).accept(MediaType.APPLICATION_JSON)
            .get(JSONObject.class);
        fail("should have thrown exception on invalid uri");
      } catch (UniformInterfaceException ue) {
        ClientResponse response = ue.getResponse();
        assertEquals(Status.NOT_FOUND, response.getClientResponseStatus());
        assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
        JSONObject msg = response.getEntity(JSONObject.class);
        JSONObject exception = msg.getJSONObject("RemoteException");
        assertEquals("incorrect number of elements", 3, exception.length());
        String message = exception.getString("message");
        String type = exception.getString("exception");
        String classname = exception.getString("javaClassName");
        WebServicesTestUtils.checkStringMatch("exception message", error,
            message);
        WebServicesTestUtils.checkStringMatch("exception type",
            "NotFoundException", type);
        WebServicesTestUtils.checkStringMatch("exception classname",
            "org.apache.hadoop.yarn.webapp.NotFoundException", classname);
      }
    }
  }
}
Example #30
Source File: TestHsWebServicesJobs.java (from big-c, Apache License 2.0)

public void verifyHsJobAttempts(JSONObject info, Job job)
    throws JSONException {
  JSONArray attempts = info.getJSONArray("jobAttempt");
  assertEquals("incorrect number of elements", 2, attempts.length());
  for (int i = 0; i < attempts.length(); i++) {
    JSONObject attempt = attempts.getJSONObject(i);
    verifyHsJobAttemptsGeneric(job, attempt.getString("nodeHttpAddress"),
        attempt.getString("nodeId"), attempt.getInt("id"),
        attempt.getLong("startTime"), attempt.getString("containerId"),
        attempt.getString("logsLink"));
  }
}