Java Code Examples for org.apache.hadoop.mapreduce.v2.api.records.TaskId#getTaskType()
The following examples show how to use org.apache.hadoop.mapreduce.v2.api.records.TaskId#getTaskType().
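Before the project examples, here is a minimal, self-contained sketch of the accessor. It assembles a TaskId through the YARN Records factory, the same pattern the TestMRApps and MockJobs examples below use; the class name TaskTypeDemo is only a placeholder for this illustration.

import org.apache.hadoop.mapreduce.v2.api.records.JobId;
import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
import org.apache.hadoop.mapreduce.v2.util.MRApps;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.util.Records;

public class TaskTypeDemo {
  public static void main(String[] args) {
    // Assemble a TaskId record: job 0 of a dummy application, first map task.
    JobId jobId = Records.newRecord(JobId.class);
    jobId.setAppId(ApplicationId.newInstance(0, 0));
    jobId.setId(0);

    TaskId taskId = Records.newRecord(TaskId.class);
    taskId.setJobId(jobId);
    taskId.setId(0);
    taskId.setTaskType(TaskType.MAP);

    // getTaskType() reports whether this task is a map or a reduce task.
    TaskType type = taskId.getTaskType();
    System.out.println(type);                    // MAP
    System.out.println(MRApps.toString(taskId)); // task_0_0000_m_000000, as asserted in TestMRApps below
  }
}

The returned TaskType is what callers branch on to treat map and reduce tasks differently, as the DefaultSpeculator and StartEndTimesBase examples below show.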
Example 1
Source File: DefaultSpeculator.java From hadoop with Apache License 2.0
private AtomicInteger containerNeed(TaskId taskID) {
  JobId jobID = taskID.getJobId();
  // The task type decides which per-job counter map (map vs. reduce) applies.
  TaskType taskType = taskID.getTaskType();

  ConcurrentMap<JobId, AtomicInteger> relevantMap
      = taskType == TaskType.MAP ? mapContainerNeeds : reduceContainerNeeds;

  AtomicInteger result = relevantMap.get(jobID);
  if (result == null) {
    relevantMap.putIfAbsent(jobID, new AtomicInteger(0));
    result = relevantMap.get(jobID);
  }

  return result;
}
Example 2
Source File: DefaultSpeculator.java From big-c with Apache License 2.0
(The big-c version of this method is identical to Example 1 above.)
Example 3
Source File: StartEndTimesBase.java From hadoop with Apache License 2.0
@Override
public long thresholdRuntime(TaskId taskID) {
  JobId jobID = taskID.getJobId();
  Job job = context.getJob(jobID);
  TaskType type = taskID.getTaskType();

  DataStatistics statistics = dataStatisticsForTask(taskID);

  // Completion counters are tracked separately for map and reduce tasks.
  int completedTasksOfType = type == TaskType.MAP
      ? job.getCompletedMaps() : job.getCompletedReduces();
  int totalTasksOfType = type == TaskType.MAP
      ? job.getTotalMaps() : job.getTotalReduces();

  if (completedTasksOfType < MINIMUM_COMPLETE_NUMBER_TO_SPECULATE
      || (((float) completedTasksOfType) / totalTasksOfType)
          < MINIMUM_COMPLETE_PROPORTION_TO_SPECULATE) {
    return Long.MAX_VALUE;
  }

  long result = statistics == null
      ? Long.MAX_VALUE
      : (long) statistics.outlier(slowTaskRelativeTresholds.get(job));
  return result;
}
Example 4
Source File: TestMRApps.java From hadoop with Apache License 2.0
@Test(timeout = 120000)
public void testTaskIDtoString() {
  TaskId tid = RecordFactoryProvider.getRecordFactory(null)
      .newRecordInstance(TaskId.class);
  tid.setJobId(RecordFactoryProvider.getRecordFactory(null)
      .newRecordInstance(JobId.class));
  tid.getJobId().setAppId(ApplicationId.newInstance(0, 0));
  tid.setTaskType(TaskType.MAP);
  TaskType type = tid.getTaskType();
  System.err.println(type);
  type = TaskType.REDUCE;
  System.err.println(type);
  System.err.println(tid.getTaskType());
  assertEquals("task_0_0000_m_000000", MRApps.toString(tid));
  tid.setTaskType(TaskType.REDUCE);
  assertEquals("task_0_0000_r_000000", MRApps.toString(tid));
}
Example 5
Source File: HsTaskPage.java From hadoop with Apache License 2.0
/**
 * @return The end of the JS map that is the jquery datatable config for the
 *         attempts table.
 */
private String attemptsTableInit() {
  TaskType type = null;
  String symbol = $(TASK_TYPE);
  if (!symbol.isEmpty()) {
    type = MRApps.taskType(symbol);
  } else {
    TaskId taskID = MRApps.toTaskID($(TASK_ID));
    type = taskID.getTaskType();
  }
  StringBuilder b = tableInit()
      .append(", 'aaData': attemptsTableData")
      .append(", bDeferRender: true")
      .append(", bProcessing: true")
      .append("\n,aoColumnDefs:[\n")

      // logs column should not be filterable (it includes container ID
      // which may pollute searches)
      .append("\n{'aTargets': [ 4 ]")
      .append(", 'bSearchable': false }")

      .append("\n, {'sType':'numeric', 'aTargets': [ 0 ]")
      .append(", 'mRender': parseHadoopAttemptID }")

      .append("\n, {'sType':'numeric', 'aTargets': [ 5, 6")
      // Column numbers are different for maps and reduces
      .append(type == TaskType.REDUCE ? ", 7, 8" : "")
      .append(" ], 'mRender': renderHadoopDate }")

      .append("\n, {'sType':'numeric', 'aTargets': [")
      .append(type == TaskType.REDUCE ? "9, 10, 11, 12" : "7")
      .append(" ], 'mRender': renderHadoopElapsedTime }]")

      // Sort by id upon page load
      .append("\n, aaSorting: [[0, 'asc']]")
      .append("}");
  return b.toString();
}
Example 6
Source File: StartEndTimesBase.java From big-c with Apache License 2.0
(The big-c version of this method is identical to Example 3 above.)
Example 7
Source File: TestMRApps.java From big-c with Apache License 2.0
(The big-c version of this test is identical to Example 4 above.)
Example 8
Source File: HsTaskPage.java From big-c with Apache License 2.0
(The big-c version of this method is identical to Example 5 above.)
Example 9
Source File: MockJobs.java From hadoop with Apache License 2.0
public static Task newTask(JobId jid, int i, int m,
    final boolean hasFailedTasks) {
  final TaskId tid = Records.newRecord(TaskId.class);
  tid.setJobId(jid);
  tid.setId(i);
  tid.setTaskType(TASK_TYPES.next());
  final TaskReport report = newTaskReport(tid);
  final Map<TaskAttemptId, TaskAttempt> attempts = newTaskAttempts(tid, m);

  return new Task() {
    @Override
    public TaskId getID() {
      return tid;
    }

    @Override
    public TaskReport getReport() {
      return report;
    }

    @Override
    public Counters getCounters() {
      if (hasFailedTasks) {
        return null;
      }
      return new Counters(TypeConverter.fromYarn(report.getCounters()));
    }

    @Override
    public float getProgress() {
      return report.getProgress();
    }

    @Override
    public TaskType getType() {
      // The mock Task simply exposes the type stored on its TaskId.
      return tid.getTaskType();
    }

    @Override
    public Map<TaskAttemptId, TaskAttempt> getAttempts() {
      return attempts;
    }

    @Override
    public TaskAttempt getAttempt(TaskAttemptId attemptID) {
      return attempts.get(attemptID);
    }

    @Override
    public boolean isFinished() {
      switch (report.getTaskState()) {
        case SUCCEEDED:
        case KILLED:
        case FAILED:
          return true;
      }
      return false;
    }

    @Override
    public boolean canCommit(TaskAttemptId taskAttemptID) {
      return false;
    }

    @Override
    public TaskState getState() {
      return report.getTaskState();
    }
  };
}
Example 10
Source File: MockJobs.java From big-c with Apache License 2.0
(The big-c version of this method is identical to Example 9 above.)