Java Code Examples for org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.JobInfo#getFinishedMaps()
The following examples show how to use org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.JobInfo#getFinishedMaps().
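JobInfo#getFinishedMaps() reports the number of successfully completed map tasks recorded in a parsed job history file. As a minimal, self-contained sketch of where that JobInfo comes from (the history file path and FileSystem setup here are assumptions, not part of the examples below), a JobInfo can be obtained from JobHistoryParser and queried like this:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser;
import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.JobInfo;

public class FinishedMapsExample {
  public static void main(String[] args) throws Exception {
    // Assumed: args[0] points to a .jhist file written by the MR job history
    Path historyFile = new Path(args[0]);
    Configuration conf = new Configuration();
    FileSystem fs = historyFile.getFileSystem(conf);

    // Parse the history file and read the successful-map count
    JobHistoryParser parser = new JobHistoryParser(fs, historyFile);
    JobInfo jobInfo = parser.parse();
    System.out.println("Finished maps: " + jobInfo.getFinishedMaps());
  }
}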
Example 1
Source File: TestJobHistoryParsing.java From hadoop with Apache License 2.0
private long computeFinishedMaps(JobInfo jobInfo, int numMaps,
    int numSuccessfulMaps) {
  // When every map succeeded, the count from the history file can be trusted.
  if (numMaps == numSuccessfulMaps) {
    return jobInfo.getFinishedMaps();
  }

  // Otherwise recount by walking all tasks and counting those whose
  // status is SUCCEEDED.
  long numFinishedMaps = 0;
  Map<org.apache.hadoop.mapreduce.TaskID, TaskInfo> taskInfos = jobInfo
      .getAllTasks();
  for (TaskInfo taskInfo : taskInfos.values()) {
    if (TaskState.SUCCEEDED.toString().equals(taskInfo.getTaskStatus())) {
      ++numFinishedMaps;
    }
  }
  return numFinishedMaps;
}
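The helper trusts getFinishedMaps() only when all maps succeeded; otherwise it recounts SUCCEEDED tasks from getAllTasks(). A minimal sketch of how such a helper might be called from a test follows; the parser setup, variable names, and JUnit assertion are illustrative assumptions, not taken from the source file.

// Hypothetical call site; parser, numMaps, and numSuccessfulMaps are assumed
// to come from the surrounding test.
JobHistoryParser.JobInfo jobInfo = parser.parse();
long finishedMaps = computeFinishedMaps(jobInfo, numMaps, numSuccessfulMaps);
Assert.assertEquals("Unexpected number of finished maps",
    numSuccessfulMaps, finishedMaps);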
Example 2
Source File: TestJobHistoryParsing.java From big-c with Apache License 2.0
private long computeFinishedMaps(JobInfo jobInfo, int numMaps,
    int numSuccessfulMaps) {
  // When every map succeeded, the count from the history file can be trusted.
  if (numMaps == numSuccessfulMaps) {
    return jobInfo.getFinishedMaps();
  }

  // Otherwise recount by walking all tasks and counting those whose
  // status is SUCCEEDED.
  long numFinishedMaps = 0;
  Map<org.apache.hadoop.mapreduce.TaskID, TaskInfo> taskInfos = jobInfo
      .getAllTasks();
  for (TaskInfo taskInfo : taskInfos.values()) {
    if (TaskState.SUCCEEDED.toString().equals(taskInfo.getTaskStatus())) {
      ++numFinishedMaps;
    }
  }
  return numFinishedMaps;
}
Example 3
Source File: HistoryViewer.java From hadoop with Apache License 2.0
/** Generate analysis information for the parsed job */
public AnalyzedJob(JobInfo job) {
  Map<TaskID, JobHistoryParser.TaskInfo> tasks = job.getAllTasks();
  int finishedMaps = (int) job.getFinishedMaps();
  int finishedReduces = (int) job.getFinishedReduces();
  mapTasks = new JobHistoryParser.TaskAttemptInfo[finishedMaps];
  reduceTasks = new JobHistoryParser.TaskAttemptInfo[finishedReduces];
  int mapIndex = 0, reduceIndex = 0;
  avgMapTime = 0;
  avgReduceTime = 0;
  avgShuffleTime = 0;

  // Collect the first successful attempt of each task and accumulate run times.
  for (JobHistoryParser.TaskInfo task : tasks.values()) {
    Map<TaskAttemptID, JobHistoryParser.TaskAttemptInfo> attempts =
        task.getAllTaskAttempts();
    for (JobHistoryParser.TaskAttemptInfo attempt : attempts.values()) {
      if (attempt.getTaskStatus().
          equals(TaskStatus.State.SUCCEEDED.toString())) {
        long avgFinishTime = (attempt.getFinishTime() -
            attempt.getStartTime());
        if (attempt.getTaskType().equals(TaskType.MAP)) {
          mapTasks[mapIndex++] = attempt;
          avgMapTime += avgFinishTime;
        } else if (attempt.getTaskType().equals(TaskType.REDUCE)) {
          reduceTasks[reduceIndex++] = attempt;
          // For reduces, split the attempt into shuffle and reduce phases.
          avgShuffleTime += (attempt.getShuffleFinishTime() -
              attempt.getStartTime());
          avgReduceTime += (attempt.getFinishTime() -
              attempt.getShuffleFinishTime());
        }
        break;
      }
    }
  }
  // Convert the accumulated totals into per-task averages.
  if (finishedMaps > 0) {
    avgMapTime /= finishedMaps;
  }
  if (finishedReduces > 0) {
    avgReduceTime /= finishedReduces;
    avgShuffleTime /= finishedReduces;
  }
}
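The constructor only accumulates totals; callers read the results through the class's accessors. A hedged sketch of using it might look like this, assuming jobInfo was obtained from JobHistoryParser.parse() and that the getAvg*Time() accessors of HistoryViewer.AnalyzedJob are available as in the Hadoop source.

// Assumed: jobInfo comes from JobHistoryParser.parse(); times are in milliseconds.
HistoryViewer.AnalyzedJob analysis = new HistoryViewer.AnalyzedJob(jobInfo);
System.out.println("Average map time (ms):     " + analysis.getAvgMapTime());
System.out.println("Average reduce time (ms):  " + analysis.getAvgReduceTime());
System.out.println("Average shuffle time (ms): " + analysis.getAvgShuffleTime());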
Example 4
Source File: HistoryViewer.java From big-c with Apache License 2.0
/** Generate analysis information for the parsed job */
public AnalyzedJob(JobInfo job) {
  Map<TaskID, JobHistoryParser.TaskInfo> tasks = job.getAllTasks();
  int finishedMaps = (int) job.getFinishedMaps();
  int finishedReduces = (int) job.getFinishedReduces();
  mapTasks = new JobHistoryParser.TaskAttemptInfo[finishedMaps];
  reduceTasks = new JobHistoryParser.TaskAttemptInfo[finishedReduces];
  int mapIndex = 0, reduceIndex = 0;
  avgMapTime = 0;
  avgReduceTime = 0;
  avgShuffleTime = 0;

  // Collect the first successful attempt of each task and accumulate run times.
  for (JobHistoryParser.TaskInfo task : tasks.values()) {
    Map<TaskAttemptID, JobHistoryParser.TaskAttemptInfo> attempts =
        task.getAllTaskAttempts();
    for (JobHistoryParser.TaskAttemptInfo attempt : attempts.values()) {
      if (attempt.getTaskStatus().
          equals(TaskStatus.State.SUCCEEDED.toString())) {
        long avgFinishTime = (attempt.getFinishTime() -
            attempt.getStartTime());
        if (attempt.getTaskType().equals(TaskType.MAP)) {
          mapTasks[mapIndex++] = attempt;
          avgMapTime += avgFinishTime;
        } else if (attempt.getTaskType().equals(TaskType.REDUCE)) {
          reduceTasks[reduceIndex++] = attempt;
          // For reduces, split the attempt into shuffle and reduce phases.
          avgShuffleTime += (attempt.getShuffleFinishTime() -
              attempt.getStartTime());
          avgReduceTime += (attempt.getFinishTime() -
              attempt.getShuffleFinishTime());
        }
        break;
      }
    }
  }
  // Convert the accumulated totals into per-task averages.
  if (finishedMaps > 0) {
    avgMapTime /= finishedMaps;
  }
  if (finishedReduces > 0) {
    avgReduceTime /= finishedReduces;
    avgShuffleTime /= finishedReduces;
  }
}