Java Code Examples for org.apache.hadoop.tools.rumen.JobStory#getNumberMaps()
The following examples show how to use org.apache.hadoop.tools.rumen.JobStory#getNumberMaps(). Each example is drawn from an open-source project; the source file, project, and license are noted above each snippet.
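Before the examples, the sketch below shows the kind of context in which getNumberMaps() is typically called: reading a Rumen job trace and querying each JobStory for its task counts. This is a minimal illustration, not taken from any of the projects below; it assumes Rumen's ZombieJobProducer API, and the trace path is hypothetical.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.tools.rumen.JobStory;
import org.apache.hadoop.tools.rumen.ZombieJobProducer;

public class TraceMapCounts {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // Hypothetical path to a Rumen-generated JSON job trace.
    Path trace = new Path("file:///tmp/job-trace.json");
    // Passing null for the cluster topology keeps the sketch self-contained.
    ZombieJobProducer producer = new ZombieJobProducer(trace, null, conf);
    try {
      JobStory job;
      while ((job = producer.getNextJob()) != null) {
        // getNumberMaps()/getNumberReduces() report the task counts recorded in the trace.
        System.out.println(job.getName() + ": maps=" + job.getNumberMaps()
            + ", reduces=" + job.getNumberReduces());
      }
    } finally {
      producer.close();
    }
  }
}

Gridmix's JobFactory (Examples 5 and 6 below) pulls its JobStory objects from a trace in a similar way before filtering them in getNextJobFiltered().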
Example 1
Source File: Statistics.java From hadoop with Apache License 2.0
/**
 * Generates a job stats.
 */
public static JobStats generateJobStats(Job job, JobStory jobdesc) {
  int seq = GridmixJob.getJobSeqId(job);
  // bail out if job description is missing for a job to be simulated
  if (seq >= 0 && jobdesc == null) {
    throw new IllegalArgumentException("JobStory not available for job "
                                       + job.getJobID());
  }

  int maps = -1;
  int reds = -1;
  if (jobdesc != null) {
    // Note that the ZombieJob will return a >= 0 value
    maps = jobdesc.getNumberMaps();
    reds = jobdesc.getNumberReduces();
  }
  return new JobStats(maps, reds, job);
}
Example 2
Source File: Statistics.java From big-c with Apache License 2.0
/**
 * Generates a job stats.
 */
public static JobStats generateJobStats(Job job, JobStory jobdesc) {
  int seq = GridmixJob.getJobSeqId(job);
  // bail out if job description is missing for a job to be simulated
  if (seq >= 0 && jobdesc == null) {
    throw new IllegalArgumentException("JobStory not available for job "
                                       + job.getJobID());
  }

  int maps = -1;
  int reds = -1;
  if (jobdesc != null) {
    // Note that the ZombieJob will return a >= 0 value
    maps = jobdesc.getNumberMaps();
    reds = jobdesc.getNumberReduces();
  }
  return new JobStats(maps, reds, job);
}
Example 3
Source File: TestGridmixSubmission.java From hadoop with Apache License 2.0
/**
 * Verifies that the given {@code JobStory} corresponds to the checked-in
 * WordCount {@code JobStory}. The verification is effected via JUnit
 * assertions.
 *
 * @param js the candidate JobStory.
 */
private void verifyWordCountJobStory(JobStory js) {
  assertNotNull("Null JobStory", js);
  String expectedJobStory = "WordCount:johndoe:default:1285322645148:3:1";
  String actualJobStory = js.getName() + ":" + js.getUser() + ":"
      + js.getQueueName() + ":" + js.getSubmissionTime() + ":"
      + js.getNumberMaps() + ":" + js.getNumberReduces();
  assertEquals("Unexpected JobStory", expectedJobStory, actualJobStory);
}
Example 4
Source File: TestGridmixSubmission.java From big-c with Apache License 2.0
/**
 * Verifies that the given {@code JobStory} corresponds to the checked-in
 * WordCount {@code JobStory}. The verification is effected via JUnit
 * assertions.
 *
 * @param js the candidate JobStory.
 */
private void verifyWordCountJobStory(JobStory js) {
  assertNotNull("Null JobStory", js);
  String expectedJobStory = "WordCount:johndoe:default:1285322645148:3:1";
  String actualJobStory = js.getName() + ":" + js.getUser() + ":"
      + js.getQueueName() + ":" + js.getSubmissionTime() + ":"
      + js.getNumberMaps() + ":" + js.getNumberReduces();
  assertEquals("Unexpected JobStory", expectedJobStory, actualJobStory);
}
Example 5
Source File: JobFactory.java From hadoop with Apache License 2.0
protected JobStory getNextJobFiltered() throws IOException {
  JobStory job = getNextJobFromTrace();
  // filter out the following jobs
  //  - unsuccessful jobs
  //  - jobs with missing submit-time
  //  - reduce only jobs
  // These jobs are not yet supported in Gridmix
  while (job != null &&
         (job.getOutcome() != Pre21JobHistoryConstants.Values.SUCCESS ||
          job.getSubmissionTime() < 0 ||
          job.getNumberMaps() == 0)) {
    if (LOG.isDebugEnabled()) {
      List<String> reason = new ArrayList<String>();
      if (job.getOutcome() != Pre21JobHistoryConstants.Values.SUCCESS) {
        reason.add("STATE (" + job.getOutcome().name() + ")");
      }
      if (job.getSubmissionTime() < 0) {
        reason.add("SUBMISSION-TIME (" + job.getSubmissionTime() + ")");
      }
      if (job.getNumberMaps() == 0) {
        reason.add("ZERO-MAPS-JOB");
      }
      // TODO This should never happen. Probably we missed something!
      if (reason.size() == 0) {
        reason.add("N/A");
      }
      LOG.debug("Ignoring job " + job.getJobID() + " from the input trace."
                + " Reason: " + StringUtils.join(reason, ","));
    }
    job = getNextJobFromTrace();
  }
  return null == job ? null : new FilterJobStory(job) {
    @Override
    public TaskInfo getTaskInfo(TaskType taskType, int taskNumber) {
      TaskInfo info = this.job.getTaskInfo(taskType, taskNumber);
      if (info != null) {
        info = new MinTaskInfo(info);
      } else {
        info = new MinTaskInfo(new TaskInfo(0, 0, 0, 0, 0));
      }
      return info;
    }
  };
}
Example 6
Source File: JobFactory.java From big-c with Apache License 2.0
protected JobStory getNextJobFiltered() throws IOException {
  JobStory job = getNextJobFromTrace();
  // filter out the following jobs
  //  - unsuccessful jobs
  //  - jobs with missing submit-time
  //  - reduce only jobs
  // These jobs are not yet supported in Gridmix
  while (job != null &&
         (job.getOutcome() != Pre21JobHistoryConstants.Values.SUCCESS ||
          job.getSubmissionTime() < 0 ||
          job.getNumberMaps() == 0)) {
    if (LOG.isDebugEnabled()) {
      List<String> reason = new ArrayList<String>();
      if (job.getOutcome() != Pre21JobHistoryConstants.Values.SUCCESS) {
        reason.add("STATE (" + job.getOutcome().name() + ")");
      }
      if (job.getSubmissionTime() < 0) {
        reason.add("SUBMISSION-TIME (" + job.getSubmissionTime() + ")");
      }
      if (job.getNumberMaps() == 0) {
        reason.add("ZERO-MAPS-JOB");
      }
      // TODO This should never happen. Probably we missed something!
      if (reason.size() == 0) {
        reason.add("N/A");
      }
      LOG.debug("Ignoring job " + job.getJobID() + " from the input trace."
                + " Reason: " + StringUtils.join(reason, ","));
    }
    job = getNextJobFromTrace();
  }
  return null == job ? null : new FilterJobStory(job) {
    @Override
    public TaskInfo getTaskInfo(TaskType taskType, int taskNumber) {
      TaskInfo info = this.job.getTaskInfo(taskType, taskNumber);
      if (info != null) {
        info = new MinTaskInfo(info);
      } else {
        info = new MinTaskInfo(new TaskInfo(0, 0, 0, 0, 0));
      }
      return info;
    }
  };
}
Example 7
Source File: SimulatorJobInProgress.java From RDFS with Apache License 2.0
@SuppressWarnings("deprecation") public SimulatorJobInProgress(JobID jobid, JobTracker jobtracker, JobConf default_conf, JobStory jobStory) { super(jobid, jobStory.getJobConf(), jobtracker); // jobSetupCleanupNeeded set to false in parent cstr, though // default is true restartCount = 0; jobSetupCleanupNeeded = false; this.memoryPerMap = conf.getMemoryForMapTask(); this.memoryPerReduce = conf.getMemoryForReduceTask(); this.maxTaskFailuresPerTracker = conf.getMaxTaskFailuresPerTracker(); this.jobId = jobid; String url = "http://" + jobtracker.getJobTrackerMachine() + ":" + jobtracker.getInfoPort() + "/jobdetails.jsp?jobid=" + jobid; this.jobtracker = jobtracker; this.conf = jobStory.getJobConf(); this.priority = conf.getJobPriority(); Path jobDir = jobtracker.getSystemDirectoryForJob(jobid); this.jobFile = new Path(jobDir, "job.xml"); this.status = new JobStatus(jobid, 0.0f, 0.0f, 0.0f, 0.0f, JobStatus.PREP, priority, conf.getUser()); this.profile = new JobProfile(jobStory.getUser(), jobid, this.jobFile .toString(), url, jobStory.getName(), conf.getQueueName()); this.startTime = JobTracker.getClock().getTime(); status.setStartTime(startTime); this.resourceEstimator = new ResourceEstimator(this); this.numMapTasks = jobStory.getNumberMaps(); this.numReduceTasks = jobStory.getNumberReduces(); this.taskCompletionEvents = new ArrayList<TaskCompletionEvent>(numMapTasks + numReduceTasks + 10); this.mapFailuresPercent = conf.getMaxMapTaskFailuresPercent(); this.reduceFailuresPercent = conf.getMaxReduceTaskFailuresPercent(); MetricsContext metricsContext = MetricsUtil.getContext("mapred"); this.jobMetrics = MetricsUtil.createRecord(metricsContext, "job"); this.jobMetrics.setTag("user", conf.getUser()); this.jobMetrics.setTag("sessionId", conf.getSessionId()); this.jobMetrics.setTag("jobName", conf.getJobName()); this.jobMetrics.setTag("jobId", jobid.toString()); this.maxLevel = jobtracker.getNumTaskCacheLevels(); this.anyCacheLevel = this.maxLevel + 1; this.nonLocalMaps = new LinkedList<TaskInProgress>(); this.nonLocalRunningMaps = new LinkedHashSet<TaskInProgress>(); this.runningMapCache = new IdentityHashMap<Node, Set<TaskInProgress>>(); this.nonRunningReduces = new LinkedList<TaskInProgress>(); this.runningReduces = new LinkedHashSet<TaskInProgress>(); this.slowTaskThreshold = Math.max(0.0f, conf.getFloat( "mapred.speculative.execution.slowTaskThreshold", 1.0f)); this.speculativeCap = conf.getFloat( "mapred.speculative.execution.speculativeCap", 0.1f); this.slowNodeThreshold = conf.getFloat( "mapred.speculative.execution.slowNodeThreshold", 1.0f); this.jobStory = jobStory; // this.jobHistory = this.jobtracker.getJobHistory(); }