Java Code Examples for org.apache.hadoop.mapred.JobConf#setMaxMapAttempts()
The following examples show how to use
org.apache.hadoop.mapred.JobConf#setMaxMapAttempts() .
You can vote up the examples you find helpful or vote down the ones you don't,
and you can go to the original project or source file by following the link above each example. You may also check out the related API usage on the sidebar.
Example 1
Source File: NNBench.java From hadoop with Apache License 2.0 | 8 votes |
/** * Run the test * * @throws IOException on error */ public static void runTests() throws IOException { config.setLong("io.bytes.per.checksum", bytesPerChecksum); JobConf job = new JobConf(config, NNBench.class); job.setJobName("NNBench-" + operation); FileInputFormat.setInputPaths(job, new Path(baseDir, CONTROL_DIR_NAME)); job.setInputFormat(SequenceFileInputFormat.class); // Explicitly set number of max map attempts to 1. job.setMaxMapAttempts(1); // Explicitly turn off speculative execution job.setSpeculativeExecution(false); job.setMapperClass(NNBenchMapper.class); job.setReducerClass(NNBenchReducer.class); FileOutputFormat.setOutputPath(job, new Path(baseDir, OUTPUT_DIR_NAME)); job.setOutputKeyClass(Text.class); job.setOutputValueClass(Text.class); job.setNumReduceTasks((int) numberOfReduces); JobClient.runJob(job); }
Example 2
Source File: NNBench.java From big-c with Apache License 2.0 | 6 votes |
/** * Run the test * * @throws IOException on error */ public static void runTests() throws IOException { config.setLong("io.bytes.per.checksum", bytesPerChecksum); JobConf job = new JobConf(config, NNBench.class); job.setJobName("NNBench-" + operation); FileInputFormat.setInputPaths(job, new Path(baseDir, CONTROL_DIR_NAME)); job.setInputFormat(SequenceFileInputFormat.class); // Explicitly set number of max map attempts to 1. job.setMaxMapAttempts(1); // Explicitly turn off speculative execution job.setSpeculativeExecution(false); job.setMapperClass(NNBenchMapper.class); job.setReducerClass(NNBenchReducer.class); FileOutputFormat.setOutputPath(job, new Path(baseDir, OUTPUT_DIR_NAME)); job.setOutputKeyClass(Text.class); job.setOutputValueClass(Text.class); job.setNumReduceTasks((int) numberOfReduces); JobClient.runJob(job); }
Example 3
Source File: NNBench.java From RDFS with Apache License 2.0 | 6 votes |
/** * Run the test * * @throws IOException on error */ public static void runTests(Configuration config) throws IOException { config.setLong("io.bytes.per.checksum", bytesPerChecksum); JobConf job = new JobConf(config, NNBench.class); job.setJobName("NNBench-" + operation); FileInputFormat.setInputPaths(job, new Path(baseDir, CONTROL_DIR_NAME)); job.setInputFormat(SequenceFileInputFormat.class); // Explicitly set number of max map attempts to 1. job.setMaxMapAttempts(1); // Explicitly turn off speculative execution job.setSpeculativeExecution(false); job.setMapperClass(NNBenchMapper.class); job.setReducerClass(NNBenchReducer.class); FileOutputFormat.setOutputPath(job, new Path(baseDir, OUTPUT_DIR_NAME)); job.setOutputKeyClass(Text.class); job.setOutputValueClass(Text.class); job.setNumReduceTasks((int) numberOfReduces); JobClient.runJob(job); }
Example 4
Source File: NNBench.java From hadoop-gpu with Apache License 2.0 | 6 votes |
/** * Run the test * * @throws IOException on error */ public static void runTests() throws IOException { config.setLong("io.bytes.per.checksum", bytesPerChecksum); JobConf job = new JobConf(config, NNBench.class); job.setJobName("NNBench-" + operation); FileInputFormat.setInputPaths(job, new Path(baseDir, CONTROL_DIR_NAME)); job.setInputFormat(SequenceFileInputFormat.class); // Explicitly set number of max map attempts to 1. job.setMaxMapAttempts(1); // Explicitly turn off speculative execution job.setSpeculativeExecution(false); job.setMapperClass(NNBenchMapper.class); job.setReducerClass(NNBenchReducer.class); FileOutputFormat.setOutputPath(job, new Path(baseDir, OUTPUT_DIR_NAME)); job.setOutputKeyClass(Text.class); job.setOutputValueClass(Text.class); job.setNumReduceTasks((int) numberOfReduces); JobClient.runJob(job); }
Example 5
Source File: TestMROldApiJobs.java From hadoop with Apache License 2.0 | 5 votes |
/**
 * Runs a job whose mapper always fails and asserts that the submission
 * is reported as unsuccessful.
 *
 * @param conf   job configuration to populate and submit
 * @param inDir  job input directory
 * @param outDir job output directory
 */
public static void runJobFail(JobConf conf, Path inDir, Path outDir)
    throws IOException, InterruptedException {
  conf.setJobName("test-job-fail");
  conf.setJarByClass(FailMapper.class);
  conf.setMapperClass(FailMapper.class);
  conf.setReducerClass(IdentityReducer.class);
  // One attempt only: the failing map must fail the whole job.
  conf.setMaxMapAttempts(1);

  boolean succeeded = runJob(conf, inDir, outDir, 1, 0);
  Assert.assertFalse("Job expected to fail succeeded", succeeded);
}
Example 6
Source File: TestMROldApiJobs.java From big-c with Apache License 2.0 | 5 votes |
/**
 * Submits a deliberately failing job and verifies it does not succeed.
 *
 * @param conf   job configuration to populate and submit
 * @param inDir  job input directory
 * @param outDir job output directory
 */
public static void runJobFail(JobConf conf, Path inDir, Path outDir)
    throws IOException, InterruptedException {
  conf.setJobName("test-job-fail");

  // FailMapper guarantees map failure; with a single allowed attempt
  // the job cannot recover.
  conf.setMapperClass(FailMapper.class);
  conf.setJarByClass(FailMapper.class);
  conf.setReducerClass(IdentityReducer.class);
  conf.setMaxMapAttempts(1);

  final boolean jobSucceeded = runJob(conf, inDir, outDir, 1, 0);
  Assert.assertFalse("Job expected to fail succeeded", jobSucceeded);
}
Example 7
Source File: LoadGeneratorMR.java From hadoop with Apache License 2.0 | 4 votes |
/** * Based on args we submit the LoadGenerator as MR job. * Number of MapTasks is numMapTasks * @return exitCode for job submission */ private int submitAsMapReduce() { System.out.println("Running as a MapReduce job with " + numMapTasks + " mapTasks; Output to file " + mrOutDir); Configuration conf = new Configuration(getConf()); // First set all the args of LoadGenerator as Conf vars to pass to MR tasks conf.set(LG_ROOT , root.toString()); conf.setInt(LG_MAXDELAYBETWEENOPS, maxDelayBetweenOps); conf.setInt(LG_NUMOFTHREADS, numOfThreads); conf.set(LG_READPR, readProbs[0]+""); //Pass Double as string conf.set(LG_WRITEPR, writeProbs[0]+""); //Pass Double as string conf.setLong(LG_SEED, seed); //No idea what this is conf.setInt(LG_NUMMAPTASKS, numMapTasks); if (scriptFile == null && durations[0] <=0) { System.err.println("When run as a MapReduce job, elapsed Time or ScriptFile must be specified"); System.exit(-1); } conf.setLong(LG_ELAPSEDTIME, durations[0]); conf.setLong(LG_STARTTIME, startTime); if (scriptFile != null) { conf.set(LG_SCRIPTFILE , scriptFile); } conf.set(LG_FLAGFILE, flagFile.toString()); // Now set the necessary conf variables that apply to run MR itself. JobConf jobConf = new JobConf(conf, LoadGenerator.class); jobConf.setJobName("NNLoadGeneratorViaMR"); jobConf.setNumMapTasks(numMapTasks); jobConf.setNumReduceTasks(1); // 1 reducer to collect the results jobConf.setOutputKeyClass(Text.class); jobConf.setOutputValueClass(IntWritable.class); jobConf.setMapperClass(MapperThatRunsNNLoadGenerator.class); jobConf.setReducerClass(ReducerThatCollectsLGdata.class); jobConf.setInputFormat(DummyInputFormat.class); jobConf.setOutputFormat(TextOutputFormat.class); // Explicitly set number of max map attempts to 1. 
jobConf.setMaxMapAttempts(1); // Explicitly turn off speculative execution jobConf.setSpeculativeExecution(false); // This mapReduce job has no input but has output FileOutputFormat.setOutputPath(jobConf, new Path(mrOutDir)); try { JobClient.runJob(jobConf); } catch (IOException e) { System.err.println("Failed to run job: " + e.getMessage()); return -1; } return 0; }
Example 8
Source File: LoadGeneratorMR.java From big-c with Apache License 2.0 | 4 votes |
/** * Based on args we submit the LoadGenerator as MR job. * Number of MapTasks is numMapTasks * @return exitCode for job submission */ private int submitAsMapReduce() { System.out.println("Running as a MapReduce job with " + numMapTasks + " mapTasks; Output to file " + mrOutDir); Configuration conf = new Configuration(getConf()); // First set all the args of LoadGenerator as Conf vars to pass to MR tasks conf.set(LG_ROOT , root.toString()); conf.setInt(LG_MAXDELAYBETWEENOPS, maxDelayBetweenOps); conf.setInt(LG_NUMOFTHREADS, numOfThreads); conf.set(LG_READPR, readProbs[0]+""); //Pass Double as string conf.set(LG_WRITEPR, writeProbs[0]+""); //Pass Double as string conf.setLong(LG_SEED, seed); //No idea what this is conf.setInt(LG_NUMMAPTASKS, numMapTasks); if (scriptFile == null && durations[0] <=0) { System.err.println("When run as a MapReduce job, elapsed Time or ScriptFile must be specified"); System.exit(-1); } conf.setLong(LG_ELAPSEDTIME, durations[0]); conf.setLong(LG_STARTTIME, startTime); if (scriptFile != null) { conf.set(LG_SCRIPTFILE , scriptFile); } conf.set(LG_FLAGFILE, flagFile.toString()); // Now set the necessary conf variables that apply to run MR itself. JobConf jobConf = new JobConf(conf, LoadGenerator.class); jobConf.setJobName("NNLoadGeneratorViaMR"); jobConf.setNumMapTasks(numMapTasks); jobConf.setNumReduceTasks(1); // 1 reducer to collect the results jobConf.setOutputKeyClass(Text.class); jobConf.setOutputValueClass(IntWritable.class); jobConf.setMapperClass(MapperThatRunsNNLoadGenerator.class); jobConf.setReducerClass(ReducerThatCollectsLGdata.class); jobConf.setInputFormat(DummyInputFormat.class); jobConf.setOutputFormat(TextOutputFormat.class); // Explicitly set number of max map attempts to 1. 
jobConf.setMaxMapAttempts(1); // Explicitly turn off speculative execution jobConf.setSpeculativeExecution(false); // This mapReduce job has no input but has output FileOutputFormat.setOutputPath(jobConf, new Path(mrOutDir)); try { JobClient.runJob(jobConf); } catch (IOException e) { System.err.println("Failed to run job: " + e.getMessage()); return -1; } return 0; }