com.mongodb.hadoop.MongoInputFormat Java Examples
The following examples show how to use
com.mongodb.hadoop.MongoInputFormat.
You can vote up the examples you like or vote down the ones you don't like,
and you can go to the original project or source file by following the links above each example. You may also check out the related API usage on the sidebar.
Example #1
Source File: BulkImportJob.java From zerowing with MIT License | 5 votes |
private void setupJob() {
    // Point the job at the MongoDB input collection and allow input splits
    // to be read from secondary replica-set members.
    MongoConfigUtil.setInputURI(getConfiguration(), _mongoURI);
    MongoConfigUtil.setReadSplitsFromSecondary(getConfiguration(), true);

    // Read documents via MongoInputFormat and map each one into an HBase Put
    // keyed by an ImmutableBytesWritable row key.
    _job.setInputFormatClass(MongoInputFormat.class);
    _job.setMapperClass(BulkImportMapper.class);
    _job.setMapOutputKeyClass(ImmutableBytesWritable.class);
    _job.setMapOutputValueClass(Put.class);
}
Example #2
Source File: MongoExtractor.java From deep-spark with Apache License 2.0 | 4 votes |
/**
 * Creates a Mongo extractor wired to the mongo-hadoop input and output formats.
 */
public MongoExtractor() {
    super();
    // Independent assignments: each format is constructed with its defaults.
    this.outputFormat = new MongoOutputFormat();
    this.inputFormat = new MongoInputFormat();
}
Example #3
Source File: MapReduceExercise.java From mongodb-hadoop-workshop with Apache License 2.0 | 4 votes |
public static void main(String[] args) throws IOException, InterruptedException, ClassNotFoundException { if(args.length < 3) { System.err.println("Usage: MapReduceExercise " + "[mongodb input uri] " + "[mongodb output uri] " + "update=[true or false]"); System.err.println("Example: MapReduceExercise " + "mongodb://127.0.0.1:27017/movielens.ratings " + "mongodb://127.0.0.1:27017/movielens.ratings.stats update=false"); System.err.println("Example: MapReduceExercise " + "mongodb://127.0.0.1:27017/movielens.ratings " + "mongodb://127.0.0.1:27017/movielens.movies update=true"); System.exit(-1); } Class outputValueClass = BSONWritable.class; Class reducerClass = Reduce.class; if(args[2].equals("update=true")) { outputValueClass = MongoUpdateWritable.class; reducerClass = ReduceUpdater.class; } Configuration conf = new Configuration(); // Set MongoDB-specific configuration items conf.setClass("mongo.job.mapper", Map.class, Mapper.class); conf.setClass("mongo.job.reducer", reducerClass, Reducer.class); conf.setClass("mongo.job.mapper.output.key", IntWritable.class, Object.class); conf.setClass("mongo.job.mapper.output.value", DoubleWritable.class, Object.class); conf.setClass("mongo.job.output.key", NullWritable.class, Object.class); conf.setClass("mongo.job.output.value", outputValueClass, Object.class); conf.set("mongo.input.uri", args[0]); conf.set("mongo.output.uri", args[1]); Job job = Job.getInstance(conf); // Set Hadoop-specific job parameters job.setInputFormatClass(MongoInputFormat.class); job.setOutputFormatClass(MongoOutputFormat.class); job.setMapOutputKeyClass(IntWritable.class); job.setMapOutputValueClass(DoubleWritable.class); job.setOutputKeyClass(NullWritable.class); job.setOutputValueClass(outputValueClass); job.setMapperClass(Map.class); job.setReducerClass(reducerClass); job.setJarByClass(MapReduceExercise.class); job.submit(); }