org.apache.hadoop.mapred.lib.db.DBInputFormat.NullDBWritable Java Examples
The following examples show how to use
org.apache.hadoop.mapred.lib.db.DBInputFormat.NullDBWritable.
The original project, source file, and license are noted above each example.
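Before the examples, here is a minimal sketch of how NullDBWritable might be wired into a classic mapred job. This snippet is not taken from any of the projects below; the JDBC driver class, connection URL, table, and column names are placeholders.

import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.lib.db.DBConfiguration;
import org.apache.hadoop.mapred.lib.db.DBInputFormat;
import org.apache.hadoop.mapred.lib.db.DBInputFormat.NullDBWritable;

public class NullDBWritableJobSetup {
  public static void main(String[] args) {
    JobConf job = new JobConf();
    job.setInputFormat(DBInputFormat.class);

    // Placeholder driver class and JDBC URL.
    DBConfiguration.configureDB(job,
        "org.hsqldb.jdbc.JDBCDriver", "jdbc:hsqldb:mem:testdb");

    // NullDBWritable discards row contents, so it is handy when only row
    // counts or split boundaries matter; it is also the default input class
    // when none is configured (see Examples #5 and #6).
    DBInputFormat.setInput(job, NullDBWritable.class,
        "employees",        // table name (placeholder)
        null,               // conditions
        "id",               // orderBy column
        "id", "name");      // field names (placeholders)
  }
}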
Example #1
Source File: TestDBInputFormat.java From hadoop with Apache License 2.0
/**
 * Test the DBInputFormat class. The class should split the result into chunks.
 * @throws Exception
 */
@Test(timeout = 10000)
public void testDBInputFormat() throws Exception {
  JobConf configuration = new JobConf();
  setupDriver(configuration);

  DBInputFormat<NullDBWritable> format = new DBInputFormat<NullDBWritable>();
  format.setConf(configuration);
  format.setConf(configuration);
  DBInputFormat.DBInputSplit splitter = new DBInputFormat.DBInputSplit(1, 10);
  Reporter reporter = mock(Reporter.class);
  RecordReader<LongWritable, NullDBWritable> reader = format.getRecordReader(
      splitter, configuration, reporter);

  configuration.setInt(MRJobConfig.NUM_MAPS, 3);
  InputSplit[] lSplits = format.getSplits(configuration, 3);
  assertEquals(5, lSplits[0].getLength());
  assertEquals(3, lSplits.length);

  // test reader: some simple checks
  assertEquals(LongWritable.class, reader.createKey().getClass());
  assertEquals(0, reader.getPos());
  assertEquals(0, reader.getProgress(), 0.001);
  reader.close();
}
Example #2
Source File: TestDBInputFormat.java From hadoop with Apache License 2.0
/**
 * Test DBRecordReader. The reader should create keys and values and track its position.
 */
@SuppressWarnings("unchecked")
@Test(timeout = 5000)
public void testDBRecordReader() throws Exception {
  JobConf job = mock(JobConf.class);
  DBConfiguration dbConfig = mock(DBConfiguration.class);
  String[] fields = { "field1", "filed2" };

  @SuppressWarnings("rawtypes")
  DBRecordReader reader = new DBInputFormat<NullDBWritable>().new DBRecordReader(
      new DBInputSplit(), NullDBWritable.class, job,
      DriverForTest.getConnection(), dbConfig, "condition", fields, "table");

  LongWritable key = reader.createKey();
  assertEquals(0, key.get());
  DBWritable value = reader.createValue();
  assertEquals(
      "org.apache.hadoop.mapred.lib.db.DBInputFormat$NullDBWritable",
      value.getClass().getName());
  assertEquals(0, reader.getPos());
  assertFalse(reader.next(key, value));
}
Example #3
Source File: TestDBInputFormat.java From big-c with Apache License 2.0
/**
 * Test the DBInputFormat class. The class should split the result into chunks.
 * @throws Exception
 */
@Test(timeout = 10000)
public void testDBInputFormat() throws Exception {
  JobConf configuration = new JobConf();
  setupDriver(configuration);

  DBInputFormat<NullDBWritable> format = new DBInputFormat<NullDBWritable>();
  format.setConf(configuration);
  format.setConf(configuration);
  DBInputFormat.DBInputSplit splitter = new DBInputFormat.DBInputSplit(1, 10);
  Reporter reporter = mock(Reporter.class);
  RecordReader<LongWritable, NullDBWritable> reader = format.getRecordReader(
      splitter, configuration, reporter);

  configuration.setInt(MRJobConfig.NUM_MAPS, 3);
  InputSplit[] lSplits = format.getSplits(configuration, 3);
  assertEquals(5, lSplits[0].getLength());
  assertEquals(3, lSplits.length);

  // test reader: some simple checks
  assertEquals(LongWritable.class, reader.createKey().getClass());
  assertEquals(0, reader.getPos());
  assertEquals(0, reader.getProgress(), 0.001);
  reader.close();
}
Example #4
Source File: TestDBInputFormat.java From big-c with Apache License 2.0
/**
 * Test DBRecordReader. The reader should create keys and values and track its position.
 */
@SuppressWarnings("unchecked")
@Test(timeout = 5000)
public void testDBRecordReader() throws Exception {
  JobConf job = mock(JobConf.class);
  DBConfiguration dbConfig = mock(DBConfiguration.class);
  String[] fields = { "field1", "filed2" };

  @SuppressWarnings("rawtypes")
  DBRecordReader reader = new DBInputFormat<NullDBWritable>().new DBRecordReader(
      new DBInputSplit(), NullDBWritable.class, job,
      DriverForTest.getConnection(), dbConfig, "condition", fields, "table");

  LongWritable key = reader.createKey();
  assertEquals(0, key.get());
  DBWritable value = reader.createValue();
  assertEquals(
      "org.apache.hadoop.mapred.lib.db.DBInputFormat$NullDBWritable",
      value.getClass().getName());
  assertEquals(0, reader.getPos());
  assertFalse(reader.next(key, value));
}
Example #5
Source File: DBConfiguration.java From RDFS with Apache License 2.0
Class<?> getInputClass() {
  return job.getClass(DBConfiguration.INPUT_CLASS_PROPERTY, NullDBWritable.class);
}
Example #6
Source File: DBConfiguration.java From hadoop-gpu with Apache License 2.0
Class<?> getInputClass() {
  return job.getClass(DBConfiguration.INPUT_CLASS_PROPERTY, NullDBWritable.class);
}
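Examples #5 and #6 show where NullDBWritable acts as the fallback: if a job never sets DBConfiguration.INPUT_CLASS_PROPERTY, getInputClass() resolves to NullDBWritable.class. Below is a minimal sketch of that lookup, assuming the property constant is accessible as in the examples above; the class name and print statement are only illustrative.

import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.lib.db.DBConfiguration;
import org.apache.hadoop.mapred.lib.db.DBInputFormat.NullDBWritable;

public class InputClassDefaultCheck {
  public static void main(String[] args) {
    JobConf job = new JobConf();

    // No input class has been configured, so the same lookup used by
    // DBConfiguration.getInputClass() resolves to the NullDBWritable default.
    Class<?> inputClass =
        job.getClass(DBConfiguration.INPUT_CLASS_PROPERTY, NullDBWritable.class);

    // Expected to print something like ...DBInputFormat$NullDBWritable.
    System.out.println(inputClass.getName());
  }
}

Configuring an input class (for example through DBInputFormat.setInput, as in the sketch near the top of this page) overrides this default.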