Java Code Examples for org.apache.parquet.column.ParquetProperties.WriterVersion#values()
The following examples show how to use
org.apache.parquet.column.ParquetProperties.WriterVersion#values().
Each example notes the project and source file it comes from.
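WriterVersion is the enum in ParquetProperties that selects between the Parquet format v1 and v2 page layouts (PARQUET_1_0 and PARQUET_2_0), so values() is mostly used to parameterize tests or tools over every supported writer format. As a quick orientation, here is a minimal sketch (not taken from any of the projects below) that simply iterates over the enum:

import org.apache.parquet.column.ParquetProperties.WriterVersion;

public class ListWriterVersions {
  public static void main(String[] args) {
    // Iterate over every writer version the library knows about
    // (PARQUET_1_0 and PARQUET_2_0 in current parquet-mr releases).
    for (WriterVersion version : WriterVersion.values()) {
      System.out.println(version.name());
    }
  }
}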
Example 1
Source File: FileEncodingsIT.java, from parquet-mr (Apache License 2.0)
@Test
public void testFileEncodingsWithoutDictionary() throws Exception {
  final boolean DISABLE_DICTIONARY = false;
  List<?> randomValues;
  randomValues = generateRandomValues(this.paramTypeName, RECORD_COUNT);

  /* Run an encoding test per each writer version.
   * This loop will make sure to test future writer versions added to WriterVersion enum.
   */
  for (WriterVersion writerVersion : WriterVersion.values()) {
    System.out.println(String.format("Testing %s/%s/%s encodings using ROW_GROUP_SIZE=%d PAGE_SIZE=%d",
        writerVersion, this.paramTypeName, this.compression, TEST_ROW_GROUP_SIZE, TEST_PAGE_SIZE));

    Path parquetFile = createTempFile();
    writeValuesToFile(parquetFile, this.paramTypeName, randomValues,
        TEST_ROW_GROUP_SIZE, TEST_PAGE_SIZE, DISABLE_DICTIONARY, writerVersion);
    PageGroupValidator.validatePages(parquetFile, randomValues);
  }
}
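writeValuesToFile is a test helper defined elsewhere in FileEncodingsIT and not shown here; presumably it passes the writerVersion under test through to the writer's builder. A hedged sketch of that wiring, using ExampleParquetWriter from parquet-hadoop (the helper name and parameter list here are illustrative, not the actual helper):

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.parquet.column.ParquetProperties.WriterVersion;
import org.apache.parquet.example.data.Group;
import org.apache.parquet.hadoop.ParquetWriter;
import org.apache.parquet.hadoop.example.ExampleParquetWriter;
import org.apache.parquet.schema.MessageType;

public class WriterVersionWiring {
  // Illustrative stand-in for a helper like writeValuesToFile: the key point is
  // that the WriterVersion obtained from values() ends up on the writer builder.
  static ParquetWriter<Group> openWriter(Path file, MessageType schema, Configuration conf,
      int rowGroupSize, int pageSize, boolean enableDictionary, WriterVersion version)
      throws IOException {
    return ExampleParquetWriter.builder(file)
        .withConf(conf)
        .withType(schema)
        .withRowGroupSize(rowGroupSize)
        .withPageSize(pageSize)
        .withDictionaryEncoding(enableDictionary)
        .withWriterVersion(version)
        .build();
  }
}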
Example 2
Source File: FileEncodingsIT.java, from parquet-mr (Apache License 2.0)
@Test
public void testFileEncodingsWithDictionary() throws Exception {
  final boolean ENABLE_DICTIONARY = true;
  List<?> dictionaryValues = generateDictionaryValues(this.paramTypeName, RECORD_COUNT);

  /* Run an encoding test per each writer version.
   * This loop will make sure to test future writer versions added to WriterVersion enum.
   */
  for (WriterVersion writerVersion : WriterVersion.values()) {
    System.out.println(String.format("Testing %s/%s/%s + DICTIONARY encodings using ROW_GROUP_SIZE=%d PAGE_SIZE=%d",
        writerVersion, this.paramTypeName, this.compression, TEST_ROW_GROUP_SIZE, TEST_PAGE_SIZE));

    Path parquetFile = createTempFile();
    writeValuesToFile(parquetFile, this.paramTypeName, dictionaryValues,
        TEST_ROW_GROUP_SIZE, TEST_PAGE_SIZE, ENABLE_DICTIONARY, writerVersion);
    PageGroupValidator.validatePages(parquetFile, dictionaryValues);
  }
}
Example 3
Source File: TestParquetWriterNewPage.java, from parquet-mr (Apache License 2.0)
@Test
public void test() throws Exception {
  Configuration conf = new Configuration();
  Path root = new Path("target/tests/TestParquetWriter/");
  FileSystem fs = root.getFileSystem(conf);
  if (fs.exists(root)) {
    fs.delete(root, true);
  }
  fs.mkdirs(root);

  MessageType schema = parseMessageType(
      "message test { "
      + "required binary binary_field; "
      + "required int32 int32_field; "
      + "required int64 int64_field; "
      + "required boolean boolean_field; "
      + "required float float_field; "
      + "required double double_field; "
      + "required fixed_len_byte_array(3) flba_field; "
      + "required int96 int96_field; "
      + "optional binary null_field; "
      + "} ");
  GroupWriteSupport.setSchema(schema, conf);
  SimpleGroupFactory f = new SimpleGroupFactory(schema);

  Map<String, Encoding> expected = new HashMap<String, Encoding>();
  expected.put("10-" + PARQUET_1_0, PLAIN_DICTIONARY);
  expected.put("1000-" + PARQUET_1_0, PLAIN);
  expected.put("10-" + PARQUET_2_0, RLE_DICTIONARY);
  expected.put("1000-" + PARQUET_2_0, DELTA_BYTE_ARRAY);

  for (int modulo : asList(10, 1000)) {
    for (WriterVersion version : WriterVersion.values()) {
      Path file = new Path(root, version.name() + "_" + modulo);
      ParquetWriter<Group> writer = new ParquetWriter<Group>(
          file, new GroupWriteSupport(), UNCOMPRESSED, 1024, 1024, 512, true, false, version, conf);
      for (int i = 0; i < 1000; i++) {
        writer.write(
            f.newGroup()
                .append("binary_field", "test" + (i % modulo))
                .append("int32_field", 32)
                .append("int64_field", 64l)
                .append("boolean_field", true)
                .append("float_field", 1.0f)
                .append("double_field", 2.0d)
                .append("flba_field", "foo")
                .append("int96_field", Binary.fromConstantByteArray(new byte[12])));
      }
      writer.close();

      ParquetReader<Group> reader = ParquetReader.builder(new GroupReadSupport(), file).withConf(conf).build();
      for (int i = 0; i < 1000; i++) {
        Group group = reader.read();
        assertEquals("test" + (i % modulo), group.getBinary("binary_field", 0).toStringUsingUTF8());
        assertEquals(32, group.getInteger("int32_field", 0));
        assertEquals(64l, group.getLong("int64_field", 0));
        assertEquals(true, group.getBoolean("boolean_field", 0));
        assertEquals(1.0f, group.getFloat("float_field", 0), 0.001);
        assertEquals(2.0d, group.getDouble("double_field", 0), 0.001);
        assertEquals("foo", group.getBinary("flba_field", 0).toStringUsingUTF8());
        assertEquals(Binary.fromConstantByteArray(new byte[12]), group.getInt96("int96_field", 0));
        assertEquals(0, group.getFieldRepetitionCount("null_field"));
      }
      reader.close();

      ParquetMetadata footer = readFooter(conf, file, NO_FILTER);
      for (BlockMetaData blockMetaData : footer.getBlocks()) {
        for (ColumnChunkMetaData column : blockMetaData.getColumns()) {
          if (column.getPath().toDotString().equals("binary_field")) {
            String key = modulo + "-" + version;
            Encoding expectedEncoding = expected.get(key);
            assertTrue(
                key + ":" + column.getEncodings() + " should contain " + expectedEncoding,
                column.getEncodings().contains(expectedEncoding));
          }
        }
      }
    }
  }
}
Example 4
Source File: TestParquetWriter.java, from parquet-mr (Apache License 2.0)
@Test
public void test() throws Exception {
  Configuration conf = new Configuration();
  Path root = new Path("target/tests/TestParquetWriter/");
  enforceEmptyDir(conf, root);

  MessageType schema = parseMessageType(
      "message test { "
      + "required binary binary_field; "
      + "required int32 int32_field; "
      + "required int64 int64_field; "
      + "required boolean boolean_field; "
      + "required float float_field; "
      + "required double double_field; "
      + "required fixed_len_byte_array(3) flba_field; "
      + "required int96 int96_field; "
      + "} ");
  GroupWriteSupport.setSchema(schema, conf);
  SimpleGroupFactory f = new SimpleGroupFactory(schema);

  Map<String, Encoding> expected = new HashMap<String, Encoding>();
  expected.put("10-" + PARQUET_1_0, PLAIN_DICTIONARY);
  expected.put("1000-" + PARQUET_1_0, PLAIN);
  expected.put("10-" + PARQUET_2_0, RLE_DICTIONARY);
  expected.put("1000-" + PARQUET_2_0, DELTA_BYTE_ARRAY);

  for (int modulo : asList(10, 1000)) {
    for (WriterVersion version : WriterVersion.values()) {
      Path file = new Path(root, version.name() + "_" + modulo);
      ParquetWriter<Group> writer = new ParquetWriter<Group>(
          file, new GroupWriteSupport(), UNCOMPRESSED, 1024, 1024, 512, true, false, version, conf);
      for (int i = 0; i < 1000; i++) {
        writer.write(
            f.newGroup()
                .append("binary_field", "test" + (i % modulo))
                .append("int32_field", 32)
                .append("int64_field", 64l)
                .append("boolean_field", true)
                .append("float_field", 1.0f)
                .append("double_field", 2.0d)
                .append("flba_field", "foo")
                .append("int96_field", Binary.fromConstantByteArray(new byte[12])));
      }
      writer.close();

      ParquetReader<Group> reader = ParquetReader.builder(new GroupReadSupport(), file).withConf(conf).build();
      for (int i = 0; i < 1000; i++) {
        Group group = reader.read();
        assertEquals("test" + (i % modulo), group.getBinary("binary_field", 0).toStringUsingUTF8());
        assertEquals(32, group.getInteger("int32_field", 0));
        assertEquals(64l, group.getLong("int64_field", 0));
        assertEquals(true, group.getBoolean("boolean_field", 0));
        assertEquals(1.0f, group.getFloat("float_field", 0), 0.001);
        assertEquals(2.0d, group.getDouble("double_field", 0), 0.001);
        assertEquals("foo", group.getBinary("flba_field", 0).toStringUsingUTF8());
        assertEquals(Binary.fromConstantByteArray(new byte[12]), group.getInt96("int96_field", 0));
      }
      reader.close();

      ParquetMetadata footer = readFooter(conf, file, NO_FILTER);
      for (BlockMetaData blockMetaData : footer.getBlocks()) {
        for (ColumnChunkMetaData column : blockMetaData.getColumns()) {
          if (column.getPath().toDotString().equals("binary_field")) {
            String key = modulo + "-" + version;
            Encoding expectedEncoding = expected.get(key);
            assertTrue(
                key + ":" + column.getEncodings() + " should contain " + expectedEncoding,
                column.getEncodings().contains(expectedEncoding));
          }
        }
      }

      assertEquals("Object model property should be example",
          "example",
          footer.getFileMetaData().getKeyValueMetaData()
              .get(ParquetWriter.OBJECT_MODEL_NAME_PROP));
    }
  }
}
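A side note on the footer check above: readFooter(conf, file, NO_FILTER) is presumably a static import of ParquetFileReader.readFooter, which is deprecated in recent parquet-mr releases. The same per-column encoding information can be read through ParquetFileReader.open; a small sketch, assuming a recent parquet-hadoop on the classpath:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.parquet.hadoop.ParquetFileReader;
import org.apache.parquet.hadoop.metadata.BlockMetaData;
import org.apache.parquet.hadoop.metadata.ColumnChunkMetaData;
import org.apache.parquet.hadoop.metadata.ParquetMetadata;
import org.apache.parquet.hadoop.util.HadoopInputFile;

public class PrintColumnEncodings {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    Path file = new Path(args[0]);
    // Open the file and report which encodings each column chunk actually used.
    try (ParquetFileReader reader = ParquetFileReader.open(HadoopInputFile.fromPath(file, conf))) {
      ParquetMetadata footer = reader.getFooter();
      for (BlockMetaData block : footer.getBlocks()) {
        for (ColumnChunkMetaData column : block.getColumns()) {
          System.out.println(column.getPath().toDotString() + " -> " + column.getEncodings());
        }
      }
    }
  }
}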