Java Code Examples for org.apache.avro.generic.GenericData.Record#put()
The following examples show how to use
org.apache.avro.generic.GenericData.Record#put().
The project, source file, and license are noted above each example, so you can go back to the original code for full context.
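Before the examples, here is a minimal, self-contained sketch of the two put() overloads they rely on: setting a field by name and setting it by position. The schema and field names below are invented for illustration and do not come from any of the projects listed on this page.

import org.apache.avro.Schema;
import org.apache.avro.generic.GenericData.Record;

public class RecordPutSketch {

    public static void main(String[] args) {
        // Hypothetical two-field schema, parsed inline for illustration only.
        Schema schema = new Schema.Parser().parse(
                "{\"type\":\"record\",\"name\":\"User\",\"fields\":["
                        + "{\"name\":\"id\",\"type\":\"long\"},"
                        + "{\"name\":\"name\",\"type\":[\"null\",\"string\"],\"default\":null}]}");

        Record record = new Record(schema);
        record.put("id", 42L);       // set a field by its schema name
        record.put(1, "avro user");  // or by its position in the schema

        // Prints something like: {"id": 42, "name": "avro user"}
        System.out.println(record);
    }
}

Both overloads appear in the examples below: the Iceberg and NiFi tests mostly set fields by name, while the Talend components readers and the Parquet value reader set them by position.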
Example 1
Source File: TestReadProjection.java From iceberg with Apache License 2.0 | 6 votes |
@Test
public void testEmptyProjection() throws Exception {
  Schema schema = new Schema(
      Types.NestedField.required(0, "id", Types.LongType.get()),
      Types.NestedField.optional(1, "data", Types.StringType.get())
  );

  Record record = new Record(AvroSchemaUtil.convert(schema, "table"));
  record.put("id", 34L);
  record.put("data", "test");

  Record projected = writeAndRead("empty_projection", schema, schema.select(), record);

  Assert.assertNotNull("Should read a non-null record", projected);
  try {
    projected.get(0);
    Assert.fail("Should not retrieve value with ordinal 0");
  } catch (ArrayIndexOutOfBoundsException e) {
    // this is expected because there are no values
  }
}
Example 2
Source File: TestConvertAvroSchema.java From nifi with Apache License 2.0 | 6 votes |
private Record convertBasic(Record inputRecord, Locale locale) {
    Record result = new Record(OUTPUT_SCHEMA);
    result.put("id", Long.parseLong(inputRecord.get("id").toString()));
    result.put("color", inputRecord.get("primaryColor").toString());
    if (inputRecord.get("price") == null) {
        result.put("price", null);
    } else {
        final NumberFormat format = NumberFormat.getInstance(locale);
        double price;
        try {
            price = format.parse(inputRecord.get("price").toString()).doubleValue();
        } catch (ParseException e) {
            // Shouldn't happen
            throw new RuntimeException(e);
        }
        result.put("price", price);
    }
    return result;
}
Example 3
Source File: TestReadProjection.java From iceberg with Apache License 2.0 | 6 votes |
@Test
public void testFullProjection() throws Exception {
  Schema schema = new Schema(
      Types.NestedField.required(0, "id", Types.LongType.get()),
      Types.NestedField.optional(1, "data", Types.StringType.get())
  );

  Record record = new Record(AvroSchemaUtil.convert(schema, "table"));
  record.put("id", 34L);
  record.put("data", "test");

  Record projected = writeAndRead("full_projection", schema, schema, record);

  Assert.assertEquals("Should contain the correct id value", 34L, (long) projected.get("id"));

  int cmp = Comparators.charSequences()
      .compare("test", (CharSequence) projected.get("data"));
  Assert.assertTrue("Should contain the correct data value", cmp == 0);
}
Example 4
Source File: TestAvroRecordConverter.java From localization_nifi with Apache License 2.0 | 6 votes |
/**
 * Tests the case where we want to default map one field and explicitly map
 * another.
 */
@Test
public void testExplicitMapping() throws Exception {
    // We will convert s1 from string to long (or leave it null), ignore s2,
    // convert l1 from long to string, and leave l2 the same.
    Schema input = NESTED_RECORD_SCHEMA;
    Schema parent = NESTED_PARENT_SCHEMA;
    Schema output = UNNESTED_OUTPUT_SCHEMA;
    Map<String, String> mapping = ImmutableMap.of("parent.id", "parentId");

    AvroRecordConverter converter = new AvroRecordConverter(input, output, mapping);

    Record inputRecord = new Record(input);
    inputRecord.put("l1", 5L);
    inputRecord.put("s1", "1000");
    Record parentRecord = new Record(parent);
    parentRecord.put("id", 200L);
    parentRecord.put("name", "parent");
    inputRecord.put("parent", parentRecord);

    Record outputRecord = converter.convert(inputRecord);

    assertEquals(5L, outputRecord.get("l1"));
    assertEquals(1000L, outputRecord.get("s1"));
    assertEquals(200L, outputRecord.get("parentId"));
}
Example 5
Source File: TestReadProjection.java From iceberg with Apache License 2.0 | 6 votes |
@Test
public void testRename() throws Exception {
  Schema writeSchema = new Schema(
      Types.NestedField.required(0, "id", Types.LongType.get()),
      Types.NestedField.optional(1, "data", Types.StringType.get())
  );

  Record record = new Record(AvroSchemaUtil.convert(writeSchema, "table"));
  record.put("id", 34L);
  record.put("data", "test");

  Schema readSchema = new Schema(
      Types.NestedField.required(0, "id", Types.LongType.get()),
      Types.NestedField.optional(1, "renamed", Types.StringType.get())
  );

  Record projected = writeAndRead("project_and_rename", writeSchema, readSchema, record);

  Assert.assertEquals("Should contain the correct id value", 34L, (long) projected.get("id"));

  int cmp = Comparators.charSequences()
      .compare("test", (CharSequence) projected.get("renamed"));
  Assert.assertTrue("Should contain the correct data/renamed value", cmp == 0);
}
Example 6
Source File: TestKiteStorageProcessor.java From localization_nifi with Apache License 2.0 | 6 votes |
@Test
public void testIncompatibleSchema() throws IOException {
    Schema incompatible = SchemaBuilder.record("User").fields()
            .requiredLong("id")
            .requiredString("username")
            .optionalString("email") // the dataset requires this field
            .endRecord();

    // this user has the email field and could be stored, but the schema is
    // still incompatible so the entire stream is rejected
    Record incompatibleUser = new Record(incompatible);
    incompatibleUser.put("id", 1L);
    incompatibleUser.put("username", "a");
    incompatibleUser.put("email", "[email protected]");

    TestRunner runner = TestRunners.newTestRunner(StoreInKiteDataset.class);
    runner.setProperty(StoreInKiteDataset.KITE_DATASET_URI, datasetUri);
    runner.assertValid();

    runner.enqueue(streamFor(incompatibleUser));
    runner.run();

    runner.assertAllFlowFilesTransferred("incompatible", 1);
}
Example 7
Source File: GoogleDriveGetReader.java From components with Apache License 2.0 | 6 votes |
@Override
public boolean start() throws IOException {
    super.start();
    //
    String resourceId = properties.fileAccessMethod.getValue().equals(AccessMethod.Id)
            ? properties.file.getValue()
            : utils.getFileId(properties.file.getValue());
    Map<String, MimeType> mimes = GoogleDriveMimeTypes.newDefaultMimeTypesSupported();
    mimes.put(MIME_TYPE_GOOGLE_DOCUMENT, properties.exportDocument.getValue());
    mimes.put(MIME_TYPE_GOOGLE_DRAWING, properties.exportDrawing.getValue());
    mimes.put(MIME_TYPE_GOOGLE_PRESENTATION, properties.exportPresentation.getValue());
    mimes.put(MIME_TYPE_GOOGLE_SPREADSHEET, properties.exportSpreadsheet.getValue());
    GoogleDriveGetParameters p = new GoogleDriveGetParameters(resourceId, mimes,
            properties.storeToLocal.getValue(), properties.outputFileName.getValue(),
            properties.setOutputExt.getValue());
    //
    GoogleDriveGetResult r = utils.getResource(p);
    fileId = r.getId();
    byte[] content = r.getContent();
    record = new Record(properties.schemaMain.schema.getValue());
    record.put(0, content);
    result.totalCount++;
    result.successCount++;

    return true;
}
Example 8
Source File: TestKiteStorageProcessor.java From nifi with Apache License 2.0 | 6 votes |
@Test
public void testIncompatibleSchema() throws IOException {
    Schema incompatible = SchemaBuilder.record("User").fields()
            .requiredLong("id")
            .requiredString("username")
            .optionalString("email") // the dataset requires this field
            .endRecord();

    // this user has the email field and could be stored, but the schema is
    // still incompatible so the entire stream is rejected
    Record incompatibleUser = new Record(incompatible);
    incompatibleUser.put("id", 1L);
    incompatibleUser.put("username", "a");
    incompatibleUser.put("email", "[email protected]");

    TestRunner runner = TestRunners.newTestRunner(StoreInKiteDataset.class);
    runner.setProperty(StoreInKiteDataset.KITE_DATASET_URI, datasetUri);
    runner.assertValid();

    runner.enqueue(streamFor(incompatibleUser));
    runner.run();

    runner.assertAllFlowFilesTransferred("incompatible", 1);
}
Example 9
Source File: TestReadProjection.java From iceberg with Apache License 2.0 | 5 votes |
@Test
public void testBasicProjection() throws Exception {
  Schema writeSchema = new Schema(
      Types.NestedField.required(0, "id", Types.LongType.get()),
      Types.NestedField.optional(1, "data", Types.StringType.get())
  );

  Record record = new Record(AvroSchemaUtil.convert(writeSchema, "table"));
  record.put("id", 34L);
  record.put("data", "test");

  Schema idOnly = new Schema(
      Types.NestedField.required(0, "id", Types.LongType.get())
  );

  Record projected = writeAndRead("basic_projection_id", writeSchema, idOnly, record);
  Assert.assertNull("Should not project data", projected.get("data"));
  Assert.assertEquals("Should contain the correct id value", 34L, (long) projected.get("id"));

  Schema dataOnly = new Schema(
      Types.NestedField.optional(1, "data", Types.StringType.get())
  );

  projected = writeAndRead("basic_projection_data", writeSchema, dataOnly, record);
  Assert.assertNull("Should not project id", projected.get("id"));
  int cmp = Comparators.charSequences()
      .compare("test", (CharSequence) projected.get("data"));
  Assert.assertTrue("Should contain the correct data value", cmp == 0);
}
Example 10
Source File: MarketoCampaignWriterTest.java From components with Apache License 2.0 | 5 votes |
@Before
public void setUp() throws Exception {
    super.setUp();
    props = new TMarketoCampaignProperties("test");
    props.schemaInput.setupProperties();
    props.schemaInput.setupLayout();
    props.connection.setupProperties();
    props.connection.setupLayout();
    props.connection.setupProperties();
    props.connection.endpoint.setValue("https://fake.io/rest");
    props.connection.clientAccessId.setValue("clientaccess");
    props.connection.secretKey.setValue("sekret");
    props.connection.attemptsIntervalTime.setValue(200); // shorten interval for tests
    props.setupProperties();
    props.setupLayout();
    props.campaignAction.setValue(CampaignAction.trigger);
    props.afterCampaignAction();
    when(sink.getProperties()).thenReturn(props);
    wop = new MarketoWriteOperation(sink);
    writer = new MarketoCampaignWriter(wop, null);
    writer.properties = props;
    assertTrue(writer instanceof MarketoCampaignWriter);

    record = new Record(MarketoConstants.triggerCampaignSchema());
    record.put(0, "12345");
}
Example 11
Source File: TestFilteredScan.java From iceberg with Apache License 2.0 | 5 votes |
private static Record record(org.apache.avro.Schema schema, Object... values) {
  Record rec = new Record(schema);
  for (int i = 0; i < values.length; i += 1) {
    rec.put(i, values[i]);
  }
  return rec;
}
Example 12
Source File: TestReadProjection.java From iceberg with Apache License 2.0 | 5 votes |
@Test
public void testMapProjection() throws IOException {
  Schema writeSchema = new Schema(
      Types.NestedField.required(0, "id", Types.LongType.get()),
      Types.NestedField.optional(5, "properties",
          Types.MapType.ofOptional(6, 7, Types.StringType.get(), Types.StringType.get()))
  );

  Map<String, String> properties = ImmutableMap.of("a", "A", "b", "B");

  Record record = new Record(AvroSchemaUtil.convert(writeSchema, "table"));
  record.put("id", 34L);
  record.put("properties", properties);

  Schema idOnly = new Schema(
      Types.NestedField.required(0, "id", Types.LongType.get())
  );

  Record projected = writeAndRead("id_only", writeSchema, idOnly, record);
  Assert.assertEquals("Should contain the correct id value", 34L, (long) projected.get("id"));
  Assert.assertNull("Should not project properties map", projected.get("properties"));

  Schema keyOnly = writeSchema.select("properties.key");
  projected = writeAndRead("key_only", writeSchema, keyOnly, record);
  Assert.assertNull("Should not project id", projected.get("id"));
  Assert.assertEquals("Should project entire map",
      properties, toStringMap((Map) projected.get("properties")));

  Schema valueOnly = writeSchema.select("properties.value");
  projected = writeAndRead("value_only", writeSchema, valueOnly, record);
  Assert.assertNull("Should not project id", projected.get("id"));
  Assert.assertEquals("Should project entire map",
      properties, toStringMap((Map) projected.get("properties")));

  Schema mapOnly = writeSchema.select("properties");
  projected = writeAndRead("map_only", writeSchema, mapOnly, record);
  Assert.assertNull("Should not project id", projected.get("id"));
  Assert.assertEquals("Should project entire map",
      properties, toStringMap((Map) projected.get("properties")));
}
Example 13
Source File: TestConvertAvroSchema.java From nifi with Apache License 2.0 | 5 votes |
private Record dataNested(long id, String companyName, Long parentId, String parentName) {
    Record result = new Record(TestAvroRecordConverter.NESTED_RECORD_SCHEMA);
    result.put("l1", id);
    result.put("s1", companyName);
    if (parentId != null || parentName != null) {
        Record parent = new Record(TestAvroRecordConverter.NESTED_PARENT_SCHEMA);
        parent.put("id", parentId);
        parent.put("name", parentName);
        result.put("parent", parent);
    }
    return result;
}
Example 14
Source File: GoogleDriveCopyReader.java From components with Apache License 2.0 | 5 votes |
@Override
public boolean start() throws IOException {
    super.start();
    CopyMode copyMode = properties.copyMode.getValue();
    String source = properties.source.getValue();
    String destinationFolder = properties.destinationFolder.getValue();
    String newName = properties.rename.getValue() ? properties.newName.getValue() : "";
    boolean deleteSourceFile = properties.deleteSourceFile.getValue();
    /* check for destination folder */
    String destinationFolderId = properties.destinationFolderAccessMethod.getValue().equals(AccessMethod.Id)
            ? destinationFolder
            : utils.getFolderId(destinationFolder, false);
    /* work on a fileName */
    if (CopyMode.File.equals(copyMode)) {
        /* check for managed resource */
        sourceId = properties.sourceAccessMethod.getValue().equals(AccessMethod.Id)
                ? source
                : utils.getFileId(source);
        destinationId = utils.copyFile(sourceId, destinationFolderId, newName, deleteSourceFile);
    } else { /* work on a folder */
        /* check for managed resource */
        sourceId = properties.sourceAccessMethod.getValue().equals(AccessMethod.Id)
                ? source
                : utils.getFolderId(source, false);
        if (newName.isEmpty()) {
            List<String> paths = utils.getExplodedPath(source);
            newName = paths.get(paths.size() - 1);
        }
        destinationId = utils.copyFolder(sourceId, destinationFolderId, newName);
    }
    //
    record = new Record(properties.schemaMain.schema.getValue());
    record.put(0, sourceId);
    record.put(1, destinationId);
    result.totalCount++;
    result.successCount++;

    return true;
}
Example 15
Source File: ParquetAvroValueReaders.java From iceberg with Apache License 2.0 | 4 votes |
@Override
protected void set(Record struct, int pos, Object value) {
  struct.put(pos, value);
}
Example 16
Source File: TestReadProjection.java From iceberg with Apache License 2.0 | 4 votes |
@Test
@SuppressWarnings("unchecked")
public void testListOfStructsProjection() throws IOException {
  Schema writeSchema = new Schema(
      Types.NestedField.required(0, "id", Types.LongType.get()),
      Types.NestedField.optional(22, "points",
          Types.ListType.ofOptional(21, Types.StructType.of(
              Types.NestedField.required(19, "x", Types.IntegerType.get()),
              Types.NestedField.optional(18, "y", Types.IntegerType.get())
          ))
      )
  );

  Record record = new Record(AvroSchemaUtil.convert(writeSchema, "table"));
  record.put("id", 34L);
  Record p1 = new Record(AvroSchemaUtil.fromOption(
      AvroSchemaUtil.fromOption(record.getSchema().getField("points").schema())
          .getElementType()));
  p1.put("x", 1);
  p1.put("y", 2);
  Record p2 = new Record(p1.getSchema());
  p2.put("x", 3);
  p2.put("y", null);
  record.put("points", ImmutableList.of(p1, p2));

  Schema idOnly = new Schema(
      Types.NestedField.required(0, "id", Types.LongType.get())
  );

  Record projected = writeAndRead("id_only", writeSchema, idOnly, record);
  Assert.assertEquals("Should contain the correct id value", 34L, (long) projected.get("id"));
  Assert.assertNull("Should not project points list", projected.get("points"));

  projected = writeAndRead("all_points", writeSchema, writeSchema.select("points"), record);
  Assert.assertNull("Should not project id", projected.get("id"));
  Assert.assertEquals("Should project points list",
      record.get("points"), projected.get("points"));

  projected = writeAndRead("x_only", writeSchema, writeSchema.select("points.x"), record);
  Assert.assertNull("Should not project id", projected.get("id"));
  Assert.assertNotNull("Should project points list", projected.get("points"));
  List<Record> points = (List<Record>) projected.get("points");
  Assert.assertEquals("Should read 2 points", 2, points.size());
  Record projectedP1 = points.get(0);
  Assert.assertEquals("Should project x", 1, (int) projectedP1.get("x"));
  Assert.assertNull("Should not project y", projectedP1.get("y"));
  Record projectedP2 = points.get(1);
  Assert.assertEquals("Should project x", 3, (int) projectedP2.get("x"));
  Assert.assertNull("Should not project y", projectedP2.get("y"));

  projected = writeAndRead("y_only", writeSchema, writeSchema.select("points.y"), record);
  Assert.assertNull("Should not project id", projected.get("id"));
  Assert.assertNotNull("Should project points list", projected.get("points"));
  points = (List<Record>) projected.get("points");
  Assert.assertEquals("Should read 2 points", 2, points.size());
  projectedP1 = points.get(0);
  Assert.assertNull("Should not project x", projectedP1.get("x"));
  Assert.assertEquals("Should project y", 2, (int) projectedP1.get("y"));
  projectedP2 = points.get(1);
  Assert.assertNull("Should not project x", projectedP2.get("x"));
  Assert.assertEquals("Should project null y", null, projectedP2.get("y"));

  Schema yRenamed = new Schema(
      Types.NestedField.optional(22, "points",
          Types.ListType.ofOptional(21, Types.StructType.of(
              Types.NestedField.optional(18, "z", Types.IntegerType.get())
          ))
      )
  );

  projected = writeAndRead("y_renamed", writeSchema, yRenamed, record);
  Assert.assertNull("Should not project id", projected.get("id"));
  Assert.assertNotNull("Should project points list", projected.get("points"));
  points = (List<Record>) projected.get("points");
  Assert.assertEquals("Should read 2 points", 2, points.size());
  projectedP1 = points.get(0);
  Assert.assertNull("Should not project x", projectedP1.get("x"));
  Assert.assertNull("Should not project y", projectedP1.get("y"));
  Assert.assertEquals("Should project z", 2, (int) projectedP1.get("z"));
  projectedP2 = points.get(1);
  Assert.assertNull("Should not project x", projectedP2.get("x"));
  Assert.assertNull("Should not project y", projectedP2.get("y"));
  Assert.assertEquals("Should project null z", null, projectedP2.get("z"));
}
Example 17
Source File: TestAvroRecordConverter.java From nifi with Apache License 2.0 | 4 votes |
/**
 * Tests the case where we don't use a mapping file and just map records by
 * name.
 */
@Test
public void testDefaultConversion() throws Exception {
    // We will convert s1 from string to long (or leave it null), ignore s2,
    // convert s3 from string to double, convert l1 from long to string,
    // and leave l2 the same.
    Schema input = SchemaBuilder.record("Input")
            .namespace("com.cloudera.edh").fields()
            .nullableString("s1", "").requiredString("s2")
            .requiredString("s3").optionalLong("l1").requiredLong("l2")
            .endRecord();
    Schema output = SchemaBuilder.record("Output")
            .namespace("com.cloudera.edh").fields().optionalLong("s1")
            .optionalString("l1").requiredLong("l2").requiredDouble("s3")
            .endRecord();

    AvroRecordConverter converter = new AvroRecordConverter(input, output,
            EMPTY_MAPPING, LocaleUtils.toLocale("en_US"));

    Record inputRecord = new Record(input);
    inputRecord.put("s1", null);
    inputRecord.put("s2", "blah");
    inputRecord.put("s3", "5.5");
    inputRecord.put("l1", null);
    inputRecord.put("l2", 5L);
    Record outputRecord = converter.convert(inputRecord);
    assertNull(outputRecord.get("s1"));
    assertNull(outputRecord.get("l1"));
    assertEquals(5L, outputRecord.get("l2"));
    assertEquals(5.5, outputRecord.get("s3"));

    inputRecord.put("s1", "500");
    inputRecord.put("s2", "blah");
    inputRecord.put("s3", "5.5e-5");
    inputRecord.put("l1", 100L);
    inputRecord.put("l2", 2L);
    outputRecord = converter.convert(inputRecord);
    assertEquals(500L, outputRecord.get("s1"));
    assertEquals("100", outputRecord.get("l1"));
    assertEquals(2L, outputRecord.get("l2"));
    assertEquals(5.5e-5, outputRecord.get("s3"));
}