Java Code Examples for org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector#getMap()
The following examples show how to use
org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector#getMap() .
You can vote up the ones you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example. You may check out the related API usage in the sidebar.
Example 1
Source File: OrcFlowFileWriter.java From localization_nifi with Apache License 2.0 | 6 votes |
@Override void write(Object obj) throws IOException { super.write(obj); if (obj != null) { MapObjectInspector insp = (MapObjectInspector) inspector; // this sucks, but it will have to do until we can get a better // accessor in the MapObjectInspector. Map<?, ?> valueMap = insp.getMap(obj); lengths.write(valueMap.size()); if (createBloomFilter) { bloomFilter.addLong(valueMap.size()); } for (Map.Entry<?, ?> entry : valueMap.entrySet()) { childrenWriters[0].write(entry.getKey()); childrenWriters[1].write(entry.getValue()); } } }
Example 2
Source File: HiveResolver.java From pxf with Apache License 2.0 | 6 votes |
/**
 * Flattens a Hive map field into a list of OneField records, one TEXT field
 * per key/value pair. An empty map yields a single record built from null
 * key/value tuples; a null map is rejected as a bad record.
 */
private List<OneField> traverseMap(Object obj, MapObjectInspector moi)
        throws BadRecordException, IOException {
    List<OneField> complexRecord = new LinkedList<>();
    List<OneField> mapRecord = new LinkedList<>();
    ObjectInspector keyInspector = moi.getMapKeyObjectInspector();
    ObjectInspector valueInspector = moi.getMapValueObjectInspector();
    Map<?, ?> map = moi.getMap(obj);
    if (map == null) {
        throw new BadRecordException(
                "Illegal value NULL for Hive data type Map");
    }
    if (map.isEmpty()) {
        // Represent the empty map with one record of null key/value tuples.
        traverseTuple(null, keyInspector, complexRecord, true);
        traverseTuple(null, valueInspector, complexRecord, true);
        addOneFieldToRecord(mapRecord, DataType.TEXT,
                HdfsUtilities.toString(complexRecord, mapkeyDelim));
        return mapRecord;
    }
    for (Map.Entry<?, ?> entry : map.entrySet()) {
        traverseTuple(entry.getKey(), keyInspector, complexRecord, true);
        traverseTuple(entry.getValue(), valueInspector, complexRecord, true);
        addOneFieldToRecord(mapRecord, DataType.TEXT,
                HdfsUtilities.toString(complexRecord, mapkeyDelim));
        // Reuse the scratch list for the next pair.
        complexRecord.clear();
    }
    return mapRecord;
}
Example 3
Source File: HiveMapObjectConverter.java From aliyun-maxcompute-data-collectors with Apache License 2.0 | 6 votes |
/**
 * Converts a Hive map value into a Java {@link Map} whose keys and values
 * have been converted to their ODPS representations via
 * {@code HiveObjectConverter.convert}.
 *
 * @param objectInspector inspector for the Hive value; must be a {@link MapObjectInspector}
 * @param o               the Hive map object to convert
 * @param odpsTypeInfo    the target ODPS type; must be a {@link MapTypeInfo}
 * @return a new {@link HashMap} of converted entries, or {@code null} when the
 *         underlying Hive map is {@code null}
 */
@Override
public Object convert(ObjectInspector objectInspector, Object o, TypeInfo odpsTypeInfo) {
    MapObjectInspector mapObjectInspector = (MapObjectInspector) objectInspector;
    ObjectInspector mapKeyObjectInspector = mapObjectInspector.getMapKeyObjectInspector();
    ObjectInspector mapValueObjectInspector = mapObjectInspector.getMapValueObjectInspector();
    TypeInfo mapKeyTypeInfo = ((MapTypeInfo) odpsTypeInfo).getKeyTypeInfo();
    TypeInfo mapValueTypeInfo = ((MapTypeInfo) odpsTypeInfo).getValueTypeInfo();

    // Wildcard-typed map instead of a raw type; getMap() may return null for a
    // NULL column value, so guard instead of hitting an NPE below.
    Map<?, ?> map = mapObjectInspector.getMap(o);
    if (map == null) {
        return null;
    }
    // Iterate entrySet() so each entry costs a single lookup instead of the
    // original keySet() + get(k) double lookup.
    Map<Object, Object> newMap = new HashMap<>(map.size());
    for (Map.Entry<?, ?> entry : map.entrySet()) {
        newMap.put(
            HiveObjectConverter.convert(mapKeyObjectInspector, entry.getKey(), mapKeyTypeInfo),
            HiveObjectConverter.convert(mapValueObjectInspector, entry.getValue(), mapValueTypeInfo));
    }
    return newMap;
}
Example 4
Source File: OrcFlowFileWriter.java From nifi with Apache License 2.0 | 6 votes |
@Override void write(Object obj) throws IOException { super.write(obj); if (obj != null) { MapObjectInspector insp = (MapObjectInspector) inspector; // this sucks, but it will have to do until we can get a better // accessor in the MapObjectInspector. Map<?, ?> valueMap = insp.getMap(obj); lengths.write(valueMap.size()); if (createBloomFilter) { bloomFilter.addLong(valueMap.size()); } for (Map.Entry<?, ?> entry : valueMap.entrySet()) { childrenWriters[0].write(entry.getKey()); childrenWriters[1].write(entry.getValue()); } } }
Example 5
Source File: WriterImpl.java From hive-dwrf with Apache License 2.0 | 6 votes |
@Override
void write(Object obj) throws IOException {
    // Serializes one map value: writes the entry count to the lengths stream,
    // forwards each key/value to the child writers, and accumulates the raw
    // data size contributed by each child for this row.
    long rawDataSize = 0;
    if (obj != null) {
        MapObjectInspector insp = (MapObjectInspector) inspector;
        // this sucks, but it will have to do until we can get a better
        // accessor in the MapObjectInspector.
        Map<?, ?> valueMap = insp.getMap(obj);
        // Don't use getMapSize(), it's inconsistent for some object inspectors
        int len = valueMap.size();
        lengths.write(len);
        for (Map.Entry<?, ?> entry : valueMap.entrySet()) {
            // Child 0 writes keys, child 1 writes values; their per-row sizes
            // are read back immediately after each write.
            childrenWriters[0].write(entry.getKey());
            childrenWriters[1].write(entry.getValue());
            rawDataSize += childrenWriters[0].getRowRawDataSize();
            rawDataSize += childrenWriters[1].getRowRawDataSize();
        }
    }
    // Parent bookkeeping runs even for a null map (rawDataSize stays 0).
    super.write(obj, rawDataSize);
}
Example 6
Source File: SerDeUtils.java From presto with Apache License 2.0 | 5 votes |
/**
 * Serializes a Hive map (read through {@code inspector}) into a Presto map Block.
 * <p>
 * When {@code builder} is null, a single-entry block builder is synthesized and
 * the resulting map Block is returned; when a parent builder is supplied, the
 * map is appended into it and null is returned. A null map appends a null to
 * the parent builder.
 *
 * @param filterNullMapKeys when true, entries whose key is null are skipped
 *                          (matching Hive's behavior)
 */
private static Block serializeMap(Type type, BlockBuilder builder, Object object, MapObjectInspector inspector, boolean filterNullMapKeys)
{
    Map<?, ?> map = inspector.getMap(object);
    if (map == null) {
        // A null map requires a parent builder to record the null into.
        requireNonNull(builder, "parent builder is null").appendNull();
        return null;
    }

    List<Type> typeParameters = type.getTypeParameters();
    checkArgument(typeParameters.size() == 2, "map must have exactly 2 type parameter");
    Type keyType = typeParameters.get(0);
    Type valueType = typeParameters.get(1);
    ObjectInspector keyInspector = inspector.getMapKeyObjectInspector();
    ObjectInspector valueInspector = inspector.getMapValueObjectInspector();
    BlockBuilder currentBuilder;

    // Track whether we created the builder here so we know to extract and
    // return the finished Block at the end.
    boolean builderSynthesized = false;
    if (builder == null) {
        builderSynthesized = true;
        builder = type.createBlockBuilder(null, 1);
    }
    currentBuilder = builder.beginBlockEntry();

    for (Map.Entry<?, ?> entry : map.entrySet()) {
        // Hive skips map entries with null keys
        if (!filterNullMapKeys || entry.getKey() != null) {
            serializeObject(keyType, currentBuilder, entry.getKey(), keyInspector);
            serializeObject(valueType, currentBuilder, entry.getValue(), valueInspector);
        }
    }
    builder.closeEntry();

    if (builderSynthesized) {
        return (Block) type.getObject(builder, 0);
    }
    else {
        return null;
    }
}
Example 7
Source File: JsonSerdeUtils.java From incubator-hivemall with Apache License 2.0 | 5 votes |
/**
 * Serializes a Hive map as a JSON object onto {@code sb}. Writes the literal
 * text {@code null} when the map itself is null. Keys that the key serializer
 * did not already quote are wrapped in quotes to keep the output valid JSON.
 */
private static void serializeMap(@Nonnull final StringBuilder sb, @Nullable final Object obj,
        @Nonnull final MapObjectInspector moi) throws SerDeException {
    final ObjectInspector keyOI = moi.getMapKeyObjectInspector();
    final ObjectInspector valueOI = moi.getMapValueObjectInspector();
    final Map<?, ?> map = moi.getMap(obj);
    if (map == null) {
        sb.append("null");
        return;
    }
    sb.append(SerDeUtils.LBRACE);
    boolean first = true;
    for (final Map.Entry<?, ?> entry : map.entrySet()) {
        if (!first) {
            sb.append(SerDeUtils.COMMA);
        }
        first = false;
        // Render the key into a scratch buffer so it can be quoted if needed.
        final StringBuilder keyBuilder = new StringBuilder();
        buildJSONString(keyBuilder, entry.getKey(), keyOI);
        final String keyString = keyBuilder.toString().trim();
        if (keyString.isEmpty() || keyString.charAt(0) == SerDeUtils.QUOTE) {
            sb.append(keyString);
        } else {
            appendWithQuotes(sb, keyString);
        }
        sb.append(SerDeUtils.COLON);
        buildJSONString(sb, entry.getValue(), valueOI);
    }
    sb.append(SerDeUtils.RBRACE);
}
Example 8
Source File: JSONCDHSerDe.java From bigdata-tutorial with Apache License 2.0 | 5 votes |
/**
 * Converts a Hive map into a plain Java map, recursively deparsing each value
 * with the map's value object inspector. Keys are copied through unchanged.
 *
 * @param obj   the Hive map object (may be null)
 * @param mapOI inspector describing the map's key/value types
 * @return a HashMap of deparsed entries, or null when the underlying map is null
 */
private Object deparseMap(Object obj, MapObjectInspector mapOI) {
    Map<Object, Object> map = new HashMap<Object, Object>();
    ObjectInspector mapValOI = mapOI.getMapValueObjectInspector();
    Map<?, ?> fields = mapOI.getMap(obj);
    // getMap() can return null (e.g. for a NULL column value); guard against
    // the NPE the original code would throw on fields.entrySet().
    if (fields == null) {
        return null;
    }
    for (Map.Entry<?, ?> field : fields.entrySet()) {
        Object fieldName = field.getKey();
        Object fieldObj = field.getValue();
        map.put(fieldName, deparseObject(fieldObj, mapValOI));
    }
    return map;
}
Example 9
Source File: ParquetHiveSerDe.java From parquet-mr with Apache License 2.0 | 5 votes |
/**
 * Serializes a Hive map into Parquet's nested ArrayWritable representation:
 * an outer single-element ArrayWritable wrapping an ArrayWritable of
 * two-element [key, value] pairs. Pairs whose serialized key is null are
 * dropped; a null or empty source map yields null.
 */
private Writable createMap(final Object obj, final MapObjectInspector inspector)
        throws SerDeException {
    final Map<?, ?> sourceMap = inspector.getMap(obj);
    final ObjectInspector keyInspector = inspector.getMapKeyObjectInspector();
    final ObjectInspector valueInspector = inspector.getMapValueObjectInspector();
    final List<ArrayWritable> entries = new ArrayList<ArrayWritable>();
    if (sourceMap != null) {
        for (final Entry<?, ?> mapEntry : sourceMap.entrySet()) {
            final Writable key = createObject(mapEntry.getKey(), keyInspector);
            final Writable value = createObject(mapEntry.getValue(), valueInspector);
            // Parquet maps cannot represent null keys; skip such pairs.
            if (key != null) {
                entries.add(new ArrayWritable(Writable.class, new Writable[] {key, value}));
            }
        }
    }
    if (entries.isEmpty()) {
        return null;
    }
    final ArrayWritable pairs = new ArrayWritable(ArrayWritable.class,
            entries.toArray(new ArrayWritable[entries.size()]));
    return new ArrayWritable(Writable.class, new Writable[] {pairs});
}
Example 10
Source File: JSONSerDe.java From searchanalytics-bigdata with MIT License | 5 votes |
/**
 * Converts a Hive map into a plain Java map, recursively deparsing each value
 * with the map's value object inspector. Keys are copied through unchanged.
 *
 * @param obj   the Hive map object (may be null)
 * @param mapOI inspector describing the map's key/value types
 * @return a HashMap of deparsed entries, or null when the underlying map is null
 */
private Object deparseMap(final Object obj, final MapObjectInspector mapOI) {
    final Map<Object, Object> map = new HashMap<Object, Object>();
    final ObjectInspector mapValOI = mapOI.getMapValueObjectInspector();
    final Map<?, ?> fields = mapOI.getMap(obj);
    // getMap() can return null (e.g. for a NULL column value); guard against
    // the NPE the original code would throw on fields.entrySet().
    if (fields == null) {
        return null;
    }
    for (final Map.Entry<?, ?> field : fields.entrySet()) {
        final Object fieldName = field.getKey();
        final Object fieldObj = field.getValue();
        map.put(fieldName, deparseObject(fieldObj, mapValOI));
    }
    return map;
}
Example 11
Source File: TestDataWritableWriter.java From presto with Apache License 2.0 | 4 votes |
/**
 * It writes a map type and its key-pair values to the Parquet RecordConsumer.
 * This is called when the original type (MAP) is detected by writeValue().
 * This function assumes the following schema:
 *    optional group mapCol (MAP) {
 *      repeated group map (MAP_KEY_VALUE) {
 *        required TYPE key;
 *        optional TYPE value;
 *      }
 *    }
 *
 * @param value The object that contains the map key-values.
 * @param inspector The object inspector used to get the correct value type.
 * @param type Type that contains information about the group (MAP) schema.
 */
private void writeMap(Object value, MapObjectInspector inspector, GroupType type)
{
    // Get the internal map structure (MAP_KEY_VALUE)
    GroupType repeatedType = type.getType(0).asGroupType();

    // The outer group is always emitted, even for a null/empty map; only the
    // repeated field inside it is conditional.
    recordConsumer.startGroup();
    Map<?, ?> mapValues = inspector.getMap(value);
    if (mapValues != null && mapValues.size() > 0) {
        recordConsumer.startField(repeatedType.getName(), 0);
        // Index 0 of the repeated group is the key, index 1 the value.
        Type keyType = repeatedType.getType(0);
        String keyName = keyType.getName();
        ObjectInspector keyInspector = inspector.getMapKeyObjectInspector();
        Type valuetype = repeatedType.getType(1);
        String valueName = valuetype.getName();
        ObjectInspector valueInspector = inspector.getMapValueObjectInspector();
        for (Map.Entry<?, ?> keyValue : mapValues.entrySet()) {
            recordConsumer.startGroup();
            // NOTE(review): Map.entrySet() never yields null entries, so this
            // check is always true; kept as-is for byte-identical behavior.
            if (keyValue != null) {
                // write key element
                Object keyElement = keyValue.getKey();
                recordConsumer.startField(keyName, 0);
                writeValue(keyElement, keyInspector, keyType);
                recordConsumer.endField(keyName, 0);

                // write value element; a null value is simply omitted, which
                // matches the "optional TYPE value" schema above.
                Object valueElement = keyValue.getValue();
                if (valueElement != null) {
                    recordConsumer.startField(valueName, 1);
                    writeValue(valueElement, valueInspector, valuetype);
                    recordConsumer.endField(valueName, 1);
                }
            }
            recordConsumer.endGroup();
        }
        recordConsumer.endField(repeatedType.getName(), 0);
    }
    recordConsumer.endGroup();
}
Example 12
Source File: HiveDynamoDBItemType.java From emr-dynamodb-connector with Apache License 2.0 | 4 votes |
/**
 * Converts a Hive column of type {@code Map<String,String>} into a DynamoDB item.
 *
 * It is expected that the Hive data is a map of type <String, String>. The key in Hive data
 * map is converted to a DynamoDB attribute name. The corresponding value in Hive data map is
 * converted into DynamoDB AttributeValue. This attribute value is expected to be a JSON
 * serialized AttributeValue.
 *
 * @param data Data from Hive
 * @param fieldObjectInspector The object inspector for the Hive data. Must have TypeName
 *        Map&lt;String,String&gt;.
 *
 * @return DynamoDB item representation of provided data from Hive as a
 *         Map&lt;String,AttributeValue&gt;.
 *
 * @throws SerDeException if the inspector is not a map&lt;string,string&gt; inspector or the
 *         Hive map is null/empty
 */
public Map<String, AttributeValue> parseDynamoDBData(Object data, ObjectInspector
    fieldObjectInspector) throws SerDeException {

  // Reject anything that is not the expected map<string,string> column type.
  if (!HiveDynamoDBTypeFactory.isHiveDynamoDBItemMapType(fieldObjectInspector)) {
    throw new SerDeException(getClass().toString() + " Expecting a MapObjectInspector of type "
        + "map<string,string> for a column which maps DynamoDB item. But we got: "
        + fieldObjectInspector.getTypeName());
  }

  Map<String, AttributeValue> item = new HashMap<>();

  /* map is of type <String, String> */
  MapObjectInspector mapOI = (MapObjectInspector) fieldObjectInspector;
  StringObjectInspector mapKeyObjectInspector = (StringObjectInspector) mapOI
      .getMapKeyObjectInspector();
  StringObjectInspector mapValueObjectInspector = (StringObjectInspector) mapOI
      .getMapValueObjectInspector();

  /*
   * Get the underlying map object. This is expected to be of type
   * <String,String>
   */
  Map<?, ?> map = mapOI.getMap(data);

  // NOTE(review): the message only mentions null, but empty maps are rejected too.
  if (map == null || map.isEmpty()) {
    throw new SerDeException("Hive data cannot be null.");
  }

  /* Reconstruct the item */
  for (Entry<?, ?> entry : map.entrySet()) {

    /* Get the string key, value pair */
    String dynamoDBAttributeName = mapKeyObjectInspector.getPrimitiveJavaObject(entry.getKey());
    String dynamoDBAttributeValue =
        mapValueObjectInspector.getPrimitiveJavaObject(entry.getValue());

    /* Deserialize the AttributeValue string */
    AttributeValue deserializedAttributeValue =
        deserializeAttributeValue(dynamoDBAttributeValue);

    item.put(dynamoDBAttributeName, deserializedAttributeValue);
  }
  return item;
}