org.apache.hadoop.hive.serde2.lazy.LazyMap Java Examples
The following examples show how to use
org.apache.hadoop.hive.serde2.lazy.LazyMap.
You can vote up the ones you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example. You may also check out the related API usage in the sidebar.
Example #1
Source File: TestLazyMap.java From presto with Apache License 2.0 | 6 votes |
public static void assertMapDecode(String encodedMap, Map<? extends Object, ? extends Object> expectedMap) { LazyMap lazyMap = (LazyMap) createLazyObject(getLazySimpleMapObjectInspector( LAZY_STRING_OBJECT_INSPECTOR, getLazyStringObjectInspector(false, (byte) 0), (byte) 2, (byte) 3, new Text("\\N"), false, (byte) 0)); lazyMap.init(newByteArrayRef(encodedMap), 0, encodedMap.length()); Map<Object, Object> map = lazyMap.getMap(); assertEquals(map, expectedMap); }
Example #2
Source File: HiveRCSchemaUtil.java From spork with Apache License 2.0 | 6 votes |
/** * Converts the LazyMap to a InternalMap. * * @param map * LazyMap * @return InternalMap */ public static InternalMap parseLazyMapToPigMap(LazyMap map) { InternalMap pigmap = new InternalMap(); Map<Object, Object> javamap = map.getMap(); if (javamap != null) { // for each item in the map extract the java primitive type for (Entry<Object, Object> entry : javamap.entrySet()) { pigmap.put(extractPigTypeFromHiveType(entry.getKey()), extractPigTypeFromHiveType(entry.getValue())); } } return pigmap; }
Example #3
Source File: HiveRCSchemaUtil.java From spork with Apache License 2.0 | 5 votes |
/** * Converts from a hive type to a pig type * * @param value * Object hive type * @return Object pig type */ public static Object extractPigTypeFromHiveType(Object value) { if (value instanceof org.apache.hadoop.hive.serde2.lazy.LazyArray) { value = parseLazyArrayToPigArray((org.apache.hadoop.hive.serde2.lazy.LazyArray) value); } else if (value instanceof org.apache.hadoop.hive.serde2.lazy.LazyMap) { value = parseLazyMapToPigMap((org.apache.hadoop.hive.serde2.lazy.LazyMap) value); } else { if (value instanceof LazyString) { value = ((LazyString) value).getWritableObject().toString(); } else if (value instanceof LazyInteger) { value = ((LazyInteger) value).getWritableObject().get(); } else if (value instanceof LazyLong) { value = ((LazyLong) value).getWritableObject().get(); } else if (value instanceof LazyFloat) { value = ((LazyFloat) value).getWritableObject().get(); } else if (value instanceof LazyDouble) { value = ((LazyDouble) value).getWritableObject().get(); } else if (value instanceof LazyBoolean) { boolean boolvalue = ((LazyBoolean) value).getWritableObject() .get(); value = (boolvalue) ? 1 : 0; } else if (value instanceof LazyByte) { value = (int) ((LazyByte) value).getWritableObject().get(); } else if (value instanceof LazyShort) { value = ((LazyShort) value).getWritableObject().get(); } } return value; }
Example #4
Source File: RcFileTester.java From presto with Apache License 2.0 | 4 votes |
/**
 * Decodes a value produced by a Hive record reader into the plain Java / SQL
 * representation used for comparison in this tester.
 *
 * <p>First unwraps any {@code LazyPrimitive} to its Writable, then dispatches
 * on the concrete Writable or lazy container type. Branch order matters:
 * LazyBinaryArray/LazyBinaryMap are checked before LazyArray/LazyMap, and the
 * generic {@code List} case comes last.
 *
 * @param type the Presto type the value is being decoded as (used for
 *             decimals and containers)
 * @param actualValue the raw value from the record reader
 * @return the decoded value
 */
private static Object decodeRecordReaderValue(Type type, Object actualValue)
{
    // Unwrap lazy primitives to their underlying Hadoop Writable first.
    if (actualValue instanceof LazyPrimitive) {
        actualValue = ((LazyPrimitive<?, ?>) actualValue).getWritableObject();
    }
    if (actualValue instanceof BooleanWritable) {
        actualValue = ((BooleanWritable) actualValue).get();
    }
    else if (actualValue instanceof ByteWritable) {
        actualValue = ((ByteWritable) actualValue).get();
    }
    else if (actualValue instanceof BytesWritable) {
        actualValue = new SqlVarbinary(((BytesWritable) actualValue).copyBytes());
    }
    else if (actualValue instanceof DateWritable) {
        actualValue = new SqlDate(((DateWritable) actualValue).getDays());
    }
    else if (actualValue instanceof DoubleWritable) {
        actualValue = ((DoubleWritable) actualValue).get();
    }
    else if (actualValue instanceof FloatWritable) {
        actualValue = ((FloatWritable) actualValue).get();
    }
    else if (actualValue instanceof IntWritable) {
        actualValue = ((IntWritable) actualValue).get();
    }
    else if (actualValue instanceof LongWritable) {
        actualValue = ((LongWritable) actualValue).get();
    }
    else if (actualValue instanceof ShortWritable) {
        actualValue = ((ShortWritable) actualValue).get();
    }
    else if (actualValue instanceof HiveDecimalWritable) {
        DecimalType decimalType = (DecimalType) type;
        HiveDecimalWritable writable = (HiveDecimalWritable) actualValue;
        // writable messes with the scale so rescale the values to the Presto type
        BigInteger rescaledValue = rescale(writable.getHiveDecimal().unscaledValue(), writable.getScale(), decimalType.getScale());
        actualValue = new SqlDecimal(rescaledValue, decimalType.getPrecision(), decimalType.getScale());
    }
    else if (actualValue instanceof Text) {
        actualValue = actualValue.toString();
    }
    else if (actualValue instanceof TimestampWritable) {
        TimestampWritable timestamp = (TimestampWritable) actualValue;
        // Both paths convert seconds+nanos to millis with 3-digit precision;
        // the legacy path additionally pins the session's UTC key.
        if (SESSION.isLegacyTimestamp()) {
            actualValue = SqlTimestamp.legacyFromMillis(3, (timestamp.getSeconds() * 1000) + (timestamp.getNanos() / 1000000L), UTC_KEY);
        }
        else {
            actualValue = SqlTimestamp.fromMillis(3, (timestamp.getSeconds() * 1000) + (timestamp.getNanos() / 1000000L));
        }
    }
    else if (actualValue instanceof StructObject) {
        StructObject structObject = (StructObject) actualValue;
        actualValue = decodeRecordReaderStruct(type, structObject.getFieldsAsList());
    }
    else if (actualValue instanceof LazyBinaryArray) {
        actualValue = decodeRecordReaderList(type, ((LazyBinaryArray) actualValue).getList());
    }
    else if (actualValue instanceof LazyBinaryMap) {
        actualValue = decodeRecordReaderMap(type, ((LazyBinaryMap) actualValue).getMap());
    }
    else if (actualValue instanceof LazyArray) {
        actualValue = decodeRecordReaderList(type, ((LazyArray) actualValue).getList());
    }
    else if (actualValue instanceof LazyMap) {
        actualValue = decodeRecordReaderMap(type, ((LazyMap) actualValue).getMap());
    }
    else if (actualValue instanceof List) {
        actualValue = decodeRecordReaderList(type, ((List<?>) actualValue));
    }
    return actualValue;
}
Example #5
Source File: TestHiveColumnarStorage.java From spork with Apache License 2.0 | 4 votes |
@Test public void testShouldStoreMapAsHiveMap() throws IOException, InterruptedException, SerDeException { String loadString = "org.apache.pig.piggybank.storage.HiveColumnarLoader('f1 string,f2 string,f3 string')"; String storeString = "org.apache.pig.piggybank.storage.HiveColumnarStorage()"; String singlePartitionedFile = simpleDataFile.getAbsolutePath(); File outputFile = new File("testhiveColumnarStore"); PigServer server = new PigServer(ExecType.LOCAL); server.setBatchOn(); server.registerQuery("a = LOAD '" + Util.encodeEscape(singlePartitionedFile) + "' using " + loadString + ";"); server.registerQuery("b = FOREACH a GENERATE f1, TOMAP(f2,f3);"); //when server.store("b", outputFile.getAbsolutePath(), storeString); //then Path outputPath = new Path(outputFile.getAbsolutePath()+"/part-m-00000.rc"); ColumnarStruct struct = readRow(outputFile, outputPath, "f1 string,f2 map<string,string>"); assertEquals(2, struct.getFieldsAsList().size()); Object o = struct.getField(0); assertEquals(LazyString.class, o.getClass()); o = struct.getField(1); assertEquals(LazyMap.class, o.getClass()); LazyMap arr = (LazyMap)o; Map<Object,Object> values = arr.getMap(); for(Entry<Object,Object> entry : values.entrySet()) { assertEquals(LazyString.class, entry.getKey().getClass()); assertEquals(LazyString.class, entry.getValue().getClass()); String keyStr =((LazyString) entry.getKey()).getWritableObject().toString(); assertEquals("Sample value", keyStr); String valueStr =((LazyString) entry.getValue()).getWritableObject().toString(); assertEquals("Sample value", valueStr); } }