Java Code Examples for org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector#getPrimitiveJavaObject()
The following examples show how to use
org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector#getPrimitiveJavaObject().
You can vote up the examples you like or vote down the ones you don't like,
and you can go to the original project or source file by following the links above each example. You may also check out the related API usage on the sidebar.
Example 1
Source File: ST_GeomFromGeoJson.java From spatial-framework-for-hadoop with Apache License 2.0 | 6 votes |
@Override public Object evaluate(DeferredObject[] arguments) throws HiveException { DeferredObject jsonDeferredObject = arguments[0]; String json = null; if (jsonOI.getCategory() == Category.STRUCT){ //StructObjectInspector structOI = (StructObjectInspector)jsonOI; // TODO support structs } else { PrimitiveObjectInspector primOI = (PrimitiveObjectInspector)jsonOI; json = (String)primOI.getPrimitiveJavaObject(jsonDeferredObject.get()); } try { OGCGeometry ogcGeom = OGCGeometry.fromGeoJson(json); return GeometryUtils.geometryToEsriShapeBytesWritable(ogcGeom); } catch (Exception e) { LogUtils.Log_InvalidText(LOG, json); } return null; }
Example 2
Source File: ST_GeomFromJson.java From spatial-framework-for-hadoop with Apache License 2.0 | 6 votes |
@Override public Object evaluate(DeferredObject[] arguments) throws HiveException { DeferredObject jsonDeferredObject = arguments[0]; String json = null; if (jsonOI.getCategory() == Category.STRUCT){ //StructObjectInspector structOI = (StructObjectInspector)jsonOI; // TODO support structs } else { PrimitiveObjectInspector primOI = (PrimitiveObjectInspector)jsonOI; json = (String)primOI.getPrimitiveJavaObject(jsonDeferredObject.get()); } try { OGCGeometry ogcGeom = OGCGeometry.fromJson(json); return GeometryUtils.geometryToEsriShapeBytesWritable(ogcGeom); } catch (Exception e) { } return null; }
Example 3
Source File: ObjectInspectorTest.java From Hive-XML-SerDe with Apache License 2.0 | 6 votes |
/**
 * Verifies that a map-typed XML column is deserialized correctly: each
 * element matching the XPath becomes a map entry, and the map-value
 * inspector unwraps entry values to plain Strings.
 */
@SuppressWarnings("rawtypes")
public void testSimpleXmlMap() throws SerDeException {
	XmlSerDe serDe = new XmlSerDe();
	Properties tableProperties = new Properties();
	tableProperties.put(LIST_COLUMNS, "test");
	tableProperties.put(LIST_COLUMN_TYPES, "map<string,string>");
	tableProperties.setProperty("column.xpath.test", "//*[contains(name(),'test')]");
	serDe.initialize(new Configuration(), tableProperties);
	Text xml = new Text();
	xml.set("<root><test1>string1</test1><test2>string2</test2></root>");
	Object row = serDe.deserialize(xml);
	XmlStructObjectInspector rowInspector = ((XmlStructObjectInspector) serDe.getObjectInspector());
	StructField testField = rowInspector.getStructFieldRef("test");
	Object fieldData = rowInspector.getStructFieldData(row, testField);
	XmlMapObjectInspector mapInspector = (XmlMapObjectInspector) testField.getFieldObjectInspector();
	Map parsedMap = mapInspector.getMap(fieldData);
	PrimitiveObjectInspector valueInspector = (PrimitiveObjectInspector) mapInspector.getMapValueObjectInspector();
	// getPrimitiveJavaObject unwraps the raw map value into a plain String
	String firstValue = (String) valueInspector.getPrimitiveJavaObject(parsedMap.get("test1"));
	assertEquals("string1", firstValue);
}
Example 4
Source File: HiveJdbcBridgeUtils.java From HiveJdbcStorageHandler with Apache License 2.0 | 6 votes |
/**
 * Converts a Hive-internal value into its plain Java representation,
 * recursing into list elements.
 *
 * @param field the Hive-internal (possibly Writable/lazy) value
 * @param fieldOI inspector describing {@code field}
 * @return a plain Java object (primitives unwrapped, lists copied element-wise)
 * @throws SerDeException for categories other than PRIMITIVE and LIST
 */
public static Object deparseObject(Object field, ObjectInspector fieldOI) throws SerDeException {
	switch (fieldOI.getCategory()) {
		case PRIMITIVE:
			return ((PrimitiveObjectInspector) fieldOI).getPrimitiveJavaObject(field);
		case LIST: {
			ListObjectInspector listOI = (ListObjectInspector) fieldOI;
			List<?> sourceElements = listOI.getList(field);
			ObjectInspector elementOI = listOI.getListElementObjectInspector();
			List<Object> converted = new ArrayList<Object>(sourceElements.size());
			for (Object element : sourceElements) {
				converted.add(deparseObject(element, elementOI));
			}
			return converted;
		}
		default:
			throw new SerDeException("Unexpected fieldOI: " + fieldOI);
	}
}
Example 5
Source File: HiveKuduBridgeUtils.java From HiveKudu-Handler with Apache License 2.0 | 5 votes |
public static Object deparseObject(Object field, ObjectInspector fieldOI) throws SerDeException { switch (fieldOI.getCategory()) { case PRIMITIVE: { PrimitiveObjectInspector oi = (PrimitiveObjectInspector) fieldOI; return oi.getPrimitiveJavaObject(field); } //Kudu doesnt support LIST or MAP based data types default: throw new SerDeException("Unexpected fieldOI: " + fieldOI); } }
Example 6
Source File: BaseJsonSerDe.java From spatial-framework-for-hadoop with Apache License 2.0 | 5 votes |
private void generateJsonFromWritable(Writable value, int fieldIndex, String label, PrimitiveObjectInspector poi, JsonGenerator jsonGen) throws JsonProcessingException, IOException { Object prim = poi.getPrimitiveJavaObject(value); if (prim instanceof java.util.Date) { long epoch = ((java.util.Date)prim).getTime(); long offset = prim instanceof java.sql.Timestamp ? 0 : tz.getOffset(epoch); jsonGen.writeObjectField(label, epoch - offset); // UTC } else { jsonGen.writeObjectField(label, prim); } }
Example 7
Source File: TestDataWritableWriter.java From presto with Apache License 2.0 | 4 votes |
/**
 * It writes the primitive value to the Parquet RecordConsumer, dispatching on
 * the inspector's primitive category to pick the matching Parquet add* call.
 * Null values and VOID are skipped entirely (no Parquet field is emitted).
 *
 * @param value The object that contains the primitive value.
 * @param inspector The object inspector used to get the correct value type.
 */
private void writePrimitive(Object value, PrimitiveObjectInspector inspector) {
	if (value == null) {
		return;
	}
	switch (inspector.getPrimitiveCategory()) {
		case VOID:
			return;
		case DOUBLE:
			recordConsumer.addDouble(((DoubleObjectInspector) inspector).get(value));
			break;
		case BOOLEAN:
			recordConsumer.addBoolean(((BooleanObjectInspector) inspector).get(value));
			break;
		case FLOAT:
			recordConsumer.addFloat(((FloatObjectInspector) inspector).get(value));
			break;
		// BYTE and SHORT are widened to Parquet int32, like INT.
		case BYTE:
			recordConsumer.addInteger(((ByteObjectInspector) inspector).get(value));
			break;
		case INT:
			recordConsumer.addInteger(((IntObjectInspector) inspector).get(value));
			break;
		case LONG:
			recordConsumer.addLong(((LongObjectInspector) inspector).get(value));
			break;
		case SHORT:
			recordConsumer.addInteger(((ShortObjectInspector) inspector).get(value));
			break;
		// All string-like categories end up as UTF-8 Binary.
		case STRING:
			String v = ((StringObjectInspector) inspector).getPrimitiveJavaObject(value);
			recordConsumer.addBinary(Binary.fromString(v));
			break;
		case CHAR:
			// getStrippedValue() drops the CHAR type's blank padding.
			String vChar = ((HiveCharObjectInspector) inspector).getPrimitiveJavaObject(value).getStrippedValue();
			recordConsumer.addBinary(Binary.fromString(vChar));
			break;
		case VARCHAR:
			String vVarchar = ((HiveVarcharObjectInspector) inspector).getPrimitiveJavaObject(value).getValue();
			recordConsumer.addBinary(Binary.fromString(vVarchar));
			break;
		case BINARY:
			byte[] vBinary = ((BinaryObjectInspector) inspector).getPrimitiveJavaObject(value);
			recordConsumer.addBinary(Binary.fromByteArray(vBinary));
			break;
		case TIMESTAMP:
			Timestamp ts = ((TimestampObjectInspector) inspector).getPrimitiveJavaObject(value);
			// second argument false — presumably "no timezone conversion"; TODO confirm
			recordConsumer.addBinary(NanoTimeUtils.getNanoTime(ts, false).toBinary());
			break;
		case DECIMAL:
			// Note: the cast is applied to the returned object, not the inspector.
			HiveDecimal vDecimal = ((HiveDecimal) inspector.getPrimitiveJavaObject(value));
			DecimalTypeInfo decTypeInfo = (DecimalTypeInfo) inspector.getTypeInfo();
			recordConsumer.addBinary(decimalToBinary(vDecimal, decTypeInfo));
			break;
		case DATE:
			Date vDate = ((DateObjectInspector) inspector).getPrimitiveJavaObject(value);
			// Parquet DATE is stored as days since epoch.
			recordConsumer.addInteger(DateWritable.dateToDays(vDate));
			break;
		default:
			throw new IllegalArgumentException("Unsupported primitive data type: " + inspector.getPrimitiveCategory());
	}
}
Example 8
Source File: DistributedCacheLookupUDF.java From incubator-hivemall with Apache License 2.0 | 4 votes |
/**
 * Recursively loads key/value pairs from a distributed-cache file (or a
 * directory of such files) into the given map. Hadoop ".crc" checksum files
 * are skipped.
 *
 * @param map destination map; keys are plain Java objects, values are Writable copies
 * @param file a data file, or a directory to recurse into
 * @param keyOI inspector for the key column
 * @param valueOI inspector for the value column
 * @throws IOException if reading the file fails
 * @throws SerDeException if a line cannot be deserialized
 */
private static void loadValues(Object2ObjectMap<Object, Object> map, File file, PrimitiveObjectInspector keyOI, PrimitiveObjectInspector valueOI) throws IOException, SerDeException {
	if (!file.exists()) {
		return;
	}
	if (file.getName().endsWith(".crc")) {
		return; // checksum sidecar files carry no data
	}
	if (file.isDirectory()) {
		File[] children = file.listFiles();
		// listFiles() returns null on an I/O error or permission failure;
		// the original code would NPE here.
		if (children != null) {
			for (File f : children) {
				loadValues(map, f, keyOI, valueOI);
			}
		}
		return;
	}
	LazySimpleSerDe serde = HiveUtils.getKeyValueLineSerde(keyOI, valueOI);
	StructObjectInspector lineOI = (StructObjectInspector) serde.getObjectInspector();
	StructField keyRef = lineOI.getStructFieldRef("key");
	StructField valueRef = lineOI.getStructFieldRef("value");
	PrimitiveObjectInspector keyRefOI = (PrimitiveObjectInspector) keyRef.getFieldObjectInspector();
	PrimitiveObjectInspector valueRefOI = (PrimitiveObjectInspector) valueRef.getFieldObjectInspector();
	BufferedReader reader = null;
	try {
		reader = HadoopUtils.getBufferedReader(file);
		String line;
		while ((line = reader.readLine()) != null) {
			Text lineText = new Text(line);
			Object lineObj = serde.deserialize(lineText);
			List<Object> fields = lineOI.getStructFieldsDataAsList(lineObj);
			Object f0 = fields.get(0);
			Object f1 = fields.get(1);
			Object k = keyRefOI.getPrimitiveJavaObject(f0);
			// copyObject() detaches the value from the serde's reused buffer
			// before the Writable is stored in the map.
			Object v = valueRefOI.getPrimitiveWritableObject(valueRefOI.copyObject(f1));
			map.put(k, v);
		}
	} finally {
		IOUtils.closeQuietly(reader);
	}
}
Example 9
Source File: BlurSerializer.java From incubator-retired-blur with Apache License 2.0 | 4 votes |
/**
 * Renders a column value as a String by first unwrapping the Hive-internal
 * object into its plain Java form, then delegating to the two-argument overload.
 *
 * @param columnName name of the column being rendered
 * @param data the Hive-internal value
 * @param primitiveObjectInspector inspector that unwraps {@code data}
 * @return the string form produced by the delegate
 * @throws SerDeException if the delegate fails to render the value
 */
private String toString(String columnName, Object data, PrimitiveObjectInspector primitiveObjectInspector) throws SerDeException {
	return toString(columnName, primitiveObjectInspector.getPrimitiveJavaObject(data));
}
Example 10
Source File: SMSerDe.java From spliceengine with GNU Affero General Public License v3.0 | 4 votes |
/**
 * This method takes an object representing a row of data from Hive, and
 * uses the ObjectInspector to get the data for each column and serialize
 * it.
 *
 * Builds a fresh ExecRow from the table metadata, copies each struct field
 * into the matching column (1-based) after unwrapping it to a plain Java
 * object, and returns the row wrapped in an ExecRowWritable.
 */
//@Override
public Writable serialize(Object obj, ObjectInspector oi) throws SerDeException {
	ExecRow row = null;
	int[] execRowFormatIds = null;
	try {
		List<NameType> nameTypes = sqlUtil.getTableStructure(tableName);
		execRowFormatIds = sqlUtil.getExecRowFormatIds(colNames, nameTypes);
		row = sqlUtil.getExecRow(execRowFormatIds);
		if (row == null)
			throw new SerDeException("ExecRow Cannot be Null");
	} catch (SQLException | StandardException | IOException e1) {
		throw new SerDeException(e1);
	}
	if (Log.isTraceEnabled())
		SpliceLogUtils.trace(Log, "serialize with obj=%s, oi=%s", obj, oi);
	// Only struct-shaped rows can be serialized.
	if (oi.getCategory() != ObjectInspector.Category.STRUCT) {
		throw new SerDeException(getClass().toString() + " can only serialize struct types, but we got: " + oi.getTypeName());
	}
	StructObjectInspector soi = (StructObjectInspector) oi;
	List<? extends StructField> fields = soi.getAllStructFieldRefs();
	try {
		DataValueDescriptor dvd;
		for (int i = 0; i < fields.size(); i++) {
			StructField field = fields.get(i);
			// ExecRow columns are 1-based.
			dvd = row.getColumn(i + 1);
			ObjectInspector fieldOI = field.getFieldObjectInspector();
			Object fieldObj = soi.getStructFieldData(obj, field);
			// NOTE(review): this assumes every field is primitive — a non-primitive
			// inspector would throw ClassCastException here.
			PrimitiveObjectInspector primOI = (PrimitiveObjectInspector) fieldOI;
			Object data = primOI.getPrimitiveJavaObject(fieldObj);
			PrimitiveCategory primitiveCategory = primOI.getPrimitiveCategory();
			switch (primitiveCategory) {
				case BYTE:
					dvd.setValue(((Byte) data).byteValue());
					break;
				case INT:
					dvd.setValue(((Integer) data).intValue());
					break;
				case VARCHAR:
					dvd.setValue(((HiveVarchar) data).getValue());
					break;
				case CHAR:
					dvd.setValue(((HiveChar) data).getValue());
					break;
				case STRING:
					dvd.setValue((String) data);
					break;
				case BINARY:
					dvd.setValue((SerializationUtils.serialize((Serializable) data))); // is this right? Should just be a byte[]
					break;
				case BOOLEAN:
					dvd.setValue(((Boolean) data).booleanValue());
					break;
				case DECIMAL:
					// NOTE(review): converting HiveDecimal to double may lose precision — confirm intended.
					dvd.setValue(((HiveDecimal) data).doubleValue());
					break;
				case DOUBLE:
					dvd.setValue(((Double) data).doubleValue());
					break;
				case FLOAT:
					dvd.setValue(((Float) data).floatValue());
					break;
				case LONG:
					dvd.setValue(((Long) data).longValue());
					break;
				case SHORT:
					dvd.setValue(((Short) data).shortValue());
					break;
				case TIMESTAMP:
					dvd.setValue((Timestamp) data);
					break;
				case DATE:
					dvd.setValue((java.sql.Date) data);
					break;
				default:
					throw new SerDeException(String.format("Hive Type %s Not Supported Yet", primOI.getPrimitiveCategory()));
			}
		}
	} catch (StandardException e) {
		// TODO Auto-generated catch block
		// NOTE(review): the cause `e` is dropped here — consider chaining it.
		throw new RuntimeException("Serialized Object To Java Type Error");
	}
	ExecRowWritable rowWritable = new ExecRowWritable(WriteReadUtils.getExecRowFromTypeFormatIds(execRowFormatIds));
	rowWritable.set(row);
	return rowWritable;
}
Example 11
Source File: DataToSketchUDAF.java From incubator-datasketches-hive with Apache License 2.0 | 3 votes |
/**
 * Override this if it takes more than a cast to convert a Hive value into
 * the sketch update type U.
 *
 * @param data Hive value object
 * @param valueInspector PrimitiveObjectInspector for the value
 * @return extracted value
 * @throws HiveException if anything goes wrong
 */
public U extractValue(final Object data, final PrimitiveObjectInspector valueInspector) throws HiveException {
	// Default behavior: unwrap to the plain Java object and cast straight to U.
	@SuppressWarnings("unchecked")
	final U extracted = (U) valueInspector.getPrimitiveJavaObject(data);
	return extracted;
}
Example 12
Source File: JSONCDHSerDe.java From bigdata-tutorial with Apache License 2.0 | 2 votes |
/**
 * Unwraps a Hive primitive into its corresponding plain Java object.
 *
 * @param obj the Hive-internal (possibly Writable) value to deparse
 * @param primOI the inspector that knows how to unwrap {@code obj}
 * @return the plain Java representation of {@code obj}
 */
private Object deparsePrimitive(Object obj, PrimitiveObjectInspector primOI) {
	Object javaValue = primOI.getPrimitiveJavaObject(obj);
	return javaValue;
}
Example 13
Source File: JSONSerDe.java From searchanalytics-bigdata with MIT License | 2 votes |
/**
 * Deparses a primitive type: unwraps the Hive-internal value into its plain
 * Java representation.
 *
 * @param obj
 *            - Hive object to deparse
 * @param primOI
 *            - ObjectInspector for the object
 * @return - A deparsed object
 */
private Object deparsePrimitive(final Object obj, final PrimitiveObjectInspector primOI) {
	return primOI.getPrimitiveJavaObject(obj);
}