org.apache.hadoop.hive.serde2.io.HiveVarcharWritable Java Examples
The following examples show how to use
org.apache.hadoop.hive.serde2.io.HiveVarcharWritable.
Each example includes a link to the original project and source file it was taken from.
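Before the per-project examples, here is a minimal standalone sketch (not taken from any of the projects below) of the core HiveVarcharWritable API: setting a String under a declared maximum length and reading it back as a HiveVarchar. The truncation noted in the comments reflects how Hive's varchar type normally enforces its length; treat the exact printed output as an assumption.

import org.apache.hadoop.hive.common.type.HiveVarchar;
import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;

public class HiveVarcharWritableDemo {
  public static void main(String[] args) {
    // Wrap a Java String as a varchar(10); values longer than the
    // declared length are expected to be truncated on set().
    HiveVarcharWritable writable = new HiveVarcharWritable();
    writable.set("hello world", 10);

    // Read the value back as a HiveVarchar.
    HiveVarchar value = writable.getHiveVarchar();
    System.out.println(value.getValue());               // expected: "hello worl"
    System.out.println(HiveVarchar.MAX_VARCHAR_LENGTH); // 65535
  }
}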
Example #1
Source File: CacheablePrimitiveObjectInspectorConverter.java From transport with BSD 2-Clause "Simplified" License
@Override
public Object convert(Object input) {
  if (input == null) {
    return null;
  }
  // unfortunately we seem to get instances of varchar object inspectors without params
  // when an old-style UDF has an evaluate() method with varchar arguments.
  // If we disallow varchar in old-style UDFs and only allow GenericUDFs to be defined
  // with varchar arguments, then we might be able to enforce this properly.
  //if (typeParams == null) {
  //  throw new RuntimeException("varchar type used without type params");
  //}
  HiveVarcharWritable hc = new HiveVarcharWritable();
  switch (inputOI.getPrimitiveCategory()) {
    case BOOLEAN:
      return outputOI.set(hc,
          ((BooleanObjectInspector) inputOI).get(input)
              ? new HiveVarchar("TRUE", -1)
              : new HiveVarchar("FALSE", -1));
    default:
      return outputOI.set(hc, PrimitiveObjectInspectorUtils.getHiveVarchar(input, inputOI));
  }
}
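The converter above is transport's own implementation. For comparison, the sketch below shows the same string-to-varchar conversion done through Hive's standard ObjectInspectorConverters factory instead; the varchar(5) length and the expected truncated result are assumptions chosen for illustration.

import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.io.Text;

public class VarcharConverterSketch {
  public static void main(String[] args) {
    // Source: a writable string inspector; target: varchar(5).
    ObjectInspector sourceOI = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
    ObjectInspector targetOI = PrimitiveObjectInspectorFactory
        .getPrimitiveWritableObjectInspector(TypeInfoFactory.getVarcharTypeInfo(5));

    ObjectInspectorConverters.Converter converter =
        ObjectInspectorConverters.getConverter(sourceOI, targetOI);

    // The standard converter also hands back a HiveVarcharWritable,
    // truncated to the target varchar length.
    HiveVarcharWritable result = (HiveVarcharWritable) converter.convert(new Text("conversion"));
    System.out.println(result.getHiveVarchar().getValue()); // expected: "conve"
  }
}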
Example #2
Source File: ObjectInspectors.java From dremio-oss with Apache License 2.0
@Override
public HiveVarcharWritable getPrimitiveWritableObject(Object o) {
<#if mode == "Optional">
  if (o == null) {
    return null;
  }
  final NullableVarCharHolder h = (NullableVarCharHolder) o;
<#else>
  final VarCharHolder h = (VarCharHolder) o;
</#if>
  final HiveVarcharWritable valW = new HiveVarcharWritable();
  valW.set(StringFunctionHelpers.toStringFromUTF8(h.start, h.end, h.buffer),
      HiveVarchar.MAX_VARCHAR_LENGTH);
  return valW;
}
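The <#if mode == "Optional"> and <#else> markers above are FreeMarker template directives: Dremio generates one inspector class per mode from this template. For readability, here is a sketch of how the generated "Optional" variant would roughly read (the class and helper names come straight from the template; the expansion itself is an assumption):

// Generated "Optional" variant (sketch): null-check the holder, then copy the
// UTF-8 bytes out of the Arrow buffer into a fresh HiveVarcharWritable.
@Override
public HiveVarcharWritable getPrimitiveWritableObject(Object o) {
  if (o == null) {
    return null;
  }
  final NullableVarCharHolder h = (NullableVarCharHolder) o;
  final HiveVarcharWritable valW = new HiveVarcharWritable();
  valW.set(StringFunctionHelpers.toStringFromUTF8(h.start, h.end, h.buffer),
      HiveVarchar.MAX_VARCHAR_LENGTH);
  return valW;
}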
Example #3
Source File: HiveInspectors.java From flink with Apache License 2.0
public static ObjectInspector getObjectInspector(Class clazz) {
  TypeInfo typeInfo;

  if (clazz.equals(String.class) || clazz.equals(Text.class)) {
    typeInfo = TypeInfoFactory.stringTypeInfo;
  } else if (clazz.equals(Boolean.class) || clazz.equals(BooleanWritable.class)) {
    typeInfo = TypeInfoFactory.booleanTypeInfo;
  } else if (clazz.equals(Byte.class) || clazz.equals(ByteWritable.class)) {
    typeInfo = TypeInfoFactory.byteTypeInfo;
  } else if (clazz.equals(Short.class) || clazz.equals(ShortWritable.class)) {
    typeInfo = TypeInfoFactory.shortTypeInfo;
  } else if (clazz.equals(Integer.class) || clazz.equals(IntWritable.class)) {
    typeInfo = TypeInfoFactory.intTypeInfo;
  } else if (clazz.equals(Long.class) || clazz.equals(LongWritable.class)) {
    typeInfo = TypeInfoFactory.longTypeInfo;
  } else if (clazz.equals(Float.class) || clazz.equals(FloatWritable.class)) {
    typeInfo = TypeInfoFactory.floatTypeInfo;
  } else if (clazz.equals(Double.class) || clazz.equals(DoubleWritable.class)) {
    typeInfo = TypeInfoFactory.doubleTypeInfo;
  } else if (clazz.equals(Date.class) || clazz.equals(DateWritable.class)) {
    typeInfo = TypeInfoFactory.dateTypeInfo;
  } else if (clazz.equals(Timestamp.class) || clazz.equals(TimestampWritable.class)) {
    typeInfo = TypeInfoFactory.timestampTypeInfo;
  } else if (clazz.equals(byte[].class) || clazz.equals(BytesWritable.class)) {
    typeInfo = TypeInfoFactory.binaryTypeInfo;
  } else if (clazz.equals(HiveChar.class) || clazz.equals(HiveCharWritable.class)) {
    typeInfo = TypeInfoFactory.charTypeInfo;
  } else if (clazz.equals(HiveVarchar.class) || clazz.equals(HiveVarcharWritable.class)) {
    typeInfo = TypeInfoFactory.varcharTypeInfo;
  } else if (clazz.equals(HiveDecimal.class) || clazz.equals(HiveDecimalWritable.class)) {
    typeInfo = TypeInfoFactory.decimalTypeInfo;
  } else {
    throw new FlinkHiveUDFException(
        String.format("Class %s is not supported yet", clazz.getName()));
  }

  return getObjectInspector(typeInfo);
}
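A brief, hedged usage sketch of the mapping above for the varchar case: calling the method with either HiveVarchar.class or HiveVarcharWritable.class should yield an inspector built from TypeInfoFactory.varcharTypeInfo, which defaults to Hive's maximum varchar length. The printed type name is an assumption based on that default.

// Hypothetical call into the method shown above.
ObjectInspector oi = HiveInspectors.getObjectInspector(HiveVarcharWritable.class);
System.out.println(oi.getTypeName()); // expected something like "varchar(65535)"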