Java Code Examples for org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters#getConverter()
The following examples show how to use
org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters#getConverter().
Each example links to the original project and source file it was taken from.
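Before the individual examples, here is a minimal, self-contained sketch of the pattern most of the UDF examples below follow: getConverter() is called once per argument in initialize() to obtain a Converter targeting a standard object inspector, and that converter is applied to each raw argument in evaluate(). The UpperCaseUDF class and the upper_case function name are hypothetical, used only for illustration; they do not come from any of the projects below.

// Hypothetical GenericUDF illustrating the common getConverter() pattern.
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.Text;

public class UpperCaseUDF extends GenericUDF {
    private ObjectInspectorConverters.Converter[] converters;

    @Override
    public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
        if (arguments.length != 1) {
            throw new UDFArgumentLengthException("upper_case(str) takes exactly 1 argument.");
        }
        // One converter per argument: whatever inspector the caller provides
        // (string, varchar, char, ...) is converted to a writable Text at evaluation time.
        converters = new ObjectInspectorConverters.Converter[arguments.length];
        for (int i = 0; i < arguments.length; i++) {
            converters[i] = ObjectInspectorConverters.getConverter(arguments[i],
                    PrimitiveObjectInspectorFactory.writableStringObjectInspector);
        }
        return PrimitiveObjectInspectorFactory.writableStringObjectInspector;
    }

    @Override
    public Object evaluate(DeferredObject[] arguments) throws HiveException {
        Object arg = arguments[0].get();
        if (arg == null) {
            return null;
        }
        // Apply the converter built in initialize() before using the value.
        Text value = (Text) converters[0].convert(arg);
        return new Text(value.toString().toUpperCase());
    }

    @Override
    public String getDisplayString(String[] children) {
        return "upper_case(" + children[0] + ")";
    }
}

The converter keeps evaluate() independent of the caller's actual argument type: whatever primitive the query passes, convert() hands back a value matching the target inspector chosen in initialize().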
Example 1
Source File: HiveTextReader.java From dremio-oss with Apache License 2.0 | 6 votes |
@Override
public void internalInit(InputSplit inputSplit, JobConf jobConf, ValueVector[] vectors) throws IOException {
    try (OperatorStats.WaitRecorder recorder = OperatorStats.getWaitRecorder(this.context.getStats())) {
        reader = jobConf.getInputFormat().getRecordReader(inputSplit, jobConf, Reporter.NULL);
    } catch (FSError e) {
        throw HadoopFileSystemWrapper.propagateFSError(e);
    }

    if (logger.isTraceEnabled()) {
        logger.trace("hive reader created: {} for inputSplit {}", reader.getClass().getName(), inputSplit.toString());
    }

    key = reader.createKey();

    final FileSplit fileSplit = (FileSplit) inputSplit;
    skipRecordsInspector = new SkipRecordsInspector(fileSplit.getStart(), jobConf, reader);

    if (!partitionOI.equals(finalOI)) {
        // If the partition and table have different schemas, create a converter
        partTblObjectInspectorConverter = ObjectInspectorConverters.getConverter(partitionOI, finalOI);
    }
}
Example 2
Source File: UDFJsonArrayExtractScalar.java From hive-third-functions with Apache License 2.0 | 6 votes |
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
    if (arguments.length != 2) {
        throw new UDFArgumentLengthException(
                "The function json_array_extract_scalar(json, json_path) takes exactly 2 arguments.");
    }

    converters = new ObjectInspectorConverters.Converter[arguments.length];
    for (int i = 0; i < arguments.length; i++) {
        converters[i] = ObjectInspectorConverters.getConverter(arguments[i],
                PrimitiveObjectInspectorFactory.writableStringObjectInspector);
    }

    return ObjectInspectorFactory.getStandardListObjectInspector(
            PrimitiveObjectInspectorFactory.writableStringObjectInspector);
}
Example 3
Source File: UDFJsonArrayExtract.java From hive-third-functions with Apache License 2.0 | 6 votes |
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
    if (arguments.length != 2) {
        throw new UDFArgumentLengthException(
                "The function json_array_extract(json, json_path) takes exactly 2 arguments.");
    }

    converters = new ObjectInspectorConverters.Converter[arguments.length];
    for (int i = 0; i < arguments.length; i++) {
        converters[i] = ObjectInspectorConverters.getConverter(arguments[i],
                PrimitiveObjectInspectorFactory.writableStringObjectInspector);
    }

    return ObjectInspectorFactory.getStandardListObjectInspector(
            PrimitiveObjectInspectorFactory.writableStringObjectInspector);
}
Example 4
Source File: Geoloc.java From hiped2 with Apache License 2.0 | 6 votes |
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
    if (arguments.length != 2) {
        throw new UDFArgumentLengthException(
                "The function COUNTRY(ip, geolocfile) takes exactly 2 arguments.");
    }

    converters = new ObjectInspectorConverters.Converter[arguments.length];
    for (int i = 0; i < arguments.length; i++) {
        converters[i] = ObjectInspectorConverters.getConverter(arguments[i],
                PrimitiveObjectInspectorFactory.javaStringObjectInspector);
    }

    return PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(
            PrimitiveObjectInspector.PrimitiveCategory.STRING);
}
Example 5
Source File: DateParseUDF.java From occurrence with Apache License 2.0 | 6 votes |
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
    if (arguments.length != 4) {
        throw new UDFArgumentException("parseDate takes four arguments");
    }

    converters = new ObjectInspectorConverters.Converter[arguments.length];
    for (int i = 0; i < arguments.length; i++) {
        converters[i] = ObjectInspectorConverters.getConverter(arguments[i],
                PrimitiveObjectInspectorFactory.writableStringObjectInspector);
    }

    return ObjectInspectorFactory.getStandardStructObjectInspector(
            Arrays.asList("year", "month", "day", "epoch"),
            Arrays.<ObjectInspector>asList(
                    PrimitiveObjectInspectorFactory.javaIntObjectInspector,
                    PrimitiveObjectInspectorFactory.javaIntObjectInspector,
                    PrimitiveObjectInspectorFactory.javaIntObjectInspector,
                    PrimitiveObjectInspectorFactory.javaLongObjectInspector));
}
Example 6
Source File: ReinterpretLocationUDF.java From occurrence with Apache License 2.0 | 6 votes |
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
    if (arguments.length != argLength) {
        throw new UDFArgumentException("compareLocationInterpretation takes 9 arguments");
    }

    converters = new ObjectInspectorConverters.Converter[arguments.length];
    for (int i = 0; i < arguments.length; i++) {
        converters[i] = ObjectInspectorConverters.getConverter(arguments[i],
                PrimitiveObjectInspectorFactory.writableStringObjectInspector);
    }

    return ObjectInspectorFactory.getStandardStructObjectInspector(
            Arrays.asList("decimallatitude", "decimallongitude", "countrycode"),
            Arrays.<ObjectInspector>asList(
                    PrimitiveObjectInspectorFactory.javaDoubleObjectInspector,
                    PrimitiveObjectInspectorFactory.javaDoubleObjectInspector,
                    PrimitiveObjectInspectorFactory.javaStringObjectInspector));
}
Example 7
Source File: CoordinateCountryParseUDF.java From occurrence with Apache License 2.0 | 6 votes |
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
    if (arguments.length != 4) {
        throw new UDFArgumentException("parseCoordinates takes four arguments");
    }

    converters = new ObjectInspectorConverters.Converter[arguments.length];
    for (int i = 0; i < arguments.length; i++) {
        converters[i] = ObjectInspectorConverters.getConverter(arguments[i],
                PrimitiveObjectInspectorFactory.writableStringObjectInspector);
    }

    return ObjectInspectorFactory.getStandardStructObjectInspector(
            Arrays.asList("latitude", "longitude", "country"),
            Arrays.<ObjectInspector>asList(
                    PrimitiveObjectInspectorFactory.javaDoubleObjectInspector,
                    PrimitiveObjectInspectorFactory.javaDoubleObjectInspector,
                    PrimitiveObjectInspectorFactory.javaStringObjectInspector));
}
Example 8
Source File: UDFArrayJoin.java From hive-third-functions with Apache License 2.0 | 5 votes |
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
    // Check if two arguments were passed
    if (arguments.length > MAX_ARG_COUNT || arguments.length < MIN_ARG_COUNT) {
        throw new UDFArgumentLengthException(
                "The function array_join(array, delimiter) or array_join(array, delimiter, null_replacement) takes exactly "
                        + MIN_ARG_COUNT + " or " + MAX_ARG_COUNT + " arguments.");
    }

    // Check if ARRAY_IDX argument is of category LIST
    if (!arguments[ARRAY_IDX].getCategory().equals(ObjectInspector.Category.LIST)) {
        throw new UDFArgumentTypeException(ARRAY_IDX,
                "\"" + org.apache.hadoop.hive.serde.serdeConstants.LIST_TYPE_NAME + "\" "
                        + "expected at function array_join, but "
                        + "\"" + arguments[ARRAY_IDX].getTypeName() + "\" " + "is found");
    }

    arrayOI = (ListObjectInspector) arguments[ARRAY_IDX];
    arrayElementOI = arrayOI.getListElementObjectInspector();

    delimiterOI = arguments[DELIMITER_IDX];
    delimiterConvert = ObjectInspectorConverters.getConverter(delimiterOI,
            PrimitiveObjectInspectorFactory.writableStringObjectInspector);

    if (arguments.length == MAX_ARG_COUNT) {
        nullReplaceOI = arguments[NULL_REPLACE_IDX];
        nullReplaceConvert = ObjectInspectorConverters.getConverter(nullReplaceOI,
                PrimitiveObjectInspectorFactory.writableStringObjectInspector);
    }

    result = new Text();
    return PrimitiveObjectInspectorFactory.writableStringObjectInspector;
}
Example 9
Source File: MapGetUDF.java From incubator-hivemall with Apache License 2.0 | 5 votes |
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
    if (arguments.length != 2) {
        throw new UDFArgumentLengthException("map_get accepts exactly 2 arguments.");
    }

    if (arguments[0] instanceof MapObjectInspector) {
        this.mapOI = (MapObjectInspector) arguments[0];
    } else {
        throw new UDFArgumentTypeException(0, "\"map\" is expected for the first argument, but \""
                + arguments[0].getTypeName() + "\" is found");
    }

    // index has to be a primitive
    if (!(arguments[1] instanceof PrimitiveObjectInspector)) {
        throw new UDFArgumentTypeException(1, "Primitive Type is expected but "
                + arguments[1].getTypeName() + "\" is found");
    }

    PrimitiveObjectInspector inputOI = (PrimitiveObjectInspector) arguments[1];
    ObjectInspector indexOI =
            ObjectInspectorConverters.getConvertedOI(inputOI, mapOI.getMapKeyObjectInspector());
    this.converter = ObjectInspectorConverters.getConverter(inputOI, indexOI);

    return mapOI.getMapValueObjectInspector();
}
Example 10
Source File: TryCastUDF.java From incubator-hivemall with Apache License 2.0 | 5 votes |
@Override
public ObjectInspector initialize(ObjectInspector[] argOIs) throws UDFArgumentException {
    if (argOIs.length != 2) {
        throw new UDFArgumentException(
                "try_cast(ANY src, const string typeName) expects exactly two arguments");
    }

    this.inputOI = argOIs[0];

    String typeString = HiveUtils.getConstString(argOIs, 1);
    ObjectInspector outputOI = HiveUtils.getObjectInspector(typeString, true);
    this.converter = ObjectInspectorConverters.getConverter(inputOI, outputOI);

    return outputOI;
}
Example 11
Source File: HiveGenericUDTFTest.java From flink with Apache License 2.0 | 5 votes |
@Override
public StructObjectInspector initialize(ObjectInspector[] argOIs) throws UDFArgumentException {
    converters = new ObjectInspectorConverters.Converter[argOIs.length];
    for (int i = 0; i < converters.length; i++) {
        converters[i] = ObjectInspectorConverters.getConverter(argOIs[i],
                PrimitiveObjectInspectorFactory.javaIntObjectInspector);
    }

    return ObjectInspectorFactory.getStandardStructObjectInspector(
            Collections.singletonList("col1"),
            Collections.singletonList(PrimitiveObjectInspectorFactory.javaIntObjectInspector));
}
Example 12
Source File: HiveJsonStructReader.java From incubator-hivemall with Apache License 2.0 | 4 votes |
private Object getObjectOfCorrespondingPrimitiveType(String s, PrimitiveObjectInspector oi)
        throws IOException {
    PrimitiveTypeInfo typeInfo = oi.getTypeInfo();
    if (writeablePrimitives) {
        Converter c = ObjectInspectorConverters.getConverter(
                PrimitiveObjectInspectorFactory.javaStringObjectInspector, oi);
        return c.convert(s);
    }

    switch (typeInfo.getPrimitiveCategory()) {
        case INT:
            return Integer.valueOf(s);
        case BYTE:
            return Byte.valueOf(s);
        case SHORT:
            return Short.valueOf(s);
        case LONG:
            return Long.valueOf(s);
        case BOOLEAN:
            return (s.equalsIgnoreCase("true"));
        case FLOAT:
            return Float.valueOf(s);
        case DOUBLE:
            return Double.valueOf(s);
        case STRING:
            return s;
        case BINARY:
            try {
                String t = Text.decode(s.getBytes(), 0, s.getBytes().length);
                return t.getBytes();
            } catch (CharacterCodingException e) {
                LOG.warn("Error generating json binary type from object.", e);
                return null;
            }
        case DATE:
            return Date.valueOf(s);
        case TIMESTAMP:
            return Timestamp.valueOf(s);
        case DECIMAL:
            return HiveDecimal.create(s);
        case VARCHAR:
            return new HiveVarchar(s, ((BaseCharTypeInfo) typeInfo).getLength());
        case CHAR:
            return new HiveChar(s, ((BaseCharTypeInfo) typeInfo).getLength());
        default:
            throw new IOException(
                    "Could not convert from string to " + typeInfo.getPrimitiveCategory());
    }
}