Java Code Examples for org.apache.flink.table.functions.hive.conversion.HiveInspectors#toInspectors()
The following examples show how to use org.apache.flink.table.functions.hive.conversion.HiveInspectors#toInspectors().
You can go to the original project or source file by following the link above each example.
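Both variants of toInspectors() that appear below take the constant argument values together with their Flink DataTypes and return one Hive ObjectInspector per argument; newer Flink versions add a leading HiveShim parameter for version-specific behavior. The following is a minimal standalone sketch, not taken from the Flink sources: the argument values and types are illustrative assumptions, as is the reading that a null entry in the constant-arguments array marks a non-constant argument.

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.functions.hive.conversion.HiveInspectors;
import org.apache.flink.table.types.DataType;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;

public class ToInspectorsSketch {
	public static void main(String[] args) {
		// Illustrative arguments: a constant string plus an int argument whose
		// null entry is assumed to mark it as non-constant.
		Object[] constantArguments = new Object[] {"hello", null};
		DataType[] argTypes = new DataType[] {DataTypes.STRING(), DataTypes.INT()};

		// Older overload (Examples 1-3, 8, and 10); newer Flink versions pass
		// a HiveShim as the first parameter (Examples 4-7, 9, and 11).
		ObjectInspector[] inspectors = HiveInspectors.toInspectors(constantArguments, argTypes);

		// One inspector per argument.
		System.out.println(inspectors.length);
	}
}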
Example 1
Source File: HiveGenericUDAF.java from flink, with Apache License 2.0
private void init() throws HiveException {
	ObjectInspector[] inputInspectors = HiveInspectors.toInspectors(constantArguments, argTypes);

	// Flink UDAF only supports Hive UDAF's PARTIAL_1 and FINAL mode

	// PARTIAL1: from original data to partial aggregation data:
	// iterate() and terminatePartial() will be called.
	this.partialEvaluator = createEvaluator(inputInspectors);
	this.partialResultObjectInspector = partialEvaluator.init(GenericUDAFEvaluator.Mode.PARTIAL1, inputInspectors);

	// FINAL: from partial aggregation to full aggregation:
	// merge() and terminate() will be called.
	this.finalEvaluator = createEvaluator(inputInspectors);
	this.finalResultObjectInspector = finalEvaluator.init(
		GenericUDAFEvaluator.Mode.FINAL,
		new ObjectInspector[]{ partialResultObjectInspector });

	conversions = new HiveObjectConversion[inputInspectors.length];
	for (int i = 0; i < inputInspectors.length; i++) {
		conversions[i] = HiveInspectors.getConversion(inputInspectors[i], argTypes[i].getLogicalType());
	}
	allIdentityConverter = Arrays.stream(conversions)
		.allMatch(conv -> conv instanceof IdentityConversion);

	initialized = true;
}
Example 2
Source File: HiveGenericUDTF.java from flink, with Apache License 2.0
@Override
public void open(FunctionContext context) throws Exception {
	function = hiveFunctionWrapper.createFunction();

	// Route every row produced by the Hive UDTF into Flink's collector.
	function.setCollector(input -> {
		Row row = (Row) HiveInspectors.toFlinkObject(returnInspector, input);
		HiveGenericUDTF.this.collect(row);
	});

	ObjectInspector[] argumentInspectors = HiveInspectors.toInspectors(constantArguments, argTypes);
	returnInspector = function.initialize(argumentInspectors);

	isArgsSingleArray = HiveFunctionUtil.isSingleBoxedArray(argTypes);

	// Pre-compute the conversion from Flink values to Hive objects for each argument.
	conversions = new HiveObjectConversion[argumentInspectors.length];
	for (int i = 0; i < argumentInspectors.length; i++) {
		conversions[i] = HiveInspectors.getConversion(argumentInspectors[i], argTypes[i].getLogicalType());
	}

	allIdentityConverter = Arrays.stream(conversions)
		.allMatch(conv -> conv instanceof IdentityConversion);
}
Example 3
Source File: HiveGenericUDF.java from flink, with Apache License 2.0
@Override
public DataType getHiveResultType(Object[] constantArguments, DataType[] argTypes) {
	LOG.info("Getting result type of HiveGenericUDF from {}", hiveFunctionWrapper.getClassName());

	try {
		ObjectInspector[] argumentInspectors = HiveInspectors.toInspectors(constantArguments, argTypes);

		ObjectInspector resultObjectInspector =
			hiveFunctionWrapper.createFunction().initializeAndFoldConstants(argumentInspectors);

		return HiveTypeUtil.toFlinkType(
			TypeInfoUtils.getTypeInfoFromObjectInspector(resultObjectInspector));
	} catch (UDFArgumentException e) {
		throw new FlinkHiveUDFException(e);
	}
}
Example 4
Source File: HiveGenericUDAF.java from flink, with Apache License 2.0
private void init() throws HiveException {
	ObjectInspector[] inputInspectors = HiveInspectors.toInspectors(hiveShim, constantArguments, argTypes);

	// Flink UDAF only supports Hive UDAF's PARTIAL_1 and FINAL mode

	// PARTIAL1: from original data to partial aggregation data:
	// iterate() and terminatePartial() will be called.
	this.partialEvaluator = createEvaluator(inputInspectors);
	this.partialResultObjectInspector = partialEvaluator.init(GenericUDAFEvaluator.Mode.PARTIAL1, inputInspectors);

	// FINAL: from partial aggregation to full aggregation:
	// merge() and terminate() will be called.
	this.finalEvaluator = createEvaluator(inputInspectors);
	this.finalResultObjectInspector = finalEvaluator.init(
		GenericUDAFEvaluator.Mode.FINAL,
		new ObjectInspector[]{ partialResultObjectInspector });

	conversions = new HiveObjectConversion[inputInspectors.length];
	for (int i = 0; i < inputInspectors.length; i++) {
		conversions[i] = HiveInspectors.getConversion(inputInspectors[i], argTypes[i].getLogicalType(), hiveShim);
	}
	allIdentityConverter = Arrays.stream(conversions)
		.allMatch(conv -> conv instanceof IdentityConversion);

	initialized = true;
}
Example 5
Source File: HiveGenericUDTF.java from flink, with Apache License 2.0
@Override
public void open(FunctionContext context) throws Exception {
	function = hiveFunctionWrapper.createFunction();

	// Route every row produced by the Hive UDTF into Flink's collector.
	function.setCollector(input -> {
		Row row = (Row) HiveInspectors.toFlinkObject(returnInspector, input, hiveShim);
		HiveGenericUDTF.this.collect(row);
	});

	ObjectInspector[] argumentInspectors = HiveInspectors.toInspectors(hiveShim, constantArguments, argTypes);
	returnInspector = function.initialize(argumentInspectors);

	isArgsSingleArray = HiveFunctionUtil.isSingleBoxedArray(argTypes);

	// Pre-compute the conversion from Flink values to Hive objects for each argument.
	conversions = new HiveObjectConversion[argumentInspectors.length];
	for (int i = 0; i < argumentInspectors.length; i++) {
		conversions[i] = HiveInspectors.getConversion(argumentInspectors[i], argTypes[i].getLogicalType(), hiveShim);
	}

	allIdentityConverter = Arrays.stream(conversions)
		.allMatch(conv -> conv instanceof IdentityConversion);
}
Example 6
Source File: HiveGenericUDF.java from flink, with Apache License 2.0
@Override
public void openInternal() {
	LOG.info("Open HiveGenericUDF as {}", hiveFunctionWrapper.getClassName());

	function = hiveFunctionWrapper.createFunction();

	ObjectInspector[] argInspectors = HiveInspectors.toInspectors(hiveShim, constantArguments, argTypes);

	try {
		returnInspector = function.initializeAndFoldConstants(argInspectors);
	} catch (UDFArgumentException e) {
		throw new FlinkHiveUDFException(e);
	}

	deferredObjects = new GenericUDF.DeferredObject[argTypes.length];

	for (int i = 0; i < deferredObjects.length; i++) {
		deferredObjects[i] = new DeferredObjectAdapter(
			argInspectors[i],
			argTypes[i].getLogicalType(),
			hiveShim
		);
	}
}
Example 7
Source File: HiveGenericUDF.java from flink, with Apache License 2.0
@Override
public DataType getHiveResultType(Object[] constantArguments, DataType[] argTypes) {
	LOG.info("Getting result type of HiveGenericUDF from {}", hiveFunctionWrapper.getClassName());

	try {
		ObjectInspector[] argumentInspectors = HiveInspectors.toInspectors(hiveShim, constantArguments, argTypes);

		ObjectInspector resultObjectInspector =
			hiveFunctionWrapper.createFunction().initializeAndFoldConstants(argumentInspectors);

		return HiveTypeUtil.toFlinkType(
			TypeInfoUtils.getTypeInfoFromObjectInspector(resultObjectInspector));
	} catch (UDFArgumentException e) {
		throw new FlinkHiveUDFException(e);
	}
}
Example 8
Source File: HiveGenericUDTF.java from flink, with Apache License 2.0
@Override
public DataType getHiveResultType(Object[] constantArguments, DataType[] argTypes) {
	LOG.info("Getting result type of HiveGenericUDTF with {}", hiveFunctionWrapper.getClassName());

	try {
		ObjectInspector[] argumentInspectors = HiveInspectors.toInspectors(constantArguments, argTypes);
		return HiveTypeUtil.toFlinkType(
			hiveFunctionWrapper.createFunction().initialize(argumentInspectors));
	} catch (UDFArgumentException e) {
		throw new FlinkHiveUDFException(e);
	}
}
Example 9
Source File: HiveGenericUDTF.java from flink, with Apache License 2.0
@Override
public DataType getHiveResultType(Object[] constantArguments, DataType[] argTypes) {
	LOG.info("Getting result type of HiveGenericUDTF with {}", hiveFunctionWrapper.getClassName());

	try {
		ObjectInspector[] argumentInspectors = HiveInspectors.toInspectors(hiveShim, constantArguments, argTypes);
		return HiveTypeUtil.toFlinkType(
			hiveFunctionWrapper.createFunction().initialize(argumentInspectors));
	} catch (UDFArgumentException e) {
		throw new FlinkHiveUDFException(e);
	}
}
Example 10
Source File: HiveGenericUDTFTest.java from flink, with Apache License 2.0
private static HiveGenericUDTF init(Class hiveUdfClass, Object[] constantArgs, DataType[] argTypes) throws Exception {
	HiveFunctionWrapper<GenericUDTF> wrapper = new HiveFunctionWrapper(hiveUdfClass.getName());
	HiveGenericUDTF udf = new HiveGenericUDTF(wrapper);

	udf.setArgumentTypesAndConstants(constantArgs, argTypes);
	udf.getHiveResultType(constantArgs, argTypes);

	ObjectInspector[] argumentInspectors = HiveInspectors.toInspectors(constantArgs, argTypes);
	ObjectInspector returnInspector = wrapper.createFunction().initialize(argumentInspectors);

	udf.open(null);

	collector = new TestCollector(returnInspector);
	udf.setCollector(collector);

	return udf;
}
Example 11
Source File: HiveGenericUDTFTest.java from flink, with Apache License 2.0
private static HiveGenericUDTF init(Class hiveUdfClass, Object[] constantArgs, DataType[] argTypes) throws Exception {
	HiveFunctionWrapper<GenericUDTF> wrapper = new HiveFunctionWrapper(hiveUdfClass.getName());
	HiveGenericUDTF udf = new HiveGenericUDTF(wrapper, hiveShim);

	udf.setArgumentTypesAndConstants(constantArgs, argTypes);
	udf.getHiveResultType(constantArgs, argTypes);

	ObjectInspector[] argumentInspectors = HiveInspectors.toInspectors(hiveShim, constantArgs, argTypes);
	ObjectInspector returnInspector = wrapper.createFunction().initialize(argumentInspectors);

	udf.open(null);

	collector = new TestCollector(returnInspector);
	udf.setCollector(collector);

	return udf;
}