Java Code Examples for org.apache.flink.table.functions.hive.conversion.HiveInspectors#getConversion()
The following examples show how to use org.apache.flink.table.functions.hive.conversion.HiveInspectors#getConversion().
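Every example follows the same basic pattern: obtain an ObjectInspector for an argument or column type, ask HiveInspectors#getConversion() for a HiveObjectConversion, and later apply that conversion when handing Flink values to Hive code. Some examples use the two-argument overload (ObjectInspector, LogicalType), others the three-argument overload that additionally takes a HiveShim. The sketch below is illustrative only and is not taken from any of the source files; the names dataType, flinkValue and hiveShim are assumptions.

    // Illustrative sketch, not copied from Flink. Depending on the Flink version,
    // getConversion() may or may not take the HiveShim argument (both overloads
    // appear in the examples below).
    ObjectInspector inspector = HiveInspectors.getObjectInspector(dataType);
    HiveObjectConversion conversion =
            HiveInspectors.getConversion(inspector, dataType.getLogicalType(), hiveShim);
    Object hiveValue = conversion.toHiveObject(flinkValue); // convert a Flink value into its Hive form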
Example 1
Source File: HiveGenericUDAF.java From flink with Apache License 2.0
private void init() throws HiveException {
    ObjectInspector[] inputInspectors = HiveInspectors.toInspectors(constantArguments, argTypes);

    // Flink UDAF only supports Hive UDAF's PARTIAL_1 and FINAL mode

    // PARTIAL1: from original data to partial aggregation data:
    //           iterate() and terminatePartial() will be called.
    this.partialEvaluator = createEvaluator(inputInspectors);
    this.partialResultObjectInspector = partialEvaluator.init(GenericUDAFEvaluator.Mode.PARTIAL1, inputInspectors);

    // FINAL: from partial aggregation to full aggregation:
    //        merge() and terminate() will be called.
    this.finalEvaluator = createEvaluator(inputInspectors);
    this.finalResultObjectInspector = finalEvaluator.init(
        GenericUDAFEvaluator.Mode.FINAL,
        new ObjectInspector[]{ partialResultObjectInspector });

    conversions = new HiveObjectConversion[inputInspectors.length];
    for (int i = 0; i < inputInspectors.length; i++) {
        conversions[i] = HiveInspectors.getConversion(inputInspectors[i], argTypes[i].getLogicalType());
    }
    allIdentityConverter = Arrays.stream(conversions)
        .allMatch(conv -> conv instanceof IdentityConversion);

    initialized = true;
}
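Example 1 also caches whether every returned conversion is an IdentityConversion. A hedged sketch of how that flag is typically used per call, so the conversion loop can be skipped entirely when nothing needs converting (the helper name toHiveArgs is made up for illustration):

    // Illustrative only: convert the Flink arguments to their Hive representations,
    // short-circuiting when all conversions are identity conversions.
    private Object[] toHiveArgs(Object[] args) {
        if (allIdentityConverter) {
            return args;
        }
        Object[] hiveArgs = new Object[args.length];
        for (int i = 0; i < args.length; i++) {
            hiveArgs[i] = conversions[i].toHiveObject(args[i]);
        }
        return hiveArgs;
    }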
Example 2
Source File: HiveGenericUDTF.java From flink with Apache License 2.0
@Override
public void open(FunctionContext context) throws Exception {
    function = hiveFunctionWrapper.createFunction();

    function.setCollector(input -> {
        Row row = (Row) HiveInspectors.toFlinkObject(returnInspector, input);
        HiveGenericUDTF.this.collect(row);
    });

    ObjectInspector[] argumentInspectors = HiveInspectors.toInspectors(constantArguments, argTypes);
    returnInspector = function.initialize(argumentInspectors);

    isArgsSingleArray = HiveFunctionUtil.isSingleBoxedArray(argTypes);

    conversions = new HiveObjectConversion[argumentInspectors.length];
    for (int i = 0; i < argumentInspectors.length; i++) {
        conversions[i] = HiveInspectors.getConversion(argumentInspectors[i], argTypes[i].getLogicalType());
    }

    allIdentityConverter = Arrays.stream(conversions)
        .allMatch(conv -> conv instanceof IdentityConversion);
}
Example 3
Source File: HiveGenericUDAF.java From flink with Apache License 2.0
private void init() throws HiveException {
    ObjectInspector[] inputInspectors = HiveInspectors.toInspectors(hiveShim, constantArguments, argTypes);

    // Flink UDAF only supports Hive UDAF's PARTIAL_1 and FINAL mode

    // PARTIAL1: from original data to partial aggregation data:
    //           iterate() and terminatePartial() will be called.
    this.partialEvaluator = createEvaluator(inputInspectors);
    this.partialResultObjectInspector = partialEvaluator.init(GenericUDAFEvaluator.Mode.PARTIAL1, inputInspectors);

    // FINAL: from partial aggregation to full aggregation:
    //        merge() and terminate() will be called.
    this.finalEvaluator = createEvaluator(inputInspectors);
    this.finalResultObjectInspector = finalEvaluator.init(
        GenericUDAFEvaluator.Mode.FINAL,
        new ObjectInspector[]{ partialResultObjectInspector });

    conversions = new HiveObjectConversion[inputInspectors.length];
    for (int i = 0; i < inputInspectors.length; i++) {
        conversions[i] = HiveInspectors.getConversion(inputInspectors[i], argTypes[i].getLogicalType(), hiveShim);
    }
    allIdentityConverter = Arrays.stream(conversions)
        .allMatch(conv -> conv instanceof IdentityConversion);

    initialized = true;
}
Example 4
Source File: HiveGenericUDTF.java From flink with Apache License 2.0
@Override
public void open(FunctionContext context) throws Exception {
    function = hiveFunctionWrapper.createFunction();

    function.setCollector(input -> {
        Row row = (Row) HiveInspectors.toFlinkObject(returnInspector, input, hiveShim);
        HiveGenericUDTF.this.collect(row);
    });

    ObjectInspector[] argumentInspectors = HiveInspectors.toInspectors(hiveShim, constantArguments, argTypes);
    returnInspector = function.initialize(argumentInspectors);

    isArgsSingleArray = HiveFunctionUtil.isSingleBoxedArray(argTypes);

    conversions = new HiveObjectConversion[argumentInspectors.length];
    for (int i = 0; i < argumentInspectors.length; i++) {
        conversions[i] = HiveInspectors.getConversion(argumentInspectors[i], argTypes[i].getLogicalType(), hiveShim);
    }

    allIdentityConverter = Arrays.stream(conversions)
        .allMatch(conv -> conv instanceof IdentityConversion);
}
Example 5
Source File: HiveRowDataPartitionComputer.java From flink with Apache License 2.0
public HiveRowDataPartitionComputer(
        HiveShim hiveShim,
        String defaultPartValue,
        String[] columnNames,
        DataType[] columnTypes,
        String[] partitionColumns) {
    super(defaultPartValue, columnNames, columnTypes, partitionColumns);
    this.partitionConverters = Arrays.stream(partitionTypes)
            .map(TypeConversions::fromLogicalToDataType)
            .map(DataFormatConverters::getConverterForDataType)
            .toArray(DataFormatConverters.DataFormatConverter[]::new);
    this.hiveObjectConversions = new HiveObjectConversion[partitionIndexes.length];
    for (int i = 0; i < hiveObjectConversions.length; i++) {
        DataType partColType = columnTypes[partitionIndexes[i]];
        ObjectInspector objectInspector = HiveInspectors.getObjectInspector(partColType);
        hiveObjectConversions[i] = HiveInspectors.getConversion(objectInspector, partColType.getLogicalType(), hiveShim);
    }
}
Example 6
Source File: HiveSimpleUDF.java From flink with Apache License 2.0
@Override
public void openInternal() {
    LOG.info("Opening HiveSimpleUDF as '{}'", hiveFunctionWrapper.getClassName());

    function = hiveFunctionWrapper.createFunction();

    List<TypeInfo> typeInfos = new ArrayList<>();
    for (DataType arg : argTypes) {
        typeInfos.add(HiveTypeUtil.toHiveTypeInfo(arg));
    }

    try {
        method = function.getResolver().getEvalMethod(typeInfos);
        returnInspector = ObjectInspectorFactory.getReflectionObjectInspector(method.getGenericReturnType(),
            ObjectInspectorFactory.ObjectInspectorOptions.JAVA);

        ObjectInspector[] argInspectors = new ObjectInspector[typeInfos.size()];
        for (int i = 0; i < argTypes.length; i++) {
            argInspectors[i] = TypeInfoUtils.getStandardJavaObjectInspectorFromTypeInfo(typeInfos.get(i));
        }

        conversionHelper = new GenericUDFUtils.ConversionHelper(method, argInspectors);
        conversions = new HiveObjectConversion[argInspectors.length];
        for (int i = 0; i < argInspectors.length; i++) {
            conversions[i] = HiveInspectors.getConversion(argInspectors[i], argTypes[i].getLogicalType());
        }

        allIdentityConverter = Arrays.stream(conversions)
            .allMatch(conv -> conv instanceof IdentityConversion);
    } catch (Exception e) {
        throw new FlinkHiveUDFException(
            String.format("Failed to open HiveSimpleUDF from %s", hiveFunctionWrapper.getClassName()), e);
    }
}
Example 7
Source File: HiveSimpleUDF.java From flink with Apache License 2.0
@Override
public void openInternal() {
    LOG.info("Opening HiveSimpleUDF as '{}'", hiveFunctionWrapper.getClassName());

    function = hiveFunctionWrapper.createFunction();

    List<TypeInfo> typeInfos = new ArrayList<>();
    for (DataType arg : argTypes) {
        typeInfos.add(HiveTypeUtil.toHiveTypeInfo(arg, false));
    }

    try {
        method = function.getResolver().getEvalMethod(typeInfos);
        returnInspector = ObjectInspectorFactory.getReflectionObjectInspector(method.getGenericReturnType(),
            ObjectInspectorFactory.ObjectInspectorOptions.JAVA);

        ObjectInspector[] argInspectors = new ObjectInspector[typeInfos.size()];
        for (int i = 0; i < argTypes.length; i++) {
            argInspectors[i] = TypeInfoUtils.getStandardJavaObjectInspectorFromTypeInfo(typeInfos.get(i));
        }

        conversionHelper = new GenericUDFUtils.ConversionHelper(method, argInspectors);
        conversions = new HiveObjectConversion[argInspectors.length];
        for (int i = 0; i < argInspectors.length; i++) {
            conversions[i] = HiveInspectors.getConversion(argInspectors[i], argTypes[i].getLogicalType(), hiveShim);
        }

        allIdentityConverter = Arrays.stream(conversions)
            .allMatch(conv -> conv instanceof IdentityConversion);
    } catch (Exception e) {
        throw new FlinkHiveUDFException(
            String.format("Failed to open HiveSimpleUDF from %s", hiveFunctionWrapper.getClassName()), e);
    }
}
Example 8
Source File: HiveRowPartitionComputer.java From flink with Apache License 2.0
HiveRowPartitionComputer(HiveShim hiveShim, String defaultPartValue, String[] columnNames,
        DataType[] columnTypes, String[] partitionColumns) {
    super(defaultPartValue, columnNames, partitionColumns);
    partColConversions = new HiveObjectConversion[partitionIndexes.length];
    for (int i = 0; i < partColConversions.length; i++) {
        DataType partColType = columnTypes[partitionIndexes[i]];
        ObjectInspector objectInspector = HiveInspectors.getObjectInspector(partColType);
        partColConversions[i] = HiveInspectors.getConversion(objectInspector, partColType.getLogicalType(), hiveShim);
    }
}
Example 9
Source File: HiveWriterFactory.java From flink with Apache License 2.0
private void checkInitialize() throws Exception {
    if (initialized) {
        return;
    }

    JobConf jobConf = confWrapper.conf();
    Object serdeLib = Class.forName(serDeInfo.getSerializationLib()).newInstance();
    Preconditions.checkArgument(serdeLib instanceof Serializer && serdeLib instanceof Deserializer,
            "Expect a SerDe lib implementing both Serializer and Deserializer, but actually got "
                    + serdeLib.getClass().getName());
    this.recordSerDe = (Serializer) serdeLib;
    ReflectionUtils.setConf(recordSerDe, jobConf);

    // TODO: support partition properties, for now assume they're same as table properties
    SerDeUtils.initializeSerDe((Deserializer) recordSerDe, jobConf, tableProperties, null);

    this.formatFields = allColumns.length - partitionColumns.length;
    this.hiveConversions = new HiveObjectConversion[formatFields];
    this.converters = new DataFormatConverter[formatFields];
    List<ObjectInspector> objectInspectors = new ArrayList<>(hiveConversions.length);
    for (int i = 0; i < formatFields; i++) {
        DataType type = allTypes[i];
        ObjectInspector objectInspector = HiveInspectors.getObjectInspector(type);
        objectInspectors.add(objectInspector);
        hiveConversions[i] = HiveInspectors.getConversion(
                objectInspector, type.getLogicalType(), hiveShim);
        converters[i] = DataFormatConverters.getConverterForDataType(type);
    }

    this.formatInspector = ObjectInspectorFactory.getStandardStructObjectInspector(
            Arrays.asList(allColumns).subList(0, formatFields), objectInspectors);
    this.initialized = true;
}
Example 10
Source File: DeferredObjectAdapter.java From flink with Apache License 2.0
public DeferredObjectAdapter(ObjectInspector inspector, LogicalType logicalType) {
    conversion = HiveInspectors.getConversion(inspector, logicalType);
}
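The constructor above only prepares and stores the conversion. A rough sketch of how such an adapter typically applies it later, when Hive pulls the argument through GenericUDF.DeferredObject#get() (the field and setter names are assumptions, not copied from the Flink source):

    // Illustrative only: hold the current Flink value and convert it lazily
    // when Hive asks for the deferred argument.
    private Object flinkValue;

    public void set(Object value) {
        this.flinkValue = value;
    }

    @Override
    public Object get() {
        return conversion.toHiveObject(flinkValue);
    }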
Example 11
Source File: HiveTableOutputFormat.java From flink with Apache License 2.0
@Override
public void open(int taskNumber, int numTasks) throws IOException {
    try {
        StorageDescriptor sd = hiveTablePartition.getStorageDescriptor();
        Object serdeLib = Class.forName(sd.getSerdeInfo().getSerializationLib()).newInstance();
        Preconditions.checkArgument(serdeLib instanceof Serializer && serdeLib instanceof Deserializer,
                "Expect a SerDe lib implementing both Serializer and Deserializer, but actually got "
                        + serdeLib.getClass().getName());
        recordSerDe = (Serializer) serdeLib;
        ReflectionUtils.setConf(recordSerDe, jobConf);
        // TODO: support partition properties, for now assume they're same as table properties
        SerDeUtils.initializeSerDe((Deserializer) recordSerDe, jobConf, tableProperties, null);
        outputClass = recordSerDe.getSerializedClass();
    } catch (IllegalAccessException | SerDeException | InstantiationException | ClassNotFoundException e) {
        throw new FlinkRuntimeException("Error initializing Hive serializer", e);
    }

    TaskAttemptID taskAttemptID = TaskAttemptID.forName("attempt__0000_r_"
            + String.format("%" + (6 - Integer.toString(taskNumber).length()) + "s", " ").replace(" ", "0")
            + taskNumber + "_0");

    this.jobConf.set("mapred.task.id", taskAttemptID.toString());
    this.jobConf.setInt("mapred.task.partition", taskNumber);
    // for hadoop 2.2
    this.jobConf.set("mapreduce.task.attempt.id", taskAttemptID.toString());
    this.jobConf.setInt("mapreduce.task.partition", taskNumber);
    this.context = new TaskAttemptContextImpl(this.jobConf, taskAttemptID);

    if (!isDynamicPartition) {
        staticWriter = writerForLocation(hiveTablePartition.getStorageDescriptor().getLocation());
    } else {
        dynamicPartitionOffset = fieldNames.length - partitionColumns.size() + hiveTablePartition.getPartitionSpec().size();
    }

    numNonPartitionColumns = isPartitioned ? fieldNames.length - partitionColumns.size() : fieldNames.length;
    hiveConversions = new HiveObjectConversion[numNonPartitionColumns];
    List<ObjectInspector> objectInspectors = new ArrayList<>(hiveConversions.length);
    for (int i = 0; i < numNonPartitionColumns; i++) {
        ObjectInspector objectInspector = HiveInspectors.getObjectInspector(fieldTypes[i]);
        objectInspectors.add(objectInspector);
        hiveConversions[i] = HiveInspectors.getConversion(objectInspector, fieldTypes[i].getLogicalType());
    }

    if (!isPartitioned) {
        rowObjectInspector = ObjectInspectorFactory.getStandardStructObjectInspector(
                Arrays.asList(fieldNames),
                objectInspectors);
    } else {
        rowObjectInspector = ObjectInspectorFactory.getStandardStructObjectInspector(
                Arrays.asList(fieldNames).subList(0, fieldNames.length - partitionColumns.size()),
                objectInspectors);
        defaultPartitionName = jobConf.get(HiveConf.ConfVars.DEFAULTPARTITIONNAME.varname,
                HiveConf.ConfVars.DEFAULTPARTITIONNAME.defaultStrVal);
    }
}
Example 12
Source File: DeferredObjectAdapter.java From flink with Apache License 2.0
public DeferredObjectAdapter(ObjectInspector inspector, LogicalType logicalType, HiveShim hiveShim) {
    conversion = HiveInspectors.getConversion(inspector, logicalType, hiveShim);
}