Java Code Examples for org.apache.flink.table.catalog.hive.client.HiveShimLoader#loadHiveShim()
The following examples show how to use org.apache.flink.table.catalog.hive.client.HiveShimLoader#loadHiveShim().
You can go to the original project or source file by following the links above each example.
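Stripped to its essentials, the pattern the examples below share is: obtain a Hive version string (either passed in by the caller or detected via HiveShimLoader.getHiveVersion()) and hand it to HiveShimLoader.loadHiveShim(), which returns a HiveShim matching that version. A minimal sketch of that pattern follows; the class name and the println are illustrative, not taken from any of the projects below.

import org.apache.flink.table.catalog.hive.client.HiveShim;
import org.apache.flink.table.catalog.hive.client.HiveShimLoader;

public class HiveShimLoaderSketch {
    public static void main(String[] args) {
        // Detect the Hive version available on the classpath...
        String hiveVersion = HiveShimLoader.getHiveVersion();
        // ...and load the shim that matches it. The shim abstracts over
        // API differences between the supported Hive versions.
        HiveShim hiveShim = HiveShimLoader.loadHiveShim(hiveVersion);
        System.out.println("Loaded shim for Hive " + hiveVersion);
    }
}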
Example 1
Source File: HiveBatchSource.java From Alink with Apache License 2.0
private HiveBatchSource(
        JobConf jobConf,
        ObjectPath tablePath,
        CatalogTable catalogTable,
        List<Map<String, String>> remainingPartitions,
        String hiveVersion,
        boolean partitionPruned,
        int[] projectedFields,
        boolean isLimitPushDown,
        long limit) {
    this.jobConf = Preconditions.checkNotNull(jobConf);
    this.tablePath = Preconditions.checkNotNull(tablePath);
    this.catalogTable = Preconditions.checkNotNull(catalogTable);
    this.remainingPartitions = remainingPartitions;
    this.hiveVersion = hiveVersion;
    hiveShim = HiveShimLoader.loadHiveShim(hiveVersion);
    this.partitionPruned = partitionPruned;
    this.projectedFields = projectedFields;
    this.isLimitPushDown = isLimitPushDown;
    this.limit = limit;
}
Example 2
Source File: HiveCatalog.java From flink with Apache License 2.0
@VisibleForTesting
protected HiveCatalog(String catalogName, String defaultDatabase, @Nullable HiveConf hiveConf,
        String hiveVersion, boolean allowEmbedded) {
    super(catalogName, defaultDatabase == null ? DEFAULT_DB : defaultDatabase);

    this.hiveConf = hiveConf == null ? createHiveConf(null) : hiveConf;
    if (!allowEmbedded) {
        checkArgument(!StringUtils.isNullOrWhitespaceOnly(this.hiveConf.getVar(HiveConf.ConfVars.METASTOREURIS)),
                "Embedded metastore is not allowed. Make sure you have set a valid value for " +
                        HiveConf.ConfVars.METASTOREURIS.toString());
    }
    checkArgument(!StringUtils.isNullOrWhitespaceOnly(hiveVersion), "hiveVersion cannot be null or empty");
    this.hiveVersion = hiveVersion;
    hiveShim = HiveShimLoader.loadHiveShim(hiveVersion);
    // add this to hiveConf to make sure table factory and source/sink see the same Hive version as HiveCatalog
    this.hiveConf.set(HiveCatalogValidator.CATALOG_HIVE_VERSION, hiveVersion);

    LOG.info("Created HiveCatalog '{}'", catalogName);
}
Example 3
Source File: HiveTableInputFormat.java From flink with Apache License 2.0
@Override
public void open(HiveTableInputSplit split) throws IOException {
    HiveTablePartition partition = split.getHiveTablePartition();
    if (!useMapRedReader && useOrcVectorizedRead(partition)) {
        this.reader = new HiveVectorizedOrcSplitReader(
                hiveVersion, jobConf, fieldNames, fieldTypes, selectedFields, split);
    } else if (!useMapRedReader && useParquetVectorizedRead(partition)) {
        this.reader = new HiveVectorizedParquetSplitReader(
                hiveVersion, jobConf, fieldNames, fieldTypes, selectedFields, split);
    } else {
        JobConf clonedConf = new JobConf(jobConf);
        addSchemaToConf(clonedConf);
        this.reader = new HiveMapredSplitReader(clonedConf, partitionKeys, fieldTypes,
                selectedFields, split, HiveShimLoader.loadHiveShim(hiveVersion));
    }
    currentReadCount = 0L;
}
Example 4
Source File: HiveTableSource.java From flink with Apache License 2.0
private HiveTableSource(
        JobConf jobConf,
        ReadableConfig flinkConf,
        ObjectPath tablePath,
        CatalogTable catalogTable,
        List<Map<String, String>> remainingPartitions,
        String hiveVersion,
        boolean partitionPruned,
        int[] projectedFields,
        boolean isLimitPushDown,
        long limit) {
    this.jobConf = Preconditions.checkNotNull(jobConf);
    this.flinkConf = Preconditions.checkNotNull(flinkConf);
    this.tablePath = Preconditions.checkNotNull(tablePath);
    this.catalogTable = Preconditions.checkNotNull(catalogTable);
    this.remainingPartitions = remainingPartitions;
    this.hiveVersion = hiveVersion;
    hiveShim = HiveShimLoader.loadHiveShim(hiveVersion);
    this.partitionPruned = partitionPruned;
    this.projectedFields = projectedFields;
    this.isLimitPushDown = isLimitPushDown;
    this.limit = limit;
}
Example 5
Source File: HiveOutputFormatFactoryTest.java From flink with Apache License 2.0
@Test
public void testCreateOutputFormat() {
    TableSchema schema = TableSchema.builder().field("x", DataTypes.INT()).build();
    SerDeInfo serDeInfo = new SerDeInfo("name", LazySimpleSerDe.class.getName(), Collections.emptyMap());
    HiveWriterFactory writerFactory = new HiveWriterFactory(
            new JobConf(),
            VerifyURIOutputFormat.class,
            serDeInfo,
            schema,
            new String[0],
            new Properties(),
            HiveShimLoader.loadHiveShim(HiveShimLoader.getHiveVersion()),
            false);
    HiveOutputFormatFactory factory = new HiveOutputFormatFactory(writerFactory);
    org.apache.flink.core.fs.Path path =
            new org.apache.flink.core.fs.Path(TEST_URI_SCHEME, TEST_URI_AUTHORITY, "/foo/path");
    factory.createOutputFormat(path);
}
Example 6
Source File: HiveBatchSource.java From Alink with Apache License 2.0
public HiveBatchSource(JobConf jobConf, ObjectPath tablePath, CatalogTable catalogTable) {
    this.jobConf = Preconditions.checkNotNull(jobConf);
    this.tablePath = Preconditions.checkNotNull(tablePath);
    this.catalogTable = Preconditions.checkNotNull(catalogTable);
    this.hiveVersion = Preconditions.checkNotNull(jobConf.get(HiveCatalogValidator.CATALOG_HIVE_VERSION),
            "Hive version is not defined");
    hiveShim = HiveShimLoader.loadHiveShim(hiveVersion);
    partitionPruned = false;
}
Example 7
Source File: HiveModule.java From flink with Apache License 2.0
public HiveModule(String hiveVersion) {
    checkArgument(!StringUtils.isNullOrWhitespaceOnly(hiveVersion), "hiveVersion cannot be null");
    this.hiveVersion = hiveVersion;
    this.hiveShim = HiveShimLoader.loadHiveShim(hiveVersion);
    this.factory = new HiveFunctionDefinitionFactory(hiveShim);
}
Example 8
Source File: HiveTableSink.java From flink with Apache License 2.0
public HiveTableSink(
        boolean userMrWriter,
        boolean isBounded,
        JobConf jobConf,
        ObjectIdentifier identifier,
        CatalogTable table) {
    this.userMrWriter = userMrWriter;
    this.isBounded = isBounded;
    this.jobConf = jobConf;
    this.identifier = identifier;
    this.catalogTable = table;
    hiveVersion = Preconditions.checkNotNull(jobConf.get(HiveCatalogValidator.CATALOG_HIVE_VERSION),
            "Hive version is not defined");
    hiveShim = HiveShimLoader.loadHiveShim(hiveVersion);
    tableSchema = TableSchemaUtils.getPhysicalSchema(table.getSchema());
}
Example 9
Source File: HiveTableSource.java From flink with Apache License 2.0
public HiveTableSource(
        JobConf jobConf, ReadableConfig flinkConf, ObjectPath tablePath, CatalogTable catalogTable) {
    this.jobConf = Preconditions.checkNotNull(jobConf);
    this.flinkConf = Preconditions.checkNotNull(flinkConf);
    this.tablePath = Preconditions.checkNotNull(tablePath);
    this.catalogTable = Preconditions.checkNotNull(catalogTable);
    this.hiveVersion = Preconditions.checkNotNull(jobConf.get(HiveCatalogValidator.CATALOG_HIVE_VERSION),
            "Hive version is not defined");
    hiveShim = HiveShimLoader.loadHiveShim(hiveVersion);
    partitionPruned = false;
}
Example 10
Source File: HiveStatsUtil.java From flink with Apache License 2.0
/**
 * Create Flink ColumnStats from Hive ColumnStatisticsData.
 */
private static CatalogColumnStatisticsDataBase createTableColumnStats(
        DataType colType, ColumnStatisticsData stats, String hiveVersion) {
    HiveShim hiveShim = HiveShimLoader.loadHiveShim(hiveVersion);
    if (stats.isSetBinaryStats()) {
        BinaryColumnStatsData binaryStats = stats.getBinaryStats();
        return new CatalogColumnStatisticsDataBinary(
                binaryStats.isSetMaxColLen() ? binaryStats.getMaxColLen() : null,
                binaryStats.isSetAvgColLen() ? binaryStats.getAvgColLen() : null,
                binaryStats.isSetNumNulls() ? binaryStats.getNumNulls() : null);
    } else if (stats.isSetBooleanStats()) {
        BooleanColumnStatsData booleanStats = stats.getBooleanStats();
        return new CatalogColumnStatisticsDataBoolean(
                booleanStats.isSetNumTrues() ? booleanStats.getNumTrues() : null,
                booleanStats.isSetNumFalses() ? booleanStats.getNumFalses() : null,
                booleanStats.isSetNumNulls() ? booleanStats.getNumNulls() : null);
    } else if (hiveShim.isDateStats(stats)) {
        return hiveShim.toFlinkDateColStats(stats);
    } else if (stats.isSetDoubleStats()) {
        DoubleColumnStatsData doubleStats = stats.getDoubleStats();
        return new CatalogColumnStatisticsDataDouble(
                doubleStats.isSetLowValue() ? doubleStats.getLowValue() : null,
                doubleStats.isSetHighValue() ? doubleStats.getHighValue() : null,
                doubleStats.isSetNumDVs() ? doubleStats.getNumDVs() : null,
                doubleStats.isSetNumNulls() ? doubleStats.getNumNulls() : null);
    } else if (stats.isSetLongStats()) {
        LongColumnStatsData longColStats = stats.getLongStats();
        return new CatalogColumnStatisticsDataLong(
                longColStats.isSetLowValue() ? longColStats.getLowValue() : null,
                longColStats.isSetHighValue() ? longColStats.getHighValue() : null,
                longColStats.isSetNumDVs() ? longColStats.getNumDVs() : null,
                longColStats.isSetNumNulls() ? longColStats.getNumNulls() : null);
    } else if (stats.isSetStringStats()) {
        StringColumnStatsData stringStats = stats.getStringStats();
        return new CatalogColumnStatisticsDataString(
                stringStats.isSetMaxColLen() ? stringStats.getMaxColLen() : null,
                stringStats.isSetAvgColLen() ? stringStats.getAvgColLen() : null,
                stringStats.isSetNumDVs() ? stringStats.getNumDVs() : null,
                // guard the null count with isSetNumNulls(); the original checked isSetNumDVs() here
                stringStats.isSetNumNulls() ? stringStats.getNumNulls() : null);
    } else if (stats.isSetDecimalStats()) {
        DecimalColumnStatsData decimalStats = stats.getDecimalStats();
        // for now, just return CatalogColumnStatisticsDataDouble for decimal columns
        Double max = null;
        if (decimalStats.isSetHighValue()) {
            max = toHiveDecimal(decimalStats.getHighValue()).doubleValue();
        }
        Double min = null;
        if (decimalStats.isSetLowValue()) {
            min = toHiveDecimal(decimalStats.getLowValue()).doubleValue();
        }
        Long ndv = decimalStats.isSetNumDVs() ? decimalStats.getNumDVs() : null;
        Long nullCount = decimalStats.isSetNumNulls() ? decimalStats.getNumNulls() : null;
        return new CatalogColumnStatisticsDataDouble(min, max, ndv, nullCount);
    } else {
        LOG.warn("Flink does not support converting ColumnStatisticsData '{}' for Hive column type '{}' yet.",
                stats, colType);
        return null;
    }
}
Example 11
Source File: HiveGenericUDAFTest.java From flink with Apache License 2.0
private static HiveGenericUDAF init(Class hiveUdfClass, Object[] constantArgs, DataType[] argTypes) throws Exception {
    HiveFunctionWrapper<GenericUDAFResolver2> wrapper = new HiveFunctionWrapper(hiveUdfClass.getName());
    HiveGenericUDAF udf = new HiveGenericUDAF(wrapper, HiveShimLoader.loadHiveShim(HiveShimLoader.getHiveVersion()));
    udf.setArgumentTypesAndConstants(constantArgs, argTypes);
    udf.getHiveResultType(constantArgs, argTypes);
    udf.open(null);
    return udf;
}