org.apache.flink.table.catalog.hive.descriptors.HiveCatalogValidator Java Examples
The following examples show how to use org.apache.flink.table.catalog.hive.descriptors.HiveCatalogValidator. Each example is taken from an open-source project; the source file, originating project, and license are noted above each snippet.
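Across these examples, HiveCatalogValidator plays two roles: its validate() method checks the descriptor properties of a catalog definition (Example #4), and its CATALOG_HIVE_VERSION constant is the configuration key through which HiveCatalog publishes its Hive version to table factories, sources, and sinks (most of the remaining examples). Below is a minimal sketch of that version-handoff pattern, distilled from the snippets that follow; the version literal "2.3.4" is a hypothetical placeholder.

import org.apache.flink.table.catalog.hive.descriptors.HiveCatalogValidator;
import org.apache.flink.util.Preconditions;

import org.apache.hadoop.mapred.JobConf;

public class HiveVersionHandoffSketch {
    public static void main(String[] args) {
        JobConf jobConf = new JobConf();

        // Producer side: HiveCatalog records the version it was created with
        // (see Examples #2 and #3).
        jobConf.set(HiveCatalogValidator.CATALOG_HIVE_VERSION, "2.3.4"); // hypothetical version

        // Consumer side: factories, sources, and sinks read the key back and
        // fail fast if it is missing (see Examples #1, #5, #6, #7).
        String hiveVersion = Preconditions.checkNotNull(
                jobConf.get(HiveCatalogValidator.CATALOG_HIVE_VERSION),
                "Hive version is not defined");
        System.out.println("Hive version seen by downstream components: " + hiveVersion);
    }
}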
Example #1
Source File: HiveTableOutputFormat.java From flink with Apache License 2.0
public HiveTableOutputFormat(JobConf jobConf, ObjectPath tablePath, CatalogTable table,
        HiveTablePartition hiveTablePartition, Properties tableProperties, boolean overwrite) {
    super(jobConf.getCredentials());

    Preconditions.checkNotNull(table, "table cannot be null");
    Preconditions.checkNotNull(hiveTablePartition, "HiveTablePartition cannot be null");
    Preconditions.checkNotNull(tableProperties, "Table properties cannot be null");

    HadoopUtils.mergeHadoopConf(jobConf);
    this.jobConf = jobConf;
    this.tablePath = tablePath;
    this.partitionColumns = table.getPartitionKeys();
    TableSchema tableSchema = table.getSchema();
    this.fieldNames = tableSchema.getFieldNames();
    this.fieldTypes = tableSchema.getFieldDataTypes();
    this.hiveTablePartition = hiveTablePartition;
    this.tableProperties = tableProperties;
    this.overwrite = overwrite;

    isPartitioned = partitionColumns != null && !partitionColumns.isEmpty();
    isDynamicPartition = isPartitioned && partitionColumns.size() > hiveTablePartition.getPartitionSpec().size();

    hiveVersion = Preconditions.checkNotNull(jobConf.get(HiveCatalogValidator.CATALOG_HIVE_VERSION),
            "Hive version is not defined");
}
Example #2
Source File: HiveCatalog.java From flink with Apache License 2.0
@VisibleForTesting
protected HiveCatalog(String catalogName, String defaultDatabase, @Nullable HiveConf hiveConf,
        String hiveVersion, boolean allowEmbedded) {
    super(catalogName, defaultDatabase == null ? DEFAULT_DB : defaultDatabase);

    this.hiveConf = hiveConf == null ? createHiveConf(null) : hiveConf;
    if (!allowEmbedded) {
        checkArgument(!StringUtils.isNullOrWhitespaceOnly(this.hiveConf.getVar(HiveConf.ConfVars.METASTOREURIS)),
                "Embedded metastore is not allowed. Make sure you have set a valid value for "
                        + HiveConf.ConfVars.METASTOREURIS.toString());
    }
    checkArgument(!StringUtils.isNullOrWhitespaceOnly(hiveVersion), "hiveVersion cannot be null or empty");
    this.hiveVersion = hiveVersion;
    hiveShim = HiveShimLoader.loadHiveShim(hiveVersion);
    // add this to hiveConf to make sure table factory and source/sink see the same Hive version as HiveCatalog
    this.hiveConf.set(HiveCatalogValidator.CATALOG_HIVE_VERSION, hiveVersion);

    LOG.info("Created HiveCatalog '{}'", catalogName);
}
Example #3
Source File: HiveCatalog.java From flink with Apache License 2.0
@VisibleForTesting
protected HiveCatalog(String catalogName, String defaultDatabase, @Nullable HiveConf hiveConf, String hiveVersion) {
    super(catalogName, defaultDatabase == null ? DEFAULT_DB : defaultDatabase);

    this.hiveConf = hiveConf == null ? createHiveConf(null) : hiveConf;
    checkArgument(!StringUtils.isNullOrWhitespaceOnly(hiveVersion), "hiveVersion cannot be null or empty");
    this.hiveVersion = hiveVersion;
    // add this to hiveConf to make sure table factory and source/sink see the same Hive version as HiveCatalog
    this.hiveConf.set(HiveCatalogValidator.CATALOG_HIVE_VERSION, hiveVersion);

    LOG.info("Created HiveCatalog '{}'", catalogName);
}
Example #4
Source File: HiveCatalogFactory.java From flink with Apache License 2.0
private static DescriptorProperties getValidatedProperties(Map<String, String> properties) {
    final DescriptorProperties descriptorProperties = new DescriptorProperties(true);
    descriptorProperties.putProperties(properties);

    new HiveCatalogValidator().validate(descriptorProperties);

    return descriptorProperties;
}
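For context, here is a hypothetical invocation of the helper above. The literal property keys and values ("type" = "hive", "hive-version") are assumptions based on Flink's catalog descriptor conventions, not something the snippet itself shows; validate() throws a ValidationException when a required property is missing or malformed.

// Assumes java.util.{Map, HashMap}, DescriptorProperties, and the
// getValidatedProperties(...) helper above are in scope.
Map<String, String> properties = new HashMap<>();
properties.put("type", "hive");          // assumed key/value selecting the Hive catalog
properties.put("hive-version", "2.3.4"); // assumed key behind CATALOG_HIVE_VERSION
DescriptorProperties validated = getValidatedProperties(properties);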
Example #5
Source File: HiveTableSink.java From flink with Apache License 2.0
public HiveTableSink(JobConf jobConf, ObjectPath tablePath, CatalogTable table) {
    this.jobConf = jobConf;
    this.tablePath = tablePath;
    this.catalogTable = table;
    hiveVersion = Preconditions.checkNotNull(jobConf.get(HiveCatalogValidator.CATALOG_HIVE_VERSION),
            "Hive version is not defined");
    tableSchema = table.getSchema();
}
Example #6
Source File: HiveTableFactory.java From flink with Apache License 2.0
public HiveTableFactory(HiveConf hiveConf) {
    this.hiveConf = checkNotNull(hiveConf, "hiveConf cannot be null");

    // this has to come from hiveConf, otherwise we may lose what user specifies in the yaml file
    this.hiveVersion = checkNotNull(hiveConf.get(HiveCatalogValidator.CATALOG_HIVE_VERSION),
            "Hive version is not defined");
}
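The comment in this constructor carries the key detail: the version must be read from the HiveConf that HiveCatalog populated (Examples #2 and #3), not from a freshly created configuration, or settings the user supplied (for example in a YAML catalog definition) would be lost. A tiny illustration of that point, with hypothetical conf objects and version literal:

// Assumes org.apache.hadoop.hive.conf.HiveConf and HiveCatalogValidator are imported.
HiveConf confFromCatalog = new HiveConf();
// HiveCatalog performed this set() when it was created (Examples #2 and #3):
confFromCatalog.set(HiveCatalogValidator.CATALOG_HIVE_VERSION, "2.3.4"); // hypothetical

HiveConf freshConf = new HiveConf();
// Only the populated conf carries the version; reading from a fresh conf
// would trip the checkNotNull above with "Hive version is not defined".
assert confFromCatalog.get(HiveCatalogValidator.CATALOG_HIVE_VERSION) != null;
assert freshConf.get(HiveCatalogValidator.CATALOG_HIVE_VERSION) == null;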
Example #7
Source File: HiveTableSource.java From flink with Apache License 2.0
public HiveTableSource(JobConf jobConf, ObjectPath tablePath, CatalogTable catalogTable) {
    this.jobConf = Preconditions.checkNotNull(jobConf);
    this.tablePath = Preconditions.checkNotNull(tablePath);
    this.catalogTable = Preconditions.checkNotNull(catalogTable);
    this.hiveVersion = Preconditions.checkNotNull(jobConf.get(HiveCatalogValidator.CATALOG_HIVE_VERSION),
            "Hive version is not defined");
    initAllPartitions = false;
    partitionPruned = false;
}
Example #8
Source File: DependencyTest.java From flink with Apache License 2.0
@Override
public Catalog createCatalog(String name, Map<String, String> properties) {
    // Test HiveCatalogFactory.createCatalog
    // But not use it for testing purpose
    assertTrue(super.createCatalog(name, properties) != null);
    // Developers may already have their own production/testing hive-site.xml set in their environment,
    // and Flink tests should avoid using those hive-site.xml.
    // Thus, explicitly create a testing HiveConf for unit tests here
    Catalog hiveCatalog = HiveTestUtils.createHiveCatalog(name, properties.get(HiveCatalogValidator.CATALOG_HIVE_VERSION));

    // Creates an additional database to test tableEnv.useDatabase() will switch current database of the catalog
    hiveCatalog.open();
    try {
        hiveCatalog.createDatabase(
            ADDITIONAL_TEST_DATABASE,
            new CatalogDatabaseImpl(new HashMap<>(), null),
            false);
        hiveCatalog.createTable(
            new ObjectPath(ADDITIONAL_TEST_DATABASE, TEST_TABLE),
            new CatalogTableImpl(
                TableSchema.builder()
                    .field("testcol", DataTypes.INT())
                    .build(),
                new HashMap<String, String>() {{
                    put(CatalogConfig.IS_GENERIC, String.valueOf(true));
                }},
                ""
            ),
            false
        );
    } catch (DatabaseAlreadyExistException | TableAlreadyExistException | DatabaseNotExistException e) {
        throw new CatalogException(e);
    }

    return hiveCatalog;
}
Example #9
Source File: HiveBatchSource.java From Alink with Apache License 2.0
public HiveBatchSource(JobConf jobConf, ObjectPath tablePath, CatalogTable catalogTable) {
    this.jobConf = Preconditions.checkNotNull(jobConf);
    this.tablePath = Preconditions.checkNotNull(tablePath);
    this.catalogTable = Preconditions.checkNotNull(catalogTable);
    this.hiveVersion = Preconditions.checkNotNull(jobConf.get(HiveCatalogValidator.CATALOG_HIVE_VERSION),
            "Hive version is not defined");
    hiveShim = HiveShimLoader.loadHiveShim(hiveVersion);
    partitionPruned = false;
}
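Example #9, like the later source and sink variants, also resolves a HiveShim from the version string it reads back. The shim encapsulates version-specific Hive behavior, so loading it by the same string the catalog published keeps catalog, source, and sink consistent. A minimal sketch of that step in isolation; the version literal is hypothetical:

// Assumes org.apache.flink.table.catalog.hive.client.{HiveShim, HiveShimLoader} are imported.
// loadHiveShim(...) picks the shim implementation matching the given Hive
// version and throws for unsupported versions.
HiveShim hiveShim = HiveShimLoader.loadHiveShim("2.3.4"); // hypothetical version string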
Example #10
Source File: HiveTableSink.java From flink with Apache License 2.0
public HiveTableSink(
        boolean userMrWriter,
        boolean isBounded,
        JobConf jobConf,
        ObjectIdentifier identifier,
        CatalogTable table) {
    this.userMrWriter = userMrWriter;
    this.isBounded = isBounded;
    this.jobConf = jobConf;
    this.identifier = identifier;
    this.catalogTable = table;
    hiveVersion = Preconditions.checkNotNull(jobConf.get(HiveCatalogValidator.CATALOG_HIVE_VERSION),
            "Hive version is not defined");
    hiveShim = HiveShimLoader.loadHiveShim(hiveVersion);
    tableSchema = TableSchemaUtils.getPhysicalSchema(table.getSchema());
}
Example #11
Source File: HiveTableSource.java From flink with Apache License 2.0
public HiveTableSource(
        JobConf jobConf,
        ReadableConfig flinkConf,
        ObjectPath tablePath,
        CatalogTable catalogTable) {
    this.jobConf = Preconditions.checkNotNull(jobConf);
    this.flinkConf = Preconditions.checkNotNull(flinkConf);
    this.tablePath = Preconditions.checkNotNull(tablePath);
    this.catalogTable = Preconditions.checkNotNull(catalogTable);
    this.hiveVersion = Preconditions.checkNotNull(jobConf.get(HiveCatalogValidator.CATALOG_HIVE_VERSION),
            "Hive version is not defined");
    hiveShim = HiveShimLoader.loadHiveShim(hiveVersion);
    partitionPruned = false;
}
Example #12
Source File: DependencyTest.java From flink with Apache License 2.0
@Override
public Catalog createCatalog(String name, Map<String, String> properties) {
    // Developers may already have their own production/testing hive-site.xml set in their environment,
    // and Flink tests should avoid using those hive-site.xml.
    // Thus, explicitly create a testing HiveConf for unit tests here
    Catalog hiveCatalog = HiveTestUtils.createHiveCatalog(name, properties.get(HiveCatalogValidator.CATALOG_HIVE_VERSION));

    // Creates an additional database to test tableEnv.useDatabase() will switch current database of the catalog
    hiveCatalog.open();
    try {
        hiveCatalog.createDatabase(
            ADDITIONAL_TEST_DATABASE,
            new CatalogDatabaseImpl(new HashMap<>(), null),
            false);
        hiveCatalog.createTable(
            new ObjectPath(ADDITIONAL_TEST_DATABASE, TEST_TABLE),
            new CatalogTableImpl(
                TableSchema.builder()
                    .field("testcol", DataTypes.INT())
                    .build(),
                new HashMap<String, String>() {{
                    put(CatalogConfig.IS_GENERIC, String.valueOf(false));
                }},
                ""
            ),
            false
        );
        // create a table to test parameterized types
        hiveCatalog.createTable(
            new ObjectPath("default", TABLE_WITH_PARAMETERIZED_TYPES),
            tableWithParameterizedTypes(),
            false);
    } catch (DatabaseAlreadyExistException | TableAlreadyExistException | DatabaseNotExistException e) {
        throw new CatalogException(e);
    }

    return hiveCatalog;
}