Java Code Examples for org.apache.flink.table.catalog.CatalogPartition#getProperties()
The following examples show how to use
org.apache.flink.table.catalog.CatalogPartition#getProperties().
You can vote up the ones you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example 1
Source File: HiveCatalog.java From flink with Apache License 2.0 | 6 votes |
private Partition instantiateHivePartition(Table hiveTable, CatalogPartitionSpec partitionSpec, CatalogPartition catalogPartition) throws PartitionSpecInvalidException { List<String> partCols = getFieldNames(hiveTable.getPartitionKeys()); List<String> partValues = getOrderedFullPartitionValues( partitionSpec, partCols, new ObjectPath(hiveTable.getDbName(), hiveTable.getTableName())); // validate partition values for (int i = 0; i < partCols.size(); i++) { if (StringUtils.isNullOrWhitespaceOnly(partValues.get(i))) { throw new PartitionSpecInvalidException(getName(), partCols, new ObjectPath(hiveTable.getDbName(), hiveTable.getTableName()), partitionSpec); } } // TODO: handle GenericCatalogPartition StorageDescriptor sd = hiveTable.getSd().deepCopy(); sd.setLocation(catalogPartition.getProperties().remove(HiveCatalogConfig.PARTITION_LOCATION)); Map<String, String> properties = new HashMap<>(catalogPartition.getProperties()); properties.put(HiveCatalogConfig.COMMENT, catalogPartition.getComment()); return HiveTableUtil.createHivePartition( hiveTable.getDbName(), hiveTable.getTableName(), partValues, sd, properties); }
Example 2
Source File: SqlToOperationConverter.java From flink with Apache License 2.0 | 6 votes |
private Operation convertAlterTableProperties(ObjectIdentifier tableIdentifier, CatalogTable oldTable, SqlAlterTableProperties alterTableProperties) { LinkedHashMap<String, String> partitionKVs = alterTableProperties.getPartitionKVs(); // it's altering partitions if (partitionKVs != null) { CatalogPartitionSpec partitionSpec = new CatalogPartitionSpec(partitionKVs); CatalogPartition catalogPartition = catalogManager.getPartition(tableIdentifier, partitionSpec) .orElseThrow(() -> new ValidationException(String.format("Partition %s of table %s doesn't exist", partitionSpec.getPartitionSpec(), tableIdentifier))); Map<String, String> newProps = new HashMap<>(catalogPartition.getProperties()); newProps.putAll(OperationConverterUtils.extractProperties(alterTableProperties.getPropertyList())); return new AlterPartitionPropertiesOperation( tableIdentifier, partitionSpec, new CatalogPartitionImpl(newProps, catalogPartition.getComment())); } else { // it's altering a table Map<String, String> newProperties = new HashMap<>(oldTable.getOptions()); newProperties.putAll(OperationConverterUtils.extractProperties(alterTableProperties.getPropertyList())); return new AlterTablePropertiesOperation(tableIdentifier, oldTable.copy(newProperties)); } }
Example 3
Source File: HiveCatalog.java From flink with Apache License 2.0 | 5 votes |
private Partition instantiateHivePartition(Table hiveTable, CatalogPartitionSpec partitionSpec, CatalogPartition catalogPartition) throws PartitionSpecInvalidException { List<String> partCols = getFieldNames(hiveTable.getPartitionKeys()); List<String> partValues = getOrderedFullPartitionValues( partitionSpec, partCols, new ObjectPath(hiveTable.getDbName(), hiveTable.getTableName())); // validate partition values for (int i = 0; i < partCols.size(); i++) { if (StringUtils.isNullOrWhitespaceOnly(partValues.get(i))) { throw new PartitionSpecInvalidException(getName(), partCols, new ObjectPath(hiveTable.getDbName(), hiveTable.getTableName()), partitionSpec); } } // TODO: handle GenericCatalogPartition StorageDescriptor sd = hiveTable.getSd().deepCopy(); sd.setLocation(catalogPartition.getProperties().remove(SqlCreateHiveTable.TABLE_LOCATION_URI)); Map<String, String> properties = new HashMap<>(catalogPartition.getProperties()); String comment = catalogPartition.getComment(); if (comment != null) { properties.put(HiveCatalogConfig.COMMENT, comment); } return HiveTableUtil.createHivePartition( hiveTable.getDbName(), hiveTable.getTableName(), partValues, sd, properties); }