org.apache.flink.table.catalog.CatalogPartition Java Examples
The following examples show how to use
org.apache.flink.table.catalog.CatalogPartition.
The original project and source file are noted above each example.
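Before the examples, here is a minimal, self-contained sketch of the typical CatalogPartition round trip (creating a partition and reading it back). The catalog instance, database, table, and partition key used here are hypothetical placeholders rather than code taken from any example below.

import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

import org.apache.flink.table.catalog.Catalog;
import org.apache.flink.table.catalog.CatalogPartition;
import org.apache.flink.table.catalog.CatalogPartitionImpl;
import org.apache.flink.table.catalog.CatalogPartitionSpec;
import org.apache.flink.table.catalog.ObjectPath;

public class CatalogPartitionSketch {

    // A minimal sketch, assuming `catalog` is an already-initialized Catalog
    // implementation and that the table "db1.orders" is partitioned by "dt".
    static void createAndReadPartition(Catalog catalog) throws Exception {
        ObjectPath tablePath = new ObjectPath("db1", "orders");

        // The spec identifies the partition: partition column -> value.
        CatalogPartitionSpec spec =
                new CatalogPartitionSpec(Collections.singletonMap("dt", "2020-01-01"));

        // The partition itself carries free-form properties and an optional comment.
        Map<String, String> props = new HashMap<>();
        props.put("owner", "team-analytics");
        CatalogPartition partition = new CatalogPartitionImpl(props, "daily partition");

        catalog.createPartition(tablePath, spec, partition, false);

        CatalogPartition readBack = catalog.getPartition(tablePath, spec);
        System.out.println(readBack.getProperties());
    }
}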
Example #1
Source File: SqlToOperationConverter.java From flink with Apache License 2.0
private Operation convertAlterTableProperties(
        ObjectIdentifier tableIdentifier,
        CatalogTable oldTable,
        SqlAlterTableProperties alterTableProperties) {
    LinkedHashMap<String, String> partitionKVs = alterTableProperties.getPartitionKVs();
    // it's altering partitions
    if (partitionKVs != null) {
        CatalogPartitionSpec partitionSpec = new CatalogPartitionSpec(partitionKVs);
        CatalogPartition catalogPartition = catalogManager.getPartition(tableIdentifier, partitionSpec)
                .orElseThrow(() -> new ValidationException(String.format("Partition %s of table %s doesn't exist",
                        partitionSpec.getPartitionSpec(), tableIdentifier)));
        Map<String, String> newProps = new HashMap<>(catalogPartition.getProperties());
        newProps.putAll(OperationConverterUtils.extractProperties(alterTableProperties.getPropertyList()));
        return new AlterPartitionPropertiesOperation(
                tableIdentifier,
                partitionSpec,
                new CatalogPartitionImpl(newProps, catalogPartition.getComment()));
    } else {
        // it's altering a table
        Map<String, String> newProperties = new HashMap<>(oldTable.getOptions());
        newProperties.putAll(OperationConverterUtils.extractProperties(alterTableProperties.getPropertyList()));
        return new AlterTablePropertiesOperation(tableIdentifier, oldTable.copy(newProperties));
    }
}
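The partition branch above is only taken when the ALTER TABLE statement carries a partition spec. As a rough illustration of the objects it produces, the sketch below builds the same operation by hand; `tableIdentifier`, `existingPartition`, the partition values, and the property key are hypothetical, and the imports are assumed to match the converter above.

// Hypothetical sketch (not from the Flink source): the operation produced for
// something like ALTER TABLE ... PARTITION (dt='2020-01-01') SET ('k'='v').
static AlterPartitionPropertiesOperation changePartitionProperty(
        ObjectIdentifier tableIdentifier, CatalogPartition existingPartition) {
    CatalogPartitionSpec partitionSpec =
            new CatalogPartitionSpec(Collections.singletonMap("dt", "2020-01-01"));

    // Start from the existing partition properties and overlay the new ones.
    Map<String, String> newProps = new HashMap<>(existingPartition.getProperties());
    newProps.put("k", "v");

    return new AlterPartitionPropertiesOperation(
            tableIdentifier,
            partitionSpec,
            new CatalogPartitionImpl(newProps, existingPartition.getComment()));
}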
Example #2
Source File: HiveCatalog.java From flink with Apache License 2.0
private Partition instantiateHivePartition(Table hiveTable, CatalogPartitionSpec partitionSpec, CatalogPartition catalogPartition)
        throws PartitionSpecInvalidException {
    List<String> partCols = getFieldNames(hiveTable.getPartitionKeys());
    List<String> partValues = getOrderedFullPartitionValues(
            partitionSpec, partCols, new ObjectPath(hiveTable.getDbName(), hiveTable.getTableName()));
    // validate partition values
    for (int i = 0; i < partCols.size(); i++) {
        if (StringUtils.isNullOrWhitespaceOnly(partValues.get(i))) {
            throw new PartitionSpecInvalidException(getName(), partCols,
                    new ObjectPath(hiveTable.getDbName(), hiveTable.getTableName()), partitionSpec);
        }
    }
    // TODO: handle GenericCatalogPartition
    StorageDescriptor sd = hiveTable.getSd().deepCopy();
    sd.setLocation(catalogPartition.getProperties().remove(HiveCatalogConfig.PARTITION_LOCATION));
    Map<String, String> properties = new HashMap<>(catalogPartition.getProperties());
    properties.put(HiveCatalogConfig.COMMENT, catalogPartition.getComment());
    return HiveTableUtil.createHivePartition(
            hiveTable.getDbName(),
            hiveTable.getTableName(),
            partValues,
            sd,
            properties);
}
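The method above pulls the storage location out of the partition properties (HiveCatalogConfig.PARTITION_LOCATION) and moves it into the Hive StorageDescriptor. A partition carrying an explicit location could be assembled roughly as sketched below; the HDFS path and comment are made-up placeholders.

// Hypothetical sketch: a CatalogPartition whose properties point the Hive
// partition at an explicit storage location.
static CatalogPartition partitionWithLocation() {
    Map<String, String> partitionProps = new HashMap<>();
    // Placeholder path; HiveCatalog moves this value into the StorageDescriptor.
    partitionProps.put(HiveCatalogConfig.PARTITION_LOCATION,
            "hdfs:///warehouse/db1.db/orders/dt=2020-01-01");
    return new CatalogPartitionImpl(partitionProps, "partition with explicit location");
}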
Example #3
Source File: HiveCatalogHiveMetadataTest.java From flink with Apache License 2.0
@Override
@Test
public void testAlterPartition() throws Exception {
    catalog.createDatabase(db1, createDb(), false);
    catalog.createTable(path1, createPartitionedTable(), false);
    catalog.createPartition(path1, createPartitionSpec(), createPartition(), false);

    assertEquals(Collections.singletonList(createPartitionSpec()), catalog.listPartitions(path1));
    CatalogPartition cp = catalog.getPartition(path1, createPartitionSpec());
    CatalogTestUtil.checkEquals(createPartition(), cp);
    assertNull(cp.getProperties().get("k"));

    CatalogPartition another = createPartition();
    another.getProperties().put("k", "v");
    another.getProperties().put(
            SqlAlterHiveTable.ALTER_TABLE_OP,
            SqlAlterHiveTable.AlterTableOp.CHANGE_TBL_PROPS.name());

    catalog.alterPartition(path1, createPartitionSpec(), another, false);

    assertEquals(Collections.singletonList(createPartitionSpec()), catalog.listPartitions(path1));
    cp = catalog.getPartition(path1, createPartitionSpec());
    CatalogTestUtil.checkEquals(another, cp);
    assertEquals("v", cp.getProperties().get("k"));
}
Example #4
Source File: HiveCatalog.java From flink with Apache License 2.0
private static void ensureTableAndPartitionMatch(Table hiveTable, CatalogPartition catalogPartition) {
    boolean tableIsGeneric = Boolean.valueOf(hiveTable.getParameters().get(CatalogConfig.IS_GENERIC));
    boolean partitionIsGeneric = Boolean.valueOf(catalogPartition.getProperties().get(CatalogConfig.IS_GENERIC));

    if (tableIsGeneric != partitionIsGeneric) {
        throw new CatalogException(String.format("Cannot handle %s partition for %s table",
                catalogPartition.getClass().getName(), tableIsGeneric ? "generic" : "non-generic"));
    }
}
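The check above reads the IS_GENERIC flag from the partition properties and requires it to agree with the table. A partition intended for a generic (non-Hive) table would therefore carry the flag explicitly, roughly as in this sketch; the property map and comment are made-up examples.

// Hypothetical sketch: marking a partition as generic so that
// ensureTableAndPartitionMatch accepts it for a generic table.
static CatalogPartition genericPartition() {
    Map<String, String> props = new HashMap<>();
    props.put(CatalogConfig.IS_GENERIC, "true");
    return new CatalogPartitionImpl(props, "generic partition");
}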
Example #5
Source File: AddPartitionsOperation.java From flink with Apache License 2.0
public AddPartitionsOperation(
        ObjectIdentifier tableIdentifier,
        boolean ifNotExists,
        List<CatalogPartitionSpec> partitionSpecs,
        List<CatalogPartition> catalogPartitions) {
    super(tableIdentifier);
    this.ifNotExists = ifNotExists;
    this.partitionSpecs = partitionSpecs;
    this.catalogPartitions = catalogPartitions;
}
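The constructor above takes parallel lists: the spec at index i describes the partition at index i. A usage sketch follows; the table identifier, partition values, and comments are hypothetical, and imports are assumed to match the examples above.

// Hypothetical sketch: adding two partitions in one operation with
// IF NOT EXISTS semantics (the boolean flag set to true).
static AddPartitionsOperation addTwoPartitions(ObjectIdentifier tableIdentifier) {
    List<CatalogPartitionSpec> specs = Arrays.asList(
            new CatalogPartitionSpec(Collections.singletonMap("dt", "2020-01-01")),
            new CatalogPartitionSpec(Collections.singletonMap("dt", "2020-01-02")));
    List<CatalogPartition> partitions = Arrays.asList(
            new CatalogPartitionImpl(new HashMap<>(), "first day"),
            new CatalogPartitionImpl(new HashMap<>(), "second day"));
    return new AddPartitionsOperation(tableIdentifier, true, specs, partitions);
}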
Example #6
Source File: HiveCatalog.java From flink with Apache License 2.0
private Partition instantiateHivePartition(Table hiveTable, CatalogPartitionSpec partitionSpec, CatalogPartition catalogPartition)
        throws PartitionSpecInvalidException {
    List<String> partCols = getFieldNames(hiveTable.getPartitionKeys());
    List<String> partValues = getOrderedFullPartitionValues(
            partitionSpec, partCols, new ObjectPath(hiveTable.getDbName(), hiveTable.getTableName()));
    // validate partition values
    for (int i = 0; i < partCols.size(); i++) {
        if (StringUtils.isNullOrWhitespaceOnly(partValues.get(i))) {
            throw new PartitionSpecInvalidException(getName(), partCols,
                    new ObjectPath(hiveTable.getDbName(), hiveTable.getTableName()), partitionSpec);
        }
    }
    // TODO: handle GenericCatalogPartition
    StorageDescriptor sd = hiveTable.getSd().deepCopy();
    sd.setLocation(catalogPartition.getProperties().remove(SqlCreateHiveTable.TABLE_LOCATION_URI));
    Map<String, String> properties = new HashMap<>(catalogPartition.getProperties());
    String comment = catalogPartition.getComment();
    if (comment != null) {
        properties.put(HiveCatalogConfig.COMMENT, comment);
    }
    return HiveTableUtil.createHivePartition(
            hiveTable.getDbName(),
            hiveTable.getTableName(),
            partValues,
            sd,
            properties);
}
Example #7
Source File: HiveCatalog.java From flink with Apache License 2.0
private static void ensureTableAndPartitionMatch(Table hiveTable, CatalogPartition catalogPartition) {
    boolean tableIsGeneric = isGenericForGet(hiveTable.getParameters());
    boolean partitionIsGeneric = isGenericForGet(catalogPartition.getProperties());

    if (tableIsGeneric != partitionIsGeneric) {
        throw new CatalogException(String.format("Cannot handle %s partition for %s table",
                catalogPartition.getClass().getName(), tableIsGeneric ? "generic" : "non-generic"));
    }
}
Example #8
Source File: AbstractJdbcCatalog.java From flink with Apache License 2.0
@Override
public void createPartition(
        ObjectPath tablePath,
        CatalogPartitionSpec partitionSpec,
        CatalogPartition partition,
        boolean ignoreIfExists)
        throws TableNotExistException, TableNotPartitionedException, PartitionSpecInvalidException,
                PartitionAlreadyExistsException, CatalogException {
    throw new UnsupportedOperationException();
}
Example #9
Source File: AlterPartitionPropertiesOperation.java From flink with Apache License 2.0
public CatalogPartition getCatalogPartition() {
    return catalogPartition;
}
Example #10
Source File: AlterPartitionPropertiesOperation.java From flink with Apache License 2.0
public AlterPartitionPropertiesOperation(
        ObjectIdentifier tableIdentifier,
        CatalogPartitionSpec partitionSpec,
        CatalogPartition catalogPartition) {
    super(tableIdentifier, partitionSpec);
    this.catalogPartition = catalogPartition;
}
Example #11
Source File: AddPartitionsOperation.java From flink with Apache License 2.0
public List<CatalogPartition> getCatalogPartitions() {
    return catalogPartitions;
}
Example #12
Source File: HiveCatalogGenericMetadataTest.java From flink with Apache License 2.0
@Override
public CatalogPartition createPartition() {
    throw new UnsupportedOperationException();
}
Example #13
Source File: AbstractJdbcCatalog.java From flink with Apache License 2.0
@Override
public void alterPartition(
        ObjectPath tablePath,
        CatalogPartitionSpec partitionSpec,
        CatalogPartition newPartition,
        boolean ignoreIfNotExists)
        throws PartitionNotExistException, CatalogException {
    throw new UnsupportedOperationException();
}
Example #14
Source File: PulsarCatalog.java From pulsar-flink with Apache License 2.0
@Override
public CatalogPartition getPartition(ObjectPath tablePath, CatalogPartitionSpec partitionSpec)
        throws PartitionNotExistException, CatalogException {
    throw new UnsupportedOperationException();
}
Example #15
Source File: AbstractJdbcCatalog.java From flink with Apache License 2.0
@Override
public CatalogPartition getPartition(ObjectPath tablePath, CatalogPartitionSpec partitionSpec)
        throws PartitionNotExistException, CatalogException {
    throw new UnsupportedOperationException();
}
Example #16
Source File: AbstractReadOnlyCatalog.java From bahir-flink with Apache License 2.0
@Override
public void alterPartition(
        ObjectPath tablePath,
        CatalogPartitionSpec partitionSpec,
        CatalogPartition newPartition,
        boolean ignoreIfNotExists)
        throws PartitionNotExistException, CatalogException {
    throw UNSUPPORTED_ERR;
}
Example #17
Source File: AbstractReadOnlyCatalog.java From bahir-flink with Apache License 2.0
@Override
public void createPartition(
        ObjectPath tablePath,
        CatalogPartitionSpec partitionSpec,
        CatalogPartition partition,
        boolean ignoreIfExists)
        throws TableNotExistException, TableNotPartitionedException, PartitionSpecInvalidException,
                PartitionAlreadyExistsException, CatalogException {
    throw UNSUPPORTED_ERR;
}
Example #18
Source File: AbstractReadOnlyCatalog.java From bahir-flink with Apache License 2.0
@Override
public CatalogPartition getPartition(ObjectPath tablePath, CatalogPartitionSpec partitionSpec)
        throws PartitionNotExistException, CatalogException {
    throw UNSUPPORTED_ERR;
}
Example #19
Source File: HiveCatalogGenericMetadataTest.java From flink with Apache License 2.0
@Override
public CatalogPartition createPartition() {
    throw new UnsupportedOperationException();
}
Example #20
Source File: PulsarCatalog.java From pulsar-flink with Apache License 2.0
@Override
public void alterPartition(
        ObjectPath tablePath,
        CatalogPartitionSpec partitionSpec,
        CatalogPartition newPartition,
        boolean ignoreIfNotExists)
        throws PartitionNotExistException, CatalogException {
    throw new UnsupportedOperationException();
}
Example #21
Source File: PulsarCatalog.java From pulsar-flink with Apache License 2.0
@Override
public void createPartition(
        ObjectPath tablePath,
        CatalogPartitionSpec partitionSpec,
        CatalogPartition partition,
        boolean ignoreIfExists)
        throws TableNotExistException, TableNotPartitionedException, PartitionSpecInvalidException,
                PartitionAlreadyExistsException, CatalogException {
    throw new UnsupportedOperationException();
}