Java Code Examples for org.apache.hadoop.hbase.client.TableDescriptor#getColumnFamilies()
The following examples show how to use org.apache.hadoop.hbase.client.TableDescriptor#getColumnFamilies().
Follow the links above each example to view the original project or source file.
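Before diving into the examples, here is a minimal, self-contained sketch of the typical call pattern: obtain a TableDescriptor from an Admin and iterate over the array returned by getColumnFamilies(). The table name "my_table" and the printed properties are assumptions for illustration only and are not taken from any project below.

import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.TableDescriptor;

public class ListColumnFamilies {
    public static void main(String[] args) throws Exception {
        // Hypothetical table name used only for this sketch.
        TableName tableName = TableName.valueOf("my_table");
        try (Connection connection = ConnectionFactory.createConnection(HBaseConfiguration.create());
                Admin admin = connection.getAdmin()) {
            TableDescriptor tableDescriptor = admin.getDescriptor(tableName);
            // getColumnFamilies() returns one ColumnFamilyDescriptor per column family of the table.
            for (ColumnFamilyDescriptor family : tableDescriptor.getColumnFamilies()) {
                System.out.println(family.getNameAsString()
                        + " maxVersions=" + family.getMaxVersions()
                        + " ttl=" + family.getTimeToLive());
            }
        }
    }
}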
Example 1
Source File: SetPropertyIT.java From phoenix with Apache License 2.0
@Test
public void testSetHTableAndHColumnProperties() throws Exception {
    Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
    String ddl = "CREATE TABLE " + dataTableFullName + " (\n"
            + "ID1 VARCHAR(15) NOT NULL,\n"
            + "ID2 VARCHAR(15) NOT NULL,\n"
            + "CREATED_DATE DATE,\n"
            + "CREATION_TIME BIGINT,\n"
            + "LAST_USED DATE,\n"
            + "CONSTRAINT PK PRIMARY KEY (ID1, ID2)) "
            + generateDDLOptions("SALT_BUCKETS = 8");
    Connection conn1 = DriverManager.getConnection(getUrl(), props);
    conn1.createStatement().execute(ddl);
    ddl = "ALTER TABLE " + dataTableFullName
            + " SET COMPACTION_ENABLED = FALSE, REPLICATION_SCOPE = 1";
    conn1.createStatement().execute(ddl);
    try (Admin admin = conn1.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) {
        TableDescriptor tableDesc = admin.getDescriptor(TableName.valueOf(dataTableFullName));
        ColumnFamilyDescriptor[] columnFamilies = tableDesc.getColumnFamilies();
        assertEquals(1, columnFamilies.length);
        assertEquals("0", columnFamilies[0].getNameAsString());
        assertEquals(1, columnFamilies[0].getScope());
        assertEquals(false, tableDesc.isCompactionEnabled());
    }
}
Example 2
Source File: MultiHfileOutputFormat.java From phoenix with Apache License 2.0
/**
 * Serialize the column family to compression algorithm map to configuration.
 * Invoked while configuring the MR job for incremental load.
 *
 * @param tableDescriptor to read the column family properties from
 * @throws UnsupportedEncodingException on failure to URL-encode column family values
 */
@edu.umd.cs.findbugs.annotations.SuppressWarnings(
        value = "RCN_REDUNDANT_NULLCHECK_OF_NONNULL_VALUE")
@VisibleForTesting
static String configureCompression(TableDescriptor tableDescriptor)
        throws UnsupportedEncodingException {
    StringBuilder compressionConfigValue = new StringBuilder();
    if (tableDescriptor == null) {
        // could happen with mock table instance
        return compressionConfigValue.toString();
    }
    ColumnFamilyDescriptor[] families = tableDescriptor.getColumnFamilies();
    int i = 0;
    for (ColumnFamilyDescriptor familyDescriptor : families) {
        if (i++ > 0) {
            compressionConfigValue.append('&');
        }
        compressionConfigValue.append(URLEncoder.encode(
                familyDescriptor.getNameAsString(), "UTF-8"));
        compressionConfigValue.append('=');
        compressionConfigValue.append(URLEncoder.encode(
                familyDescriptor.getCompressionType().getName(), "UTF-8"));
    }
    return compressionConfigValue.toString();
}
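As a usage note for the serialization format produced above, the sketch below shows one way the "&"-joined, URL-encoded family=algorithm pairs could be decoded back into a map. The class and method names are hypothetical and are not part of Phoenix's MultiHfileOutputFormat.

import java.io.UnsupportedEncodingException;
import java.net.URLDecoder;
import java.util.HashMap;
import java.util.Map;

public final class CompressionConfigParser {

    // Decodes a string such as "0=none&CF1=gz" into {"0" -> "none", "CF1" -> "gz"}.
    static Map<String, String> parseCompressionConfig(String configValue)
            throws UnsupportedEncodingException {
        Map<String, String> compressionByFamily = new HashMap<>();
        if (configValue == null || configValue.isEmpty()) {
            return compressionByFamily;
        }
        for (String pair : configValue.split("&")) {
            String[] parts = pair.split("=", 2);
            if (parts.length == 2) {
                compressionByFamily.put(URLDecoder.decode(parts[0], "UTF-8"),
                        URLDecoder.decode(parts[1], "UTF-8"));
            }
        }
        return compressionByFamily;
    }
}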
Example 3
Source File: TestHColumnDescriptorDefaultVersions.java From hbase with Apache License 2.0
private void verifyHColumnDescriptor(int expected, final TableName tableName,
        final byte[]... families) throws IOException {
    Admin admin = TEST_UTIL.getAdmin();

    // Verify descriptor from master
    TableDescriptor htd = admin.getDescriptor(tableName);
    ColumnFamilyDescriptor[] hcds = htd.getColumnFamilies();
    verifyHColumnDescriptor(expected, hcds, tableName, families);

    // Verify descriptor from HDFS
    MasterFileSystem mfs = TEST_UTIL.getMiniHBaseCluster().getMaster().getMasterFileSystem();
    Path tableDir = CommonFSUtils.getTableDir(mfs.getRootDir(), tableName);
    TableDescriptor td = FSTableDescriptors.getTableDescriptorFromFs(mfs.getFileSystem(), tableDir);
    hcds = td.getColumnFamilies();
    verifyHColumnDescriptor(expected, hcds, tableName, families);
}
Example 4
Source File: UpgradeUtil.java From phoenix with Apache License 2.0
/**
 * Synchronize column family properties using the default cf properties for a given table
 * @param tableDesc table descriptor of table to modify
 * @param defaultColFam default column family used as the baseline for property synchronization
 * @param syncedProps Map of properties to be kept in sync as read from the default column family descriptor
 * @return modified table descriptor builder
 */
private static TableDescriptorBuilder syncColFamProperties(TableDescriptor tableDesc,
        ColumnFamilyDescriptor defaultColFam, Map<String, Object> syncedProps) {
    TableDescriptorBuilder tableDescBuilder = TableDescriptorBuilder.newBuilder(tableDesc);
    // Ensure that all column families have necessary properties in sync (including local index cf if present)
    for (ColumnFamilyDescriptor currentColFam : tableDesc.getColumnFamilies()) {
        if (!currentColFam.equals(defaultColFam)) {
            ColumnFamilyDescriptorBuilder colFamDescBuilder =
                    ColumnFamilyDescriptorBuilder.newBuilder(currentColFam);
            for (String prop : MetaDataUtil.SYNCED_DATA_TABLE_AND_INDEX_COL_FAM_PROPERTIES) {
                String existingPropVal = Bytes.toString(currentColFam.getValue(Bytes.toBytes(prop)));
                String expectedPropVal = syncedProps.get(prop).toString();
                if (existingPropVal == null
                        || !existingPropVal.toLowerCase().equals(expectedPropVal.toLowerCase())) {
                    // Need to synchronize this property for the current column family descriptor
                    colFamDescBuilder.setValue(prop, expectedPropVal);
                }
            }
            if (!colFamDescBuilder.equals(ColumnFamilyDescriptorBuilder.newBuilder(currentColFam))) {
                tableDescBuilder.modifyColumnFamily(colFamDescBuilder.build());
            }
        }
    }
    return tableDescBuilder;
}
Example 5
Source File: MergeTableRegionsProcedure.java From hbase with Apache License 2.0
/**
 * Create reference file(s) to parent region hfiles in the <code>mergeDir</code>
 * @param regionFs merge parent region file system
 * @param mergeDir the temp directory in which we are accumulating references.
 */
private void mergeStoreFiles(final MasterProcedureEnv env, final HRegionFileSystem regionFs,
        final Path mergeDir) throws IOException {
    final TableDescriptor htd = env.getMasterServices().getTableDescriptors().get(getTableName());
    for (ColumnFamilyDescriptor hcd : htd.getColumnFamilies()) {
        String family = hcd.getNameAsString();
        final Collection<StoreFileInfo> storeFiles = regionFs.getStoreFiles(family);
        if (storeFiles != null && storeFiles.size() > 0) {
            for (StoreFileInfo storeFileInfo : storeFiles) {
                // Create reference file(s) to parent region file here in mergeDir.
                // As this procedure is running on master, use CacheConfig.DISABLED means
                // don't cache any block.
                regionFs.mergeStoreFile(mergedRegion, family,
                        new HStoreFile(storeFileInfo, hcd.getBloomFilterType(), CacheConfig.DISABLED),
                        mergeDir);
            }
        }
    }
}
Example 6
Source File: RemoveColumnAction.java From hbase with Apache License 2.0
@Override
public void perform() throws Exception {
    TableDescriptor tableDescriptor = admin.getDescriptor(tableName);
    ColumnFamilyDescriptor[] columnDescriptors = tableDescriptor.getColumnFamilies();

    if (columnDescriptors.length <= (protectedColumns == null ? 1 : protectedColumns.size())) {
        return;
    }

    int index = random.nextInt(columnDescriptors.length);
    while (protectedColumns != null
            && protectedColumns.contains(columnDescriptors[index].getNameAsString())) {
        index = random.nextInt(columnDescriptors.length);
    }
    byte[] colDescName = columnDescriptors[index].getName();
    getLogger().debug("Performing action: Removing " + Bytes.toString(colDescName) + " from "
            + tableName.getNameAsString());

    TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(tableDescriptor);
    builder.removeColumnFamily(colDescName);

    // Don't try the modify if we're stopping
    if (context.isStopping()) {
        return;
    }
    admin.modifyTable(builder.build());
}
Example 7
Source File: DataBlockEncodingValidator.java From hbase with Apache License 2.0
/**
 * Check DataBlockEncodings of column families are compatible.
 *
 * @return number of column families with incompatible DataBlockEncoding
 * @throws IOException if a remote or network exception occurs
 */
private int validateDBE() throws IOException {
    int incompatibilities = 0;

    LOG.info("Validating Data Block Encodings");

    try (Connection connection = ConnectionFactory.createConnection(getConf());
            Admin admin = connection.getAdmin()) {
        List<TableDescriptor> tableDescriptors = admin.listTableDescriptors();
        String encoding = "";

        for (TableDescriptor td : tableDescriptors) {
            ColumnFamilyDescriptor[] columnFamilies = td.getColumnFamilies();
            for (ColumnFamilyDescriptor cfd : columnFamilies) {
                try {
                    encoding = Bytes.toString(cfd.getValue(DATA_BLOCK_ENCODING));
                    // IllegalArgumentException will be thrown if encoding is incompatible with 2.0
                    DataBlockEncoding.valueOf(encoding);
                } catch (IllegalArgumentException e) {
                    incompatibilities++;
                    LOG.warn("Incompatible DataBlockEncoding for table: {}, cf: {}, encoding: {}",
                            td.getTableName().getNameAsString(), cfd.getNameAsString(), encoding);
                }
            }
        }
    }

    if (incompatibilities > 0) {
        LOG.warn("There are {} column families with incompatible Data Block Encodings. Do not "
                + "upgrade until these encodings are converted to a supported one. "
                + "Check https://s.apache.org/prefixtree for instructions.", incompatibilities);
    } else {
        LOG.info("The used Data Block Encodings are compatible with HBase 2.0.");
    }

    return incompatibilities;
}
Example 8
Source File: TransactionIT.java From phoenix with Apache License 2.0
private static void assertTTL(Admin admin, String tableName, int ttl) throws Exception {
    TableDescriptor tableDesc = admin.getTableDescriptor(TableName.valueOf(tableName));
    for (ColumnFamilyDescriptor colDesc : tableDesc.getColumnFamilies()) {
        assertEquals(ttl, Integer.parseInt(Bytes.toString(
                colDesc.getValue(Bytes.toBytes(TxConstants.PROPERTY_TTL)))));
        assertEquals(ColumnFamilyDescriptorBuilder.DEFAULT_TTL, colDesc.getTimeToLive());
    }
}
Example 9
Source File: MobUtils.java From hbase with Apache License 2.0
/**
 * Checks whether this table has mob-enabled columns.
 * @param htd The current table descriptor.
 * @return Whether this table has mob-enabled columns.
 */
public static boolean hasMobColumns(TableDescriptor htd) {
    ColumnFamilyDescriptor[] hcds = htd.getColumnFamilies();
    for (ColumnFamilyDescriptor hcd : hcds) {
        if (hcd.isMobEnabled()) {
            return true;
        }
    }
    return false;
}
Example 10
Source File: MajorCompactionTTLRequest.java From hbase with Apache License 2.0
Map<String, Long> getStoresRequiringCompaction(TableDescriptor htd) throws IOException {
    try (Connection connection = getConnection(configuration)) {
        HRegionFileSystem fileSystem = getFileSystem(connection);
        Map<String, Long> familyTTLMap = Maps.newHashMap();
        for (ColumnFamilyDescriptor descriptor : htd.getColumnFamilies()) {
            long ts = getColFamilyCutoffTime(descriptor);
            // If the table's TTL is forever, let's not compact any of the regions.
            if (ts > 0 && shouldCFBeCompacted(fileSystem, descriptor.getNameAsString(), ts)) {
                familyTTLMap.put(descriptor.getNameAsString(), ts);
            }
        }
        return familyTTLMap;
    }
}
Example 11
Source File: SetPropertyIT.java From phoenix with Apache License 2.0
@Test
public void testSetHColumnPropertyForTableWithOnlyPKCols1() throws Exception {
    Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
    Connection conn = DriverManager.getConnection(getUrl(), props);
    conn.setAutoCommit(false);
    try {
        String ddl = "create table " + dataTableFullName + " ("
                + " id char(1) NOT NULL,"
                + " col1 integer NOT NULL,"
                + " col2 bigint NOT NULL,"
                + " CONSTRAINT NAME_PK PRIMARY KEY (id, col1, col2)"
                + " ) "
                + generateDDLOptions("TTL=86400, SALT_BUCKETS = 4, DEFAULT_COLUMN_FAMILY='XYZ'");
        conn.createStatement().execute(ddl);
        ddl = "ALTER TABLE " + dataTableFullName + " SET IN_MEMORY=true";
        conn.createStatement().execute(ddl);
        conn.commit();
        try (Admin admin = conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) {
            TableDescriptor tableDesc = admin.getDescriptor(TableName.valueOf(dataTableFullName));
            ColumnFamilyDescriptor[] columnFamilies = tableDesc.getColumnFamilies();
            assertEquals(1, columnFamilies.length);
            assertEquals(true, columnFamilies[0].isInMemory());
            assertEquals("XYZ", columnFamilies[0].getNameAsString());
        }
    } finally {
        conn.close();
    }
}
Example 12
Source File: SetPropertyIT.java From phoenix with Apache License 2.0
@Test
public void testSettingPropertiesWhenTableHasDefaultColFamilySpecified() throws Exception {
    Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
    String ddl = "CREATE TABLE " + dataTableFullName + " (\n"
            + "ID1 VARCHAR(15) NOT NULL,\n"
            + "ID2 VARCHAR(15) NOT NULL,\n"
            + "CREATED_DATE DATE,\n"
            + "CREATION_TIME BIGINT,\n"
            + "CF.LAST_USED DATE,\n"
            + "CONSTRAINT PK PRIMARY KEY (ID1, ID2)) "
            + generateDDLOptions("IMMUTABLE_ROWS=true, DEFAULT_COLUMN_FAMILY = 'XYZ'"
                    + (!columnEncoded
                            ? ",IMMUTABLE_STORAGE_SCHEME=" + PTable.ImmutableStorageScheme.ONE_CELL_PER_COLUMN
                            : ""));
    Connection conn = DriverManager.getConnection(getUrl(), props);
    conn.createStatement().execute(ddl);
    assertImmutableRows(conn, dataTableFullName, true);
    ddl = "ALTER TABLE " + dataTableFullName
            + " SET COMPACTION_ENABLED = FALSE, CF.BLOCKSIZE=50000, IMMUTABLE_ROWS = TRUE, TTL=1000";
    conn.createStatement().execute(ddl);
    assertImmutableRows(conn, dataTableFullName, true);
    try (Admin admin = conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) {
        TableDescriptor tableDesc = admin.getDescriptor(TableName.valueOf(dataTableFullName));
        ColumnFamilyDescriptor[] columnFamilies = tableDesc.getColumnFamilies();
        assertEquals(2, columnFamilies.length);
        assertEquals("CF", columnFamilies[0].getNameAsString());
        assertEquals(ColumnFamilyDescriptorBuilder.DEFAULT_REPLICATION_SCOPE, columnFamilies[0].getScope());
        assertEquals(1000, columnFamilies[0].getTimeToLive());
        assertEquals(50000, columnFamilies[0].getBlocksize());
        assertEquals("XYZ", columnFamilies[1].getNameAsString());
        assertEquals(ColumnFamilyDescriptorBuilder.DEFAULT_REPLICATION_SCOPE, columnFamilies[1].getScope());
        assertEquals(1000, columnFamilies[1].getTimeToLive());
        assertEquals(ColumnFamilyDescriptorBuilder.DEFAULT_BLOCKSIZE, columnFamilies[1].getBlocksize());
        assertEquals(Boolean.toString(false), tableDesc.getValue(TableDescriptorBuilder.COMPACTION_ENABLED));
    }
}
Example 13
Source File: TransactionProcessor.java From phoenix-tephra with Apache License 2.0
@Override
public void start(CoprocessorEnvironment e) throws IOException {
    if (e instanceof RegionCoprocessorEnvironment) {
        RegionCoprocessorEnvironment env = (RegionCoprocessorEnvironment) e;
        this.cacheSupplier = getTransactionStateCacheSupplier(env);
        this.cache = cacheSupplier.get();

        TableDescriptor tableDesc = env.getRegion().getTableDescriptor();
        for (ColumnFamilyDescriptor columnDesc : tableDesc.getColumnFamilies()) {
            byte[] columnTTL = columnDesc.getValue(Bytes.toBytes(TxConstants.PROPERTY_TTL));
            long ttl = 0;
            if (columnTTL != null) {
                try {
                    ttl = Long.parseLong(Bytes.toString(columnTTL));
                    LOG.info("Family " + columnDesc.getNameAsString() + " has TTL of " + ttl);
                } catch (NumberFormatException nfe) {
                    LOG.warn("Invalid TTL value configured for column family "
                            + columnDesc.getNameAsString() + ", value = " + Bytes.toString(columnTTL));
                }
            }
            ttlByFamily.put(columnDesc.getName(), ttl);
        }

        this.allowEmptyValues = getAllowEmptyValues(env, tableDesc);
        this.txMaxLifetimeMillis = getTxMaxLifetimeMillis(env);
        this.readNonTxnData = Boolean.valueOf(tableDesc.getValue(TxConstants.READ_NON_TX_DATA));
        if (readNonTxnData) {
            LOG.info("Reading pre-existing data enabled for table "
                    + tableDesc.getTableName().getNameAsString());
        }
        initializePruneState(env);
    }
}
Example 14
Source File: SetPropertyIT.java From phoenix with Apache License 2.0
@Test
public void testAddProperyToExistingColumnFamily() throws Exception {
    Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
    Connection conn = DriverManager.getConnection(getUrl(), props);
    conn.setAutoCommit(false);
    try {
        conn.createStatement().execute(
                "CREATE TABLE " + dataTableFullName
                        + " (a_string varchar not null, col1 integer, cf1.col2 integer, col3 integer , cf2.col4 integer "
                        + " CONSTRAINT pk PRIMARY KEY (a_string)) "
                        + generateDDLOptions("immutable_rows=true , SALT_BUCKETS=3 "
                                + (!columnEncoded
                                        ? ",IMMUTABLE_STORAGE_SCHEME=" + PTable.ImmutableStorageScheme.ONE_CELL_PER_COLUMN
                                        : "")));
        String ddl = "Alter table " + dataTableFullName + " add cf1.col5 integer in_memory=true";
        conn.createStatement().execute(ddl);

        try (Admin admin = conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) {
            TableDescriptor tableDesc = admin.getDescriptor(TableName.valueOf(dataTableFullName));
            assertTrue(tableDesc.isCompactionEnabled());
            ColumnFamilyDescriptor[] columnFamilies = tableDesc.getColumnFamilies();
            assertEquals(3, columnFamilies.length);
            assertEquals("0", columnFamilies[0].getNameAsString());
            assertFalse(columnFamilies[0].isInMemory());
            assertEquals("CF1", columnFamilies[1].getNameAsString());
            assertTrue(columnFamilies[1].isInMemory());
            assertEquals("CF2", columnFamilies[2].getNameAsString());
            assertFalse(columnFamilies[2].isInMemory());
        }
    } finally {
        conn.close();
    }
}
Example 15
Source File: HBaseAtlasHook.java From atlas with Apache License 2.0
private HBaseOperationContext handleHBaseTableOperation(TableDescriptor tableDescriptor,
        TableName tableName, OPERATION operation, UserGroupInformation ugi, String userName) {
    if (LOG.isDebugEnabled()) {
        LOG.debug("==> HBaseAtlasHook.handleHBaseTableOperation()");
    }

    Map<String, String> hbaseConf = null;
    String owner = null;
    String tableNameSpace = null;
    TableName hbaseTableName = null;
    ColumnFamilyDescriptor[] columnFamilyDescriptors = null;

    if (tableDescriptor != null) {
        owner = tableDescriptor.getOwnerString();
        hbaseConf = null;
        hbaseTableName = tableDescriptor.getTableName();
        if (hbaseTableName != null) {
            tableNameSpace = hbaseTableName.getNamespaceAsString();
            if (tableNameSpace == null) {
                tableNameSpace = hbaseTableName.getNameWithNamespaceInclAsString();
            }
        }
    }

    if (owner == null) {
        owner = userName;
    }

    if (tableDescriptor != null) {
        columnFamilyDescriptors = tableDescriptor.getColumnFamilies();
    }

    HBaseOperationContext hbaseOperationContext = new HBaseOperationContext(tableNameSpace,
            tableDescriptor, tableName, columnFamilyDescriptors, operation, ugi, userName, owner,
            hbaseConf);
    createAtlasInstances(hbaseOperationContext);

    if (LOG.isDebugEnabled()) {
        LOG.debug("<== HBaseAtlasHook.handleHBaseTableOperation(): {}", hbaseOperationContext);
    }
    return hbaseOperationContext;
}
Example 16
Source File: SetPropertyIT.java From phoenix with Apache License 2.0
@Test
public void testSetHColumnPropertyAndAddColumnForDefaultCFForTableWithOnlyPKCols() throws Exception {
    Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
    Connection conn = DriverManager.getConnection(getUrl(), props);
    conn.setAutoCommit(false);
    try {
        String ddl = "create table " + dataTableFullName + " ("
                + " id char(1) NOT NULL,"
                + " col1 integer NOT NULL,"
                + " col2 bigint NOT NULL,"
                + " CONSTRAINT NAME_PK PRIMARY KEY (id, col1, col2)"
                + " ) "
                + generateDDLOptions("TTL=86400, SALT_BUCKETS = 4, DEFAULT_COLUMN_FAMILY='XYZ'");
        conn.createStatement().execute(ddl);
        ddl = "ALTER TABLE " + dataTableFullName + " ADD COL3 INTEGER IN_MEMORY=true";
        conn.createStatement().execute(ddl);
        conn.commit();
        try (Admin admin = conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) {
            TableDescriptor tableDesc = admin.getDescriptor(TableName.valueOf(dataTableFullName));
            ColumnFamilyDescriptor[] columnFamilies = tableDesc.getColumnFamilies();
            assertEquals(1, columnFamilies.length);
            assertEquals(true, columnFamilies[0].isInMemory());
            assertEquals("XYZ", columnFamilies[0].getNameAsString());
        }
    } finally {
        conn.close();
    }
}
Example 17
Source File: TestMasterObserverToModifyTableSchema.java From hbase with Apache License 2.0
@Override
public TableDescriptor preCreateTableRegionsInfos(ObserverContext<MasterCoprocessorEnvironment> ctx,
        TableDescriptor desc) throws IOException {
    TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(desc);
    for (ColumnFamilyDescriptor cfd : desc.getColumnFamilies()) {
        builder.modifyColumnFamily(
                ColumnFamilyDescriptorBuilder.newBuilder(cfd).setMaxVersions(1).build());
    }
    return builder.build();
}
Example 18
Source File: TestMasterObserverToModifyTableSchema.java From hbase with Apache License 2.0
private void assertOneVersion(TableDescriptor td) {
    for (ColumnFamilyDescriptor cfd : td.getColumnFamilies()) {
        assertEquals(1, cfd.getMaxVersions());
    }
}
Example 19
Source File: TableDescriptorChecker.java From hbase with Apache License 2.0
public static void checkCompression(final TableDescriptor td) throws IOException {
    for (ColumnFamilyDescriptor cfd : td.getColumnFamilies()) {
        CompressionTest.testCompression(cfd.getCompressionType());
        CompressionTest.testCompression(cfd.getCompactionCompressionType());
    }
}
Example 20
Source File: TableDescriptorChecker.java From hbase with Apache License 2.0
public static void checkEncryption(final Configuration conf, final TableDescriptor td)
        throws IOException {
    for (ColumnFamilyDescriptor cfd : td.getColumnFamilies()) {
        EncryptionTest.testEncryption(conf, cfd.getEncryptionType(), cfd.getEncryptionKey());
    }
}