Java Code Examples for org.apache.hadoop.hbase.TableName#getNameAsString()
The following examples show how to use org.apache.hadoop.hbase.TableName#getNameAsString().
Each example is drawn from an open-source project; the originating project, source file, and license are noted above each snippet.
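Before the examples, here is a minimal, self-contained sketch of what getNameAsString() returns. This snippet is not taken from any of the projects below; the expected output in the comments reflects TableName behavior as commonly documented, so verify against your HBase version.

import org.apache.hadoop.hbase.TableName;

public class TableNameDemo {
  public static void main(String[] args) {
    // Table in the default namespace: the name string is just the qualifier.
    TableName plain = TableName.valueOf("users");
    System.out.println(plain.getNameAsString());      // users

    // Table in an explicit namespace: the name string is "namespace:qualifier".
    TableName namespaced = TableName.valueOf("prod", "users");
    System.out.println(namespaced.getNameAsString()); // prod:users
  }
}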
Example 1
Source File: RegionsMerger.java From hbase-operator-tools with Apache License 2.0 | 6 votes |
private List<RegionInfo> getOpenRegions(Connection connection, TableName table) throws Exception {
  List<RegionInfo> regions = new ArrayList<>();
  Table metaTbl = connection.getTable(META_TABLE_NAME);
  String tblName = table.getNameAsString();
  RowFilter rowFilter = new RowFilter(CompareOperator.EQUAL,
      new SubstringComparator(tblName + ","));
  SingleColumnValueFilter colFilter = new SingleColumnValueFilter(CATALOG_FAMILY,
      STATE_QUALIFIER, CompareOperator.EQUAL, Bytes.toBytes("OPEN"));
  Scan scan = new Scan();
  FilterList filter = new FilterList(FilterList.Operator.MUST_PASS_ALL);
  filter.addFilter(rowFilter);
  filter.addFilter(colFilter);
  scan.setFilter(filter);
  try (ResultScanner rs = metaTbl.getScanner(scan)) {
    Result r;
    while ((r = rs.next()) != null) {
      RegionInfo region = RegionInfo.parseFrom(r.getValue(CATALOG_FAMILY, REGIONINFO_QUALIFIER));
      regions.add(region);
    }
  }
  return regions;
}
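A note on the design: hbase:meta row keys begin with the table name followed by a comma (roughly <tableName>,<startKey>,<regionId>...), so the SubstringComparator on tblName + "," narrows the scan to rows whose key contains that prefix. A hypothetical invocation sketch follows; the connection setup and table name are placeholders, and it assumes the private helper above were reachable from your code.

Configuration conf = HBaseConfiguration.create();
try (Connection connection = ConnectionFactory.createConnection(conf)) {
  // "my_table" is a placeholder; getOpenRegions(...) is the helper shown above.
  List<RegionInfo> open = getOpenRegions(connection, TableName.valueOf("my_table"));
  open.forEach(region -> System.out.println(region.getRegionNameAsString()));
}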
Example 2
Source File: UngroupedAggregateRegionObserver.java From phoenix with Apache License 2.0 | 6 votes |
@Override
public void postSplit(ObserverContext<RegionCoprocessorEnvironment> e, HRegion l, HRegion r)
    throws IOException {
  HRegion region = e.getEnvironment().getRegion();
  TableName table = region.getRegionInfo().getTable();
  StatisticsCollector stats = null;
  try {
    boolean useCurrentTime = e.getEnvironment().getConfiguration().getBoolean(
        QueryServices.STATS_USE_CURRENT_TIME_ATTRIB,
        QueryServicesOptions.DEFAULT_STATS_USE_CURRENT_TIME);
    // Provides a means of clients controlling their timestamps to not use current time
    // when background tasks are updating stats. Instead we track the max timestamp of
    // the cells and use that.
    long clientTimeStamp = useCurrentTime ? TimeKeeper.SYSTEM.getCurrentTime()
        : StatisticsCollector.NO_TIMESTAMP;
    stats = new StatisticsCollector(e.getEnvironment(), table.getNameAsString(), clientTimeStamp);
    stats.splitStats(region, l, r);
  } catch (IOException ioe) {
    if (logger.isWarnEnabled()) {
      logger.warn("Error while collecting stats during split for " + table, ioe);
    }
  } finally {
    if (stats != null) stats.close();
  }
}
Example 3
Source File: TestImportTSVWithVisibilityLabels.java From hbase with Apache License 2.0 | 6 votes |
@Test
public void testBulkOutputWithInvalidLabels() throws Exception {
  final TableName tableName = TableName.valueOf(name.getMethodName() + util.getRandomUUID());
  Path hfiles = new Path(util.getDataTestDirOnTestFS(tableName.getNameAsString()), "hfiles");
  // Prepare the arguments required for the test.
  String[] args = new String[] {
      "-D" + ImportTsv.BULK_OUTPUT_CONF_KEY + "=" + hfiles.toString(),
      "-D" + ImportTsv.COLUMNS_CONF_KEY + "=HBASE_ROW_KEY,FAM:A,FAM:B,HBASE_CELL_VISIBILITY",
      "-D" + ImportTsv.SEPARATOR_CONF_KEY + "=\u001b",
      tableName.getNameAsString() };
  // 2 Data rows, one with valid label and one with invalid label
  String data = "KEY\u001bVALUE1\u001bVALUE2\u001bprivate\nKEY1\u001bVALUE1\u001bVALUE2\u001binvalid\n";
  util.createTable(tableName, FAMILY);
  doMROnTableTest(util, FAMILY, data, args, 1, 2);
  util.deleteTable(tableName);
}
Example 4
Source File: TestImportTSVWithVisibilityLabels.java From hbase with Apache License 2.0 | 6 votes |
@Test
public void testMRWithOutputFormat() throws Exception {
  final TableName tableName = TableName.valueOf(name.getMethodName() + util.getRandomUUID());
  Path hfiles = new Path(util.getDataTestDirOnTestFS(tableName.getNameAsString()), "hfiles");
  // Prepare the arguments required for the test.
  String[] args = new String[] {
      "-D" + ImportTsv.MAPPER_CONF_KEY + "=org.apache.hadoop.hbase.mapreduce.TsvImporterMapper",
      "-D" + ImportTsv.BULK_OUTPUT_CONF_KEY + "=" + hfiles.toString(),
      "-D" + ImportTsv.COLUMNS_CONF_KEY + "=HBASE_ROW_KEY,FAM:A,FAM:B,HBASE_CELL_VISIBILITY",
      "-D" + ImportTsv.SEPARATOR_CONF_KEY + "=\u001b",
      tableName.getNameAsString() };
  String data = "KEY\u001bVALUE4\u001bVALUE8\u001bsecret&private\n";
  util.createTable(tableName, FAMILY);
  doMROnTableTest(util, FAMILY, data, args, 1);
  util.deleteTable(tableName);
}
Example 5
Source File: TestImportTSVWithVisibilityLabels.java From hbase with Apache License 2.0 | 6 votes |
@Test
public void testBulkOutputWithTsvImporterTextMapperWithInvalidLabels() throws Exception {
  final TableName tableName = TableName.valueOf(name.getMethodName() + util.getRandomUUID());
  Path hfiles = new Path(util.getDataTestDirOnTestFS(tableName.getNameAsString()), "hfiles");
  // Prepare the arguments required for the test.
  String[] args = new String[] {
      "-D" + ImportTsv.MAPPER_CONF_KEY + "=org.apache.hadoop.hbase.mapreduce.TsvImporterTextMapper",
      "-D" + ImportTsv.BULK_OUTPUT_CONF_KEY + "=" + hfiles.toString(),
      "-D" + ImportTsv.COLUMNS_CONF_KEY + "=HBASE_ROW_KEY,FAM:A,FAM:B,HBASE_CELL_VISIBILITY",
      "-D" + ImportTsv.SEPARATOR_CONF_KEY + "=\u001b",
      tableName.getNameAsString() };
  // 2 Data rows, one with valid label and one with invalid label
  String data = "KEY\u001bVALUE1\u001bVALUE2\u001bprivate\nKEY1\u001bVALUE1\u001bVALUE2\u001binvalid\n";
  util.createTable(tableName, FAMILY);
  doMROnTableTest(util, FAMILY, data, args, 1, 2);
  util.deleteTable(tableName);
}
Example 6
Source File: HBaseAtlasHook.java From atlas with Apache License 2.0 | 6 votes |
private void deleteColumnFamilyInstance(HBaseOperationContext hbaseOperationContext) {
  TableName tableName = hbaseOperationContext.getTableName();
  String nameSpaceName = tableName.getNamespaceAsString();

  if (nameSpaceName == null) {
    nameSpaceName = tableName.getNameWithNamespaceInclAsString();
  }

  String tableNameStr = tableName.getNameAsString();
  String columnFamilyName = hbaseOperationContext.getColummFamily();
  String columnFamilyQName = getColumnFamilyQualifiedName(getMetadataNamespace(), nameSpaceName,
      tableNameStr, columnFamilyName);
  AtlasObjectId columnFamilyId = new AtlasObjectId(HBaseDataTypes.HBASE_COLUMN_FAMILY.getName(),
      REFERENCEABLE_ATTRIBUTE_NAME, columnFamilyQName);

  LOG.info("Delete ColumnFamily {}", columnFamilyQName);

  hbaseOperationContext.addMessage(new EntityDeleteRequestV2(hbaseOperationContext.getUser(),
      Collections.singletonList(columnFamilyId)));
}
Example 7
Source File: HBaseAtlasHook.java From atlas with Apache License 2.0 | 6 votes |
private void deleteTableInstance(HBaseOperationContext hbaseOperationContext) {
  TableName tableName = hbaseOperationContext.getTableName();
  String nameSpaceName = tableName.getNamespaceAsString();

  if (nameSpaceName == null) {
    nameSpaceName = tableName.getNameWithNamespaceInclAsString();
  }

  String tableNameStr = tableName.getNameAsString();
  String tableQName = getTableQualifiedName(getMetadataNamespace(), nameSpaceName, tableNameStr);
  AtlasObjectId tableId = new AtlasObjectId(HBaseDataTypes.HBASE_TABLE.getName(),
      REFERENCEABLE_ATTRIBUTE_NAME, tableQName);

  LOG.info("Delete Table {}", tableQName);

  hbaseOperationContext.addMessage(new EntityDeleteRequestV2(hbaseOperationContext.getUser(),
      Collections.singletonList(tableId)));
}
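Both Atlas hooks above separate the namespace from the table name before building qualified names. For reference, a small illustrative sketch of how the related TableName accessors differ; the output comments reflect the behavior as I understand it, so verify against your HBase version.

TableName t = TableName.valueOf("audit", "events");        // explicit namespace
System.out.println(t.getNamespaceAsString());              // audit
System.out.println(t.getNameAsString());                   // audit:events
System.out.println(t.getNameWithNamespaceInclAsString());  // audit:events

TableName d = TableName.valueOf("events");                 // default namespace
System.out.println(d.getNamespaceAsString());              // default
System.out.println(d.getNameAsString());                   // events
System.out.println(d.getNameWithNamespaceInclAsString());  // default:events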
Example 8
Source File: TestHBCK2.java From hbase-operator-tools with Apache License 2.0 | 6 votes |
@Test
public void testFormatFixExtraInMetaOneExtraSpecificTable() throws IOException {
  TableName tableName = createTestTable(5);
  List<RegionInfo> regions = HBCKMetaTableAccessor
      .getTableRegions(TEST_UTIL.getConnection(), tableName);
  deleteRegionDir(tableName, regions.get(0).getEncodedName());
  String expectedResult = "Regions in Meta but having no equivalent dir, for each table:\n";
  String result = testFormatExtraRegionsInMetaFix(tableName.getNameWithNamespaceInclAsString());
  // validates initial execute message
  assertTrue(result.contains(expectedResult));
  // validates our test table region is reported as extra
  expectedResult = "\t" + tableName.getNameAsString() + "->\n\t\t" + regions.get(0).getEncodedName();
  assertTrue(result.contains(expectedResult));
  // validates namespace region is not reported missing
  expectedResult = "\n\thbase:namespace -> No mismatching regions. This table is good!\n\t";
  assertFalse("Should not contain: " + expectedResult, result.contains(expectedResult));
}
Example 9
Source File: TestHBCK2.java From hbase-operator-tools with Apache License 2.0 | 6 votes |
@Test
public void testFormatFixExtraInMetaOneExtra() throws IOException {
  TableName tableName = createTestTable(5);
  List<RegionInfo> regions = HBCKMetaTableAccessor
      .getTableRegions(TEST_UTIL.getConnection(), tableName);
  deleteRegionDir(tableName, regions.get(0).getEncodedName());
  String expectedResult = "Regions in Meta but having no equivalent dir, for each table:\n";
  String result = testFormatExtraRegionsInMetaFix(null);
  // validates initial execute message
  assertTrue(result.contains(expectedResult));
  // validates our test table region is reported as extra
  expectedResult = "\t" + tableName.getNameAsString() + "->\n\t\t" + regions.get(0).getEncodedName();
  assertTrue(result.contains(expectedResult));
  // validates namespace region is not reported missing
  expectedResult = "\n\thbase:namespace -> No mismatching regions. This table is good!\n\t";
  assertTrue(result.contains(expectedResult));
}
Example 10
Source File: TestHBCK2.java From hbase-operator-tools with Apache License 2.0 | 6 votes |
@Test
public void testFormatReportExtraInMetaOneExtra() throws IOException {
  TableName tableName = createTestTable(5);
  List<RegionInfo> regions = HBCKMetaTableAccessor
      .getTableRegions(TEST_UTIL.getConnection(), tableName);
  deleteRegionDir(tableName, regions.get(0).getEncodedName());
  String expectedResult = "Regions in Meta but having no equivalent dir, for each table:\n";
  String result = testFormatExtraRegionsInMetaReport();
  // validates initial report message
  assertTrue(result.contains(expectedResult));
  // validates our test table region is reported as extra
  expectedResult = "\t" + tableName.getNameAsString() + "->\n\t\t" + regions.get(0).getEncodedName();
  assertTrue(result.contains(expectedResult));
  // validates namespace region is not reported missing
  expectedResult = "\n\thbase:namespace -> No mismatching regions. This table is good!\n\t";
  assertTrue(result.contains(expectedResult));
}
Example 11
Source File: TestHBCK2.java From hbase-operator-tools with Apache License 2.0 | 6 votes |
@Test
public void testFormatReportMissingInMetaOneMissing() throws IOException {
  TableName tableName = createTestTable(5);
  List<RegionInfo> regions = HBCKMetaTableAccessor
      .getTableRegions(TEST_UTIL.getConnection(), tableName);
  HBCKMetaTableAccessor.deleteRegionInfo(TEST_UTIL.getConnection(), regions.get(0));
  String expectedResult = "Missing Regions for each table:\n";
  String result = testFormatMissingRegionsInMetaReport();
  // validates initial report message
  assertTrue(result.contains(expectedResult));
  // validates our test table region is reported missing
  expectedResult = "\t" + tableName.getNameAsString() + "->\n\t\t" + regions.get(0).getEncodedName();
  assertTrue(result.contains(expectedResult));
  // validates namespace region is not reported missing
  expectedResult = "\n\thbase:namespace -> No mismatching regions. This table is good!\n\t";
  assertTrue(result.contains(expectedResult));
}
Example 12
Source File: HRecordGenerator.java From DataLink with Apache License 2.0 | 5 votes |
public static HRecord generate(final TableName tableName, final List<Cell> cells) {
  final byte[] rowkey = CellUtil.cloneRow(cells.get(0));
  final List<HColumn> columns = cells.stream().map(cell -> {
    byte[] family = CellUtil.cloneFamily(cell);
    byte[] qualifier = CellUtil.cloneQualifier(cell);
    byte[] value = CellUtil.cloneValue(cell);
    byte type = cell.getTypeByte();
    long timestamp = cell.getTimestamp();
    return new HColumn(family, qualifier, value, type, timestamp);
  }).collect(toList());
  return new HRecord(tableName.getNamespaceAsString(), tableName.getNameAsString(), rowkey, columns);
}
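A hedged usage sketch for the generator above. HRecord, HColumn and HRecordGenerator are DataLink types whose shape is inferred only from this example, so treat it as illustrative; the table name and row key are placeholders.

// 'table' is an org.apache.hadoop.hbase.client.Table for the "crm:orders" table.
Result result = table.get(new Get(Bytes.toBytes("row-1")));
HRecord record = HRecordGenerator.generate(TableName.valueOf("crm", "orders"), result.listCells());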
Example 13
Source File: NamespaceStateManager.java From hbase with Apache License 2.0 | 5 votes |
synchronized void checkAndUpdateNamespaceTableCount(TableName table, int numRegions)
    throws IOException {
  String namespace = table.getNamespaceAsString();
  NamespaceDescriptor nspdesc = getNamespaceDescriptor(namespace);
  if (nspdesc != null) {
    NamespaceTableAndRegionInfo currentStatus;
    currentStatus = getState(nspdesc.getName());
    if ((currentStatus.getTables().size()) >= TableNamespaceManager.getMaxTables(nspdesc)) {
      throw new QuotaExceededException("The table " + table.getNameAsString()
          + " cannot be created as it would exceed maximum number of tables allowed "
          + " in the namespace. The total number of tables permitted is "
          + TableNamespaceManager.getMaxTables(nspdesc));
    }
    if ((currentStatus.getRegionCount() + numRegions) > TableNamespaceManager
        .getMaxRegions(nspdesc)) {
      throw new QuotaExceededException("The table " + table.getNameAsString()
          + " is not allowed to have " + numRegions
          + " regions. The total number of regions permitted is only "
          + TableNamespaceManager.getMaxRegions(nspdesc) + ", while current region count is "
          + currentStatus.getRegionCount()
          + ". This may be transient, please retry later if there are any"
          + " ongoing split operations in the namespace.");
    }
  } else {
    throw new IOException("Namespace Descriptor found null for " + namespace
        + " This is unexpected.");
  }
  addTable(table, numRegions);
}
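On the client side, this quota check typically surfaces when the table is created. A hypothetical handling sketch follows; 'admin', 'tableDescriptor' and 'splitKeys' are placeholders, and depending on your HBase version the failure may arrive wrapped in a broader IOException rather than as a QuotaExceededException.

try {
  admin.createTable(tableDescriptor, splitKeys);  // 'admin' is an org.apache.hadoop.hbase.client.Admin
} catch (QuotaExceededException qee) {
  // The message embeds table.getNameAsString(), e.g. "The table prod:users cannot be created ..."
  LOG.warn("Namespace quota exceeded for table {}",
      tableDescriptor.getTableName().getNameAsString(), qee);
}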
Example 14
Source File: HFilePrettyPrinter.java From hbase with Apache License 2.0 | 5 votes |
/**
 * Checks whether the referenced mob file exists.
 */
private boolean mobFileExists(FileSystem fs, TableName tn, String mobFileName, String family,
    Set<String> foundMobFiles, Set<String> missingMobFiles) throws IOException {
  if (foundMobFiles.contains(mobFileName)) {
    return true;
  }
  if (missingMobFiles.contains(mobFileName)) {
    return false;
  }
  String tableName = tn.getNameAsString();
  List<Path> locations = mobFileLocations.get(tableName);
  if (locations == null) {
    locations = new ArrayList<>(2);
    locations.add(MobUtils.getMobFamilyPath(getConf(), tn, family));
    locations.add(HFileArchiveUtil.getStoreArchivePath(getConf(), tn,
        MobUtils.getMobRegionInfo(tn).getEncodedName(), family));
    mobFileLocations.put(tn.getNameAsString(), locations);
  }
  boolean exist = false;
  for (Path location : locations) {
    Path mobFilePath = new Path(location, mobFileName);
    if (fs.exists(mobFilePath)) {
      exist = true;
      break;
    }
  }
  if (exist) {
    evictMobFilesIfNecessary(foundMobFiles, FOUND_MOB_FILES_CACHE_CAPACITY);
    foundMobFiles.add(mobFileName);
  } else {
    evictMobFilesIfNecessary(missingMobFiles, MISSING_MOB_FILES_CACHE_CAPACITY);
    missingMobFiles.add(mobFileName);
  }
  return exist;
}
Example 15
Source File: TestImportTSVWithTTLs.java From hbase with Apache License 2.0 | 5 votes |
@Test
public void testMROnTable() throws Exception {
  final TableName tableName = TableName.valueOf(name.getMethodName() + util.getRandomUUID());
  // Prepare the arguments required for the test.
  String[] args = new String[] {
      "-D" + ImportTsv.MAPPER_CONF_KEY + "=org.apache.hadoop.hbase.mapreduce.TsvImporterMapper",
      "-D" + ImportTsv.COLUMNS_CONF_KEY + "=HBASE_ROW_KEY,FAM:A,FAM:B,HBASE_CELL_TTL",
      "-D" + ImportTsv.SEPARATOR_CONF_KEY + "=\u001b",
      tableName.getNameAsString() };
  String data = "KEY\u001bVALUE1\u001bVALUE2\u001b1000000\n";
  util.createTable(tableName, FAMILY);
  doMROnTableTest(util, FAMILY, data, args, 1);
  util.deleteTable(tableName);
}
Example 16
Source File: TestImportTSVWithOperationAttributes.java From hbase with Apache License 2.0 | 5 votes |
@Test
public void testMROnTable() throws Exception {
  final TableName tableName = TableName.valueOf(name.getMethodName() + util.getRandomUUID());
  // Prepare the arguments required for the test.
  String[] args = new String[] {
      "-D" + ImportTsv.MAPPER_CONF_KEY
          + "=org.apache.hadoop.hbase.mapreduce.TsvImporterCustomTestMapperForOprAttr",
      "-D" + ImportTsv.COLUMNS_CONF_KEY + "=HBASE_ROW_KEY,FAM:A,FAM:B,HBASE_ATTRIBUTES_KEY",
      "-D" + ImportTsv.SEPARATOR_CONF_KEY + "=\u001b",
      tableName.getNameAsString() };
  String data = "KEY\u001bVALUE1\u001bVALUE2\u001btest=>myvalue\n";
  util.createTable(tableName, FAMILY);
  doMROnTableTest(util, FAMILY, data, args, 1, true);
  util.deleteTable(tableName);
}
Example 17
Source File: TestBulkLoadHFilesSplitRecovery.java From hbase with Apache License 2.0 | 5 votes |
private Path buildBulkFiles(TableName table, int value) throws Exception {
  Path dir = util.getDataTestDirOnTestFS(table.getNameAsString());
  Path bulk1 = new Path(dir, table.getNameAsString() + value);
  FileSystem fs = util.getTestFileSystem();
  buildHFiles(fs, bulk1, value);
  return bulk1;
}
Example 18
Source File: TestImportTsv.java From hbase with Apache License 2.0 | 4 votes |
/**
 * Run an ImportTsv job and perform basic validation on the results.
 * Returns the ImportTsv <code>Tool</code> instance so that other tests can
 * inspect it for further validation as necessary. This method is static to
 * insure non-reliance on instance's util/conf facilities.
 * @param args Any arguments to pass BEFORE inputFile path is appended.
 * @return The Tool instance used to run the test.
 */
protected static Tool doMROnTableTest(HBaseTestingUtility util, TableName table, String family,
    String data, Map<String, String> args, int valueMultiplier, int expectedKVCount)
    throws Exception {
  Configuration conf = new Configuration(util.getConfiguration());

  // populate input file
  FileSystem fs = FileSystem.get(conf);
  Path inputPath = fs.makeQualified(
      new Path(util.getDataTestDirOnTestFS(table.getNameAsString()), "input.dat"));
  FSDataOutputStream op = fs.create(inputPath, true);
  if (data == null) {
    data = "KEY\u001bVALUE1\u001bVALUE2\n";
  }
  op.write(Bytes.toBytes(data));
  op.close();
  LOG.debug(String.format("Wrote test data to file: %s", inputPath));

  if (conf.getBoolean(FORCE_COMBINER_CONF, true)) {
    LOG.debug("Forcing combiner.");
    conf.setInt("mapreduce.map.combine.minspills", 1);
  }

  // Build args array.
  String[] argsArray = new String[args.size() + 2];
  Iterator it = args.entrySet().iterator();
  int i = 0;
  while (it.hasNext()) {
    Map.Entry pair = (Map.Entry) it.next();
    argsArray[i] = "-D" + pair.getKey() + "=" + pair.getValue();
    i++;
  }
  argsArray[i] = table.getNameAsString();
  argsArray[i + 1] = inputPath.toString();

  // run the import
  Tool tool = new ImportTsv();
  LOG.debug("Running ImportTsv with arguments: " + Arrays.toString(argsArray));
  assertEquals(0, ToolRunner.run(conf, tool, argsArray));

  // Perform basic validation. If the input args did not include
  // ImportTsv.BULK_OUTPUT_CONF_KEY then validate data in the table.
  // Otherwise, validate presence of hfiles.
  boolean isDryRun = args.containsKey(ImportTsv.DRY_RUN_CONF_KEY)
      && "true".equalsIgnoreCase(args.get(ImportTsv.DRY_RUN_CONF_KEY));
  if (args.containsKey(ImportTsv.BULK_OUTPUT_CONF_KEY)) {
    if (isDryRun) {
      assertFalse(String.format("Dry run mode, %s should not have been created.",
          ImportTsv.BULK_OUTPUT_CONF_KEY),
          fs.exists(new Path(ImportTsv.BULK_OUTPUT_CONF_KEY)));
    } else {
      validateHFiles(fs, args.get(ImportTsv.BULK_OUTPUT_CONF_KEY), family, expectedKVCount);
    }
  } else {
    validateTable(conf, table, family, valueMultiplier, isDryRun);
  }

  if (conf.getBoolean(DELETE_AFTER_LOAD_CONF, true)) {
    LOG.debug("Deleting test subdirectory");
    util.cleanupDataTestDirOnTestFS(table.getNameAsString());
  }
  return tool;
}
Example 19
Source File: TablePartiallyOpenException.java From hbase with Apache License 2.0 | 4 votes |
/**
 * @param tableName the name of the table that is partially open
 */
public TablePartiallyOpenException(TableName tableName) {
  this(tableName.getNameAsString());
}
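A hypothetical call-site sketch for this constructor; regionsStillOpening is a made-up helper, and the exception message carries whatever getNameAsString() produces for the table.

TableName table = TableName.valueOf("prod", "users");  // placeholder name
if (regionsStillOpening(table)) {                       // hypothetical check
  throw new TablePartiallyOpenException(table);         // message text: "prod:users"
}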
Example 20
Source File: TestSecureWAL.java From hbase with Apache License 2.0 | 4 votes |
@Test
public void testSecureWAL() throws Exception {
  TableName tableName = TableName.valueOf(name.getMethodName().replaceAll("[^a-zA-Z0-9]", "_"));
  NavigableMap<byte[], Integer> scopes = new TreeMap<>(Bytes.BYTES_COMPARATOR);
  scopes.put(tableName.getName(), 0);
  RegionInfo regionInfo = RegionInfoBuilder.newBuilder(tableName).build();
  final int total = 10;
  final byte[] row = Bytes.toBytes("row");
  final byte[] family = Bytes.toBytes("family");
  final byte[] value = Bytes.toBytes("Test value");
  FileSystem fs = TEST_UTIL.getDFSCluster().getFileSystem();
  final WALFactory wals =
      new WALFactory(TEST_UTIL.getConfiguration(), tableName.getNameAsString());

  // Write the WAL
  final WAL wal = wals.getWAL(regionInfo);
  MultiVersionConcurrencyControl mvcc = new MultiVersionConcurrencyControl();
  for (int i = 0; i < total; i++) {
    WALEdit kvs = new WALEdit();
    kvs.add(new KeyValue(row, family, Bytes.toBytes(i), value));
    wal.appendData(regionInfo, new WALKeyImpl(regionInfo.getEncodedNameAsBytes(), tableName,
        System.currentTimeMillis(), mvcc, scopes), kvs);
  }
  wal.sync();
  final Path walPath = AbstractFSWALProvider.getCurrentFileName(wal);
  wals.shutdown();

  // Insure edits are not plaintext
  long length = fs.getFileStatus(walPath).getLen();
  FSDataInputStream in = fs.open(walPath);
  byte[] fileData = new byte[(int) length];
  IOUtils.readFully(in, fileData);
  in.close();
  assertFalse("Cells appear to be plaintext", Bytes.contains(fileData, value));

  // Confirm the WAL can be read back
  WAL.Reader reader = wals.createReader(TEST_UTIL.getTestFileSystem(), walPath);
  int count = 0;
  WAL.Entry entry = new WAL.Entry();
  while (reader.next(entry) != null) {
    count++;
    List<Cell> cells = entry.getEdit().getCells();
    assertTrue("Should be one KV per WALEdit", cells.size() == 1);
    for (Cell cell : cells) {
      assertTrue("Incorrect row", Bytes.equals(cell.getRowArray(), cell.getRowOffset(),
          cell.getRowLength(), row, 0, row.length));
      assertTrue("Incorrect family", Bytes.equals(cell.getFamilyArray(), cell.getFamilyOffset(),
          cell.getFamilyLength(), family, 0, family.length));
      assertTrue("Incorrect value", Bytes.equals(cell.getValueArray(), cell.getValueOffset(),
          cell.getValueLength(), value, 0, value.length));
    }
  }
  assertEquals("Should have read back as many KVs as written", total, count);
  reader.close();
}