Java Code Examples for org.apache.hadoop.hbase.HConstants#EMPTY_BYTE_ARRAY
The following examples show how to use org.apache.hadoop.hbase.HConstants#EMPTY_BYTE_ARRAY.
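Before the project examples, here is a minimal, hedged sketch of the constant itself (the class and helper names below are illustrative, not taken from any of the projects that follow): HConstants.EMPTY_BYTE_ARRAY is a shared, zero-length byte[] used wherever "no key / no value" must be expressed without allocating a new array or passing null. Because it is a single shared instance, some examples below compare against it with reference equality (==).

import org.apache.hadoop.hbase.HConstants;

public class EmptyByteArraySketch {
  // Hypothetical helper: fall back to the shared empty array instead of null,
  // so callers never need a null check on a row key or value.
  static byte[] orEmpty(byte[] maybeNull) {
    return maybeNull != null ? maybeNull : HConstants.EMPTY_BYTE_ARRAY;
  }

  public static void main(String[] args) {
    byte[] startRow = orEmpty(null);
    System.out.println(startRow.length);                          // 0
    System.out.println(startRow == HConstants.EMPTY_BYTE_ARRAY);  // true: same shared instance
  }
}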
Example 1
Source File: ValueGetterTuple.java From phoenix with Apache License 2.0
@Override
public KeyValue getValue(byte[] family, byte[] qualifier) {
  ImmutableBytesWritable value = null;
  try {
    value = valueGetter.getLatestValue(new ColumnReference(family, qualifier), ts);
  } catch (IOException e) {
    throw new RuntimeException(e);
  }
  byte[] rowKey = valueGetter.getRowKey();
  int valueOffset = 0;
  int valueLength = 0;
  byte[] valueBytes = HConstants.EMPTY_BYTE_ARRAY;
  if (value != null) {
    valueBytes = value.get();
    valueOffset = value.getOffset();
    valueLength = value.getLength();
  }
  return new KeyValue(rowKey, 0, rowKey.length, family, 0, family.length,
      qualifier, 0, qualifier.length, HConstants.LATEST_TIMESTAMP, Type.Put,
      valueBytes, valueOffset, valueLength);
}
Example 2
Source File: TestAdmin.java From hbase with Apache License 2.0
@Test
public void testCreateTableWithEmptyRowInTheSplitKeys() throws IOException {
  final byte[] tableName = Bytes.toBytes(name.getMethodName());
  byte[][] splitKeys = new byte[3][];
  splitKeys[0] = Bytes.toBytes("region1");
  splitKeys[1] = HConstants.EMPTY_BYTE_ARRAY;
  splitKeys[2] = Bytes.toBytes("region2");
  TableDescriptor desc = TableDescriptorBuilder.newBuilder(TableName.valueOf(tableName))
      .setColumnFamily(ColumnFamilyDescriptorBuilder.of("col")).build();
  try {
    ADMIN.createTable(desc, splitKeys);
    fail("Test case should fail as empty split key is passed.");
  } catch (IllegalArgumentException e) {
    LOG.info("Expected ", e);
  }
}
Example 3
Source File: EdgeIndexModel.java From hgraphdb with Apache License 2.0
public Iterator<Edge> edgesWithLimit(HBaseVertex vertex, Direction direction, String label,
    String key, Object fromValue, int limit, boolean reversed) {
  byte[] fromBytes = fromValue != null ? ValueUtils.serialize(fromValue) : HConstants.EMPTY_BYTE_ARRAY;
  Tuple cacheKey = new Sextet<>(direction, label, key, ByteBuffer.wrap(fromBytes), limit, reversed);
  Iterator<Edge> edges = vertex.getEdgesFromCache(cacheKey);
  if (edges != null) {
    return edges;
  }
  IndexMetadata index = graph.getIndex(OperationType.READ, ElementType.EDGE, label, key);
  final boolean useIndex = !key.equals(Constants.CREATED_AT) && index != null;
  if (useIndex) {
    LOGGER.debug("Using edge index for ({}, {})", label, key);
  } else {
    throw new HBaseGraphNotValidException("Method edgesWithLimit requires an index be defined");
  }
  Scan scan = getEdgesScanWithLimit(vertex, direction, index.isUnique(), key, label, fromValue, limit, reversed);
  return CloseableIteratorUtils.limit(performEdgesScan(vertex, scan, cacheKey, useIndex, edge -> {
    if (fromBytes == HConstants.EMPTY_BYTE_ARRAY) return true;
    byte[] propValueBytes = ValueUtils.serialize(edge.getProperty(key));
    int compare = Bytes.compareTo(propValueBytes, fromBytes);
    return reversed ? compare <= 0 : compare >= 0;
  }), limit);
}
Example 4
Source File: RegionGroupingProvider.java From hbase with Apache License 2.0
@Override
public WAL getWAL(RegionInfo region) throws IOException {
  String group;
  if (META_WAL_PROVIDER_ID.equals(this.providerId)) {
    group = META_WAL_GROUP_NAME;
  } else {
    byte[] id;
    byte[] namespace;
    if (region != null) {
      id = region.getEncodedNameAsBytes();
      namespace = region.getTable().getNamespace();
    } else {
      id = HConstants.EMPTY_BYTE_ARRAY;
      namespace = null;
    }
    group = strategy.group(id, namespace);
  }
  return getWAL(group);
}
Example 5
Source File: BufferedDataBlockEncoder.java From hbase with Apache License 2.0
private Cell toOnheapCell(ByteBuffer valAndTagsBuffer, int vOffset, int tagsLenSerializationSize) {
  byte[] tagsArray = HConstants.EMPTY_BYTE_ARRAY;
  int tOffset = 0;
  if (this.includeTags) {
    if (this.tagCompressionContext == null) {
      tagsArray = valAndTagsBuffer.array();
      tOffset = valAndTagsBuffer.arrayOffset() + vOffset + this.valueLength + tagsLenSerializationSize;
    } else {
      tagsArray = Bytes.copy(tagsBuffer, 0, this.tagsLength);
      tOffset = 0;
    }
  }
  return new OnheapDecodedCell(Bytes.copy(keyBuffer, 0, this.keyLength),
      currentKey.getRowLength(), currentKey.getFamilyOffset(), currentKey.getFamilyLength(),
      currentKey.getQualifierOffset(), currentKey.getQualifierLength(), currentKey.getTimestamp(),
      currentKey.getTypeByte(), valAndTagsBuffer.array(), valAndTagsBuffer.arrayOffset() + vOffset,
      this.valueLength, memstoreTS, tagsArray, tOffset, this.tagsLength);
}
Example 6
Source File: TestUtil.java From phoenix with Apache License 2.0
public static byte[][] getSplits(String tenantId) {
  return new byte[][] {
      HConstants.EMPTY_BYTE_ARRAY,
      Bytes.toBytes(tenantId + "00A"),
      Bytes.toBytes(tenantId + "00B"),
      Bytes.toBytes(tenantId + "00C"),
  };
}
Example 7
Source File: TestAdmin.java From hbase with Apache License 2.0
@Test
public void testCreateTableWithOnlyEmptyStartRow() throws IOException {
  final byte[] tableName = Bytes.toBytes(name.getMethodName());
  byte[][] splitKeys = new byte[1][];
  splitKeys[0] = HConstants.EMPTY_BYTE_ARRAY;
  TableDescriptor desc = TableDescriptorBuilder.newBuilder(TableName.valueOf(tableName))
      .setColumnFamily(ColumnFamilyDescriptorBuilder.of("col")).build();
  try {
    ADMIN.createTable(desc, splitKeys);
    fail("Test case should fail as empty split key is passed.");
  } catch (IllegalArgumentException e) {
  }
}
Example 8
Source File: TestAsyncTableAdminApi.java From hbase with Apache License 2.0
@Test
public void testCreateTableWithOnlyEmptyStartRow() throws Exception {
  byte[][] splitKeys = new byte[1][];
  splitKeys[0] = HConstants.EMPTY_BYTE_ARRAY;
  try {
    createTableWithDefaultConf(tableName, splitKeys);
    fail("Test case should fail as empty split key is passed.");
  } catch (CompletionException e) {
    assertTrue(e.getCause() instanceof IllegalArgumentException);
  }
}
Example 9
Source File: KeyOnlyFilter.java From hbase with Apache License 2.0
@Override
public byte[] getValueArray() {
  if (lenAsVal) {
    return Bytes.toBytes(cell.getValueLength());
  } else {
    return HConstants.EMPTY_BYTE_ARRAY;
  }
}
Example 10
Source File: MultiRowRangeFilter.java From hbase with Apache License 2.0
public BasicRowRange(byte[] startRow, boolean startRowInclusive, byte[] stopRow,
    boolean stopRowInclusive) {
  this.startRow = (startRow == null) ? HConstants.EMPTY_BYTE_ARRAY : startRow;
  this.startRowInclusive = startRowInclusive;
  this.stopRow = (stopRow == null) ? HConstants.EMPTY_BYTE_ARRAY : stopRow;
  this.stopRowInclusive = stopRowInclusive;
}
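A hedged usage sketch to go with the constructor above (not taken from MultiRowRangeFilter's own sources): passing null for a bound lets BasicRowRange substitute HConstants.EMPTY_BYTE_ARRAY, which HBase reads as an unbounded start or stop row.

import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.MultiRowRangeFilter;
import org.apache.hadoop.hbase.filter.MultiRowRangeFilter.RowRange;
import org.apache.hadoop.hbase.util.Bytes;

public class RowRangeSketch {
  public static void main(String[] args) {
    List<RowRange> ranges = new ArrayList<>();
    // null start row -> EMPTY_BYTE_ARRAY -> scan from the beginning of the table
    ranges.add(new RowRange(null, true, Bytes.toBytes("b"), false));
    // null stop row -> EMPTY_BYTE_ARRAY -> scan to the end of the table
    ranges.add(new RowRange(Bytes.toBytes("x"), true, null, false));
    Scan scan = new Scan().setFilter(new MultiRowRangeFilter(ranges));
    System.out.println(scan);
  }
}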
Example 11
Source File: TestUtil.java From phoenix with BSD 3-Clause "New" or "Revised" License
public static byte[][] getSplits(String tenantId) {
  return new byte[][] {
      HConstants.EMPTY_BYTE_ARRAY,
      Bytes.toBytes(tenantId + "00A"),
      Bytes.toBytes(tenantId + "00B"),
      Bytes.toBytes(tenantId + "00C"),
  };
}
Example 12
Source File: VertexIndexModel.java From hgraphdb with Apache License 2.0
public Iterator<Vertex> verticesWithLimit(String label, boolean isUnique, String key, Object from,
    int limit, boolean reversed) {
  byte[] fromBytes = from != null ? ValueUtils.serialize(from) : HConstants.EMPTY_BYTE_ARRAY;
  return CloseableIteratorUtils.limit(
      vertices(getVertexIndexScanWithLimit(label, isUnique, key, from, limit, reversed), vertex -> {
        if (fromBytes == HConstants.EMPTY_BYTE_ARRAY) return true;
        byte[] propValueBytes = ValueUtils.serialize(vertex.getProperty(key));
        int compare = Bytes.compareTo(propValueBytes, fromBytes);
        return reversed ? compare <= 0 : compare >= 0;
      }), limit);
}
Example 13
Source File: HbaseUtil.java From DataLink with Apache License 2.0
public static byte[] convertInnerEndRowkey(Configuration configuration) {
  String endRowkey = configuration.getString(Key.END_ROWKEY);
  if (StringUtils.isBlank(endRowkey)) {
    return HConstants.EMPTY_BYTE_ARRAY;
  }
  return Bytes.toBytesBinary(endRowkey);
}
Example 14
Source File: HbaseUtil.java From DataLink with Apache License 2.0
public static byte[] convertUserEndRowkey(Configuration configuration) {
  String endRowkey = configuration.getString(Key.END_ROWKEY);
  if (StringUtils.isBlank(endRowkey)) {
    return HConstants.EMPTY_BYTE_ARRAY;
  } else {
    boolean isBinaryRowkey = configuration.getBool(Key.IS_BINARY_ROWKEY);
    return HbaseUtil.stringToBytes(endRowkey, isBinaryRowkey);
  }
}
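For context, a hedged sketch (not from DataLink; the HBase 2.x Scan builder methods are assumed) of what returning HConstants.EMPTY_BYTE_ARRAY buys the caller: an empty stop row places no upper bound on a scan, so a blank END_ROWKEY means "read to the end of the table".

import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.util.Bytes;

public class EndRowkeySketch {
  public static void main(String[] args) {
    // Stand-in for what convertUserEndRowkey returns when END_ROWKEY is blank.
    byte[] endRowkey = HConstants.EMPTY_BYTE_ARRAY;
    Scan scan = new Scan()
        .withStartRow(Bytes.toBytes("row-000"))
        .withStopRow(endRowkey); // empty stop row = no upper bound
    System.out.println(scan);
  }
}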
Example 15
Source File: ImportTsv.java From learning-hadoop with Apache License 2.0
/**
 * @param columnsSpecification the list of columns to parse out, comma separated.
 *          The row key should be the special token TsvParser.ROWKEY_COLUMN_SPEC
 */
public TsvParser(String columnsSpecification, String[] keyColumns, String separatorStr) {
  // Configure separator
  byte[] separator = Bytes.toBytes(separatorStr);
  Preconditions.checkArgument(separator.length == 1,
      "TsvParser only supports single-byte separators");
  separatorByte = separator[0];
  // Configure columns
  ArrayList<String> columnStrings = Lists.newArrayList(
      Splitter.on(',').trimResults().split(columnsSpecification));
  families = new byte[columnStrings.size()][];
  qualifiers = new byte[columnStrings.size()][];
  colType = new byte[columnStrings.size()];
  for (int i = 0; i < columnStrings.size(); i++) {
    String str = columnStrings.get(i);
    // if (ROWKEY_COLUMN_SPEC.equals(str)) {
    //   rowKeyColumnIndex = i;
    //   continue;
    // }
    String[] parts = str.split(":", 3);
    if (parts.length == 1) {
      families[i] = str.getBytes();
      qualifiers[i] = HConstants.EMPTY_BYTE_ARRAY;
      colType[i] = COL_TYPE_STRING;
    } else {
      families[i] = parts[0].getBytes();
      qualifiers[i] = parts[1].getBytes();
      if (parts.length > 2) {
        colType[i] = parseColType(parts[2]);
      } else {
        colType[i] = COL_TYPE_STRING;
      }
    }
    // System.out.println(str + ", idex " + i + ", coltpe: " + colType[i]);
  }
  if (keyColumns != null) {
    keyColIndex = new int[keyColumns.length];
    keyColLen = new int[keyColumns.length];
    for (int i = 0; i < keyColumns.length; i++) {
      String[] strKdef = keyColumns[i].split(":", 2);
      keyColIndex[i] = Integer.parseInt(strKdef[0]);
      if (keyColIndex[i] >= qualifiers.length) {
        keyColIndex[i] = 0;
      }
      if (strKdef.length > 1) {
        keyColLen[i] = Integer.parseInt(strKdef[1]);
      } else {
        keyColLen[i] = 0; // 0 means not specify the length
      }
    }
  }
}
Example 16
Source File: RegionInfo.java From hbase with Apache License 2.0
/**
 * Separate elements of a regionName.
 * Region name is of the format:
 * <code>tablename,startkey,regionIdTimestamp[_replicaId][.encodedName.]</code>.
 * Startkey can contain the delimiter (',') so we parse from the start and then parse from
 * the end.
 * @return Array of byte[] containing tableName, startKey and id OR null if not parseable
 * as a region name.
 */
static byte[][] parseRegionNameOrReturnNull(final byte[] regionName) {
  int offset = -1;
  for (int i = 0; i < regionName.length; i++) {
    if (regionName[i] == HConstants.DELIMITER) {
      offset = i;
      break;
    }
  }
  if (offset == -1) {
    return null;
  }
  byte[] tableName = new byte[offset];
  System.arraycopy(regionName, 0, tableName, 0, offset);
  offset = -1;
  int endOffset = regionName.length;
  // check whether regionName contains encodedName
  if (regionName.length > MD5_HEX_LENGTH + 2
      && regionName[regionName.length - 1] == ENC_SEPARATOR
      && regionName[regionName.length - MD5_HEX_LENGTH - 2] == ENC_SEPARATOR) {
    endOffset = endOffset - MD5_HEX_LENGTH - 2;
  }
  // parse from end
  byte[] replicaId = null;
  int idEndOffset = endOffset;
  for (int i = endOffset - 1; i > 0; i--) {
    if (regionName[i] == REPLICA_ID_DELIMITER) {
      // replicaId may or may not be present
      replicaId = new byte[endOffset - i - 1];
      System.arraycopy(regionName, i + 1, replicaId, 0, endOffset - i - 1);
      idEndOffset = i;
      // do not break, continue to search for id
    }
    if (regionName[i] == HConstants.DELIMITER) {
      offset = i;
      break;
    }
  }
  if (offset == -1) {
    return null;
  }
  byte[] startKey = HConstants.EMPTY_BYTE_ARRAY;
  if (offset != tableName.length + 1) {
    startKey = new byte[offset - tableName.length - 1];
    System.arraycopy(regionName, tableName.length + 1, startKey, 0, offset - tableName.length - 1);
  }
  byte[] id = new byte[idEndOffset - offset - 1];
  System.arraycopy(regionName, offset + 1, id, 0, idEndOffset - offset - 1);
  byte[][] elements = new byte[replicaId == null ? 3 : 4][];
  elements[0] = tableName;
  elements[1] = startKey;
  elements[2] = id;
  if (replicaId != null) {
    elements[3] = replicaId;
  }
  return elements;
}
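A hedged illustration of the parser above (the region name below is made up; it has no encoded-name suffix and no replica id, so three elements come back, and a region with an empty start key would return HConstants.EMPTY_BYTE_ARRAY in slot 1):

import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.util.Bytes;

public class RegionNameSketch {
  public static void main(String[] args) {
    // Format: tablename,startkey,regionIdTimestamp
    byte[] regionName = Bytes.toBytes("testtable,rowkey-0001,1576089098000");
    byte[][] parts = RegionInfo.parseRegionNameOrReturnNull(regionName);
    System.out.println(Bytes.toString(parts[0])); // testtable
    System.out.println(Bytes.toString(parts[1])); // rowkey-0001
    System.out.println(Bytes.toString(parts[2])); // 1576089098000
  }
}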
Example 17
Source File: HBaseTableSplitGranular.java From SpyGlass with Apache License 2.0
/** default constructor */
public HBaseTableSplitGranular() {
  this(HConstants.EMPTY_BYTE_ARRAY, HConstants.EMPTY_BYTE_ARRAY, HConstants.EMPTY_BYTE_ARRAY,
      "", "", HBaseConstants.SourceMode.EMPTY, false);
}
Example 18
Source File: ImportTsv.java From hbase with Apache License 2.0
/**
 * @param columnsSpecification the list of columns to parse out, comma separated.
 *          The row key should be the special token TsvParser.ROWKEY_COLUMN_SPEC
 * @param separatorStr
 */
public TsvParser(String columnsSpecification, String separatorStr) {
  // Configure separator
  byte[] separator = Bytes.toBytes(separatorStr);
  Preconditions.checkArgument(separator.length == 1,
      "TsvParser only supports single-byte separators");
  separatorByte = separator[0];
  // Configure columns
  ArrayList<String> columnStrings = Lists.newArrayList(
      Splitter.on(',').trimResults().split(columnsSpecification));
  maxColumnCount = columnStrings.size();
  families = new byte[maxColumnCount][];
  qualifiers = new byte[maxColumnCount][];
  for (int i = 0; i < columnStrings.size(); i++) {
    String str = columnStrings.get(i);
    if (ROWKEY_COLUMN_SPEC.equals(str)) {
      rowKeyColumnIndex = i;
      continue;
    }
    if (TIMESTAMPKEY_COLUMN_SPEC.equals(str)) {
      timestampKeyColumnIndex = i;
      continue;
    }
    if (ATTRIBUTES_COLUMN_SPEC.equals(str)) {
      attrKeyColumnIndex = i;
      continue;
    }
    if (CELL_VISIBILITY_COLUMN_SPEC.equals(str)) {
      cellVisibilityColumnIndex = i;
      continue;
    }
    if (CELL_TTL_COLUMN_SPEC.equals(str)) {
      cellTTLColumnIndex = i;
      continue;
    }
    String[] parts = str.split(":", 2);
    if (parts.length == 1) {
      families[i] = Bytes.toBytes(str);
      qualifiers[i] = HConstants.EMPTY_BYTE_ARRAY;
    } else {
      families[i] = Bytes.toBytes(parts[0]);
      qualifiers[i] = Bytes.toBytes(parts[1]);
    }
  }
}