Java Code Examples for org.apache.hadoop.util.StringUtils#byteToHexString()
The following examples show how to use
org.apache.hadoop.util.StringUtils#byteToHexString().
You can vote up the ones you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example 1
Source File: DFSZKFailoverController.java From hadoop with Apache License 2.0 | 6 votes |
/**
 * Decode the ActiveNodeInfo protobuf stored in ZK into an HA service target.
 *
 * @param data raw bytes read from the ZooKeeper znode
 * @return the NameNode HA target described by the stored protobuf
 * @throws RuntimeException if the bytes are not a valid ActiveNodeInfo, or
 *         if the stored address disagrees with our own configuration
 */
@Override
protected HAServiceTarget dataToTarget(byte[] data) {
  ActiveNodeInfo proto;
  try {
    proto = ActiveNodeInfo.parseFrom(data);
  } catch (InvalidProtocolBufferException e) {
    // Fix: chain the parse failure as the cause instead of dropping it,
    // so the stack trace shows why the bytes were rejected.
    throw new RuntimeException("Invalid data in ZK: " +
        StringUtils.byteToHexString(data), e);
  }
  NNHAServiceTarget ret = new NNHAServiceTarget(
      conf, proto.getNameserviceId(), proto.getNamenodeId());
  // Sanity check: the address recorded in ZK must match what our own
  // configuration says this NameNode's address is.
  InetSocketAddress addressFromProtobuf = new InetSocketAddress(
      proto.getHostname(), proto.getPort());
  if (!addressFromProtobuf.equals(ret.getAddress())) {
    throw new RuntimeException("Mismatched address stored in ZK for " +
        ret + ": Stored protobuf was " + proto + ", address from our own " +
        "configuration for this NameNode was " + ret.getAddress());
  }
  ret.setZkfcPort(proto.getZkfcPort());
  return ret;
}
Example 2
Source File: Client.java From big-c with Apache License 2.0 | 6 votes |
/** Check the rpc response header. */ void checkResponse(RpcResponseHeaderProto header) throws IOException { if (header == null) { throw new EOFException("Response is null."); } if (header.hasClientId()) { // check client IDs final byte[] id = header.getClientId().toByteArray(); if (!Arrays.equals(id, RpcConstants.DUMMY_CLIENT_ID)) { if (!Arrays.equals(id, clientId)) { throw new IOException("Client IDs not matched: local ID=" + StringUtils.byteToHexString(clientId) + ", ID in response=" + StringUtils.byteToHexString(header.getClientId().toByteArray())); } } } }
Example 3
Source File: Display.java From big-c with Apache License 2.0 | 6 votes |
/**
 * Print the checksum line for a single path: name, algorithm, and the
 * hex-encoded checksum bytes (or NONE when the FS provides no checksum).
 *
 * @param item the path to report on; must not be a directory
 * @throws PathIsDirectoryException if the item is a directory
 * @throws IOException on filesystem errors
 */
@Override
protected void processPath(PathData item) throws IOException {
  if (item.stat.isDirectory()) {
    throw new PathIsDirectoryException(item.toString());
  }
  FileChecksum checksum = item.fs.getFileChecksum(item.path);
  if (checksum == null) {
    out.printf("%s\tNONE\t%n", item.toString());
    return;
  }
  final String hex = StringUtils.byteToHexString(
      checksum.getBytes(), 0, checksum.getLength());
  out.printf("%s\t%s\t%s%n",
      item.toString(), checksum.getAlgorithmName(), hex);
}
Example 4
Source File: Display.java From hadoop with Apache License 2.0 | 6 votes |
/**
 * Emit one output line per path: "<path>\t<algorithm>\t<hex checksum>",
 * or "<path>\tNONE\t" when the filesystem reports no checksum.
 *
 * @param item the path to report on; must not be a directory
 * @throws PathIsDirectoryException if the item is a directory
 * @throws IOException on filesystem errors
 */
@Override
protected void processPath(PathData item) throws IOException {
  if (item.stat.isDirectory()) {
    throw new PathIsDirectoryException(item.toString());
  }
  FileChecksum checksum = item.fs.getFileChecksum(item.path);
  if (checksum == null) {
    out.printf("%s\tNONE\t%n", item.toString());
    return;
  }
  final String checksumHex = StringUtils.byteToHexString(
      checksum.getBytes(), 0, checksum.getLength());
  out.printf("%s\t%s\t%s%n",
      item.toString(), checksum.getAlgorithmName(), checksumHex);
}
Example 5
Source File: Client.java From hadoop with Apache License 2.0 | 6 votes |
/** Check the rpc response header. */ void checkResponse(RpcResponseHeaderProto header) throws IOException { if (header == null) { throw new EOFException("Response is null."); } if (header.hasClientId()) { // check client IDs final byte[] id = header.getClientId().toByteArray(); if (!Arrays.equals(id, RpcConstants.DUMMY_CLIENT_ID)) { if (!Arrays.equals(id, clientId)) { throw new IOException("Client IDs not matched: local ID=" + StringUtils.byteToHexString(clientId) + ", ID in response=" + StringUtils.byteToHexString(header.getClientId().toByteArray())); } } } }
Example 6
Source File: DFSZKFailoverController.java From big-c with Apache License 2.0 | 6 votes |
/**
 * Decode the ActiveNodeInfo protobuf stored in ZK into an HA service target.
 *
 * @param data raw bytes read from the ZooKeeper znode
 * @return the NameNode HA target described by the stored protobuf
 * @throws RuntimeException if the bytes are not a valid ActiveNodeInfo, or
 *         if the stored address disagrees with our own configuration
 */
@Override
protected HAServiceTarget dataToTarget(byte[] data) {
  ActiveNodeInfo proto;
  try {
    proto = ActiveNodeInfo.parseFrom(data);
  } catch (InvalidProtocolBufferException e) {
    // Fix: chain the parse failure as the cause instead of dropping it,
    // so the stack trace shows why the bytes were rejected.
    throw new RuntimeException("Invalid data in ZK: " +
        StringUtils.byteToHexString(data), e);
  }
  NNHAServiceTarget ret = new NNHAServiceTarget(
      conf, proto.getNameserviceId(), proto.getNamenodeId());
  // Sanity check: the address recorded in ZK must match what our own
  // configuration says this NameNode's address is.
  InetSocketAddress addressFromProtobuf = new InetSocketAddress(
      proto.getHostname(), proto.getPort());
  if (!addressFromProtobuf.equals(ret.getAddress())) {
    throw new RuntimeException("Mismatched address stored in ZK for " +
        ret + ": Stored protobuf was " + proto + ", address from our own " +
        "configuration for this NameNode was " + ret.getAddress());
  }
  ret.setZkfcPort(proto.getZkfcPort());
  return ret;
}
Example 7
Source File: TestDelegationTokenRenewer.java From big-c with Apache License 2.0 | 5 votes |
/**
 * Renders this token as "id=&lt;last 6 hex chars&gt;;k=&lt;kind&gt;;s=&lt;service&gt;".
 *
 * @return a short human-readable summary of the token
 */
@Override
public String toString() {
  StringBuilder sb = new StringBuilder(1024);
  sb.append("id=");
  String id = StringUtils.byteToHexString(this.getIdentifier());
  int idLen = id.length();
  // Fix: clamp the start index so identifiers shorter than 3 bytes
  // (< 6 hex chars) don't make substring() throw
  // StringIndexOutOfBoundsException.
  sb.append(id.substring(Math.max(0, idLen - 6)));
  sb.append(";k=");
  sb.append(this.getKind());
  sb.append(";s=");
  sb.append(this.getService());
  return sb.toString();
}
Example 8
Source File: TestLocalFileSystem.java From big-c with Apache License 2.0 | 5 votes |
/**
 * Seek to {@code seekOff}, read {@code toRead} bytes, and fail the test
 * with a hex dump of both buffers if they differ from the expected slice
 * of {@code fileContents}.
 *
 * @param stm the open stream to read from
 * @param fileContents the full expected file contents
 * @param seekOff offset to seek to before reading
 * @param toRead number of bytes to read and compare
 * @throws IOException on read errors
 */
private void verifyRead(FSDataInputStream stm, byte[] fileContents,
    int seekOff, int toRead) throws IOException {
  final byte[] actual = new byte[toRead];
  stm.seek(seekOff);
  stm.readFully(actual);
  final byte[] expected =
      Arrays.copyOfRange(fileContents, seekOff, seekOff + toRead);
  if (Arrays.equals(actual, expected)) {
    return;
  }
  fail("\nExpected: " + StringUtils.byteToHexString(expected)
      + "\ngot: " + StringUtils.byteToHexString(actual)
      + "\noff=" + seekOff + " len=" + toRead);
}
Example 9
Source File: ActiveStandbyElector.java From big-c with Apache License 2.0 | 5 votes |
/**
 * @return a debug string identifying this elector instance, the
 *         hex-encoded application data (or "null"), and its callback.
 */
@Override
public String toString() {
  final String dataHex =
      (appData == null) ? "null" : StringUtils.byteToHexString(appData);
  return "elector id=" + System.identityHashCode(this)
      + " appData=" + dataHex
      + " cb=" + appClient;
}
Example 10
Source File: TestEditLog.java From big-c with Apache License 2.0 | 5 votes |
/** * Regression test for HDFS-1112/HDFS-3020. Ensures that, even if * logSync isn't called periodically, the edit log will sync itself. */ @Test public void testAutoSync() throws Exception { File logDir = new File(TEST_DIR, "testAutoSync"); logDir.mkdirs(); FSEditLog log = FSImageTestUtil.createStandaloneEditLog(logDir); String oneKB = StringUtils.byteToHexString( new byte[500]); try { log.openForWrite(); NameNodeMetrics mockMetrics = Mockito.mock(NameNodeMetrics.class); log.setMetricsForTests(mockMetrics); for (int i = 0; i < 400; i++) { log.logDelete(oneKB, 1L, false); } // After ~400KB, we're still within the 512KB buffer size Mockito.verify(mockMetrics, Mockito.times(0)).addSync(Mockito.anyLong()); // After ~400KB more, we should have done an automatic sync for (int i = 0; i < 400; i++) { log.logDelete(oneKB, 1L, false); } Mockito.verify(mockMetrics, Mockito.times(1)).addSync(Mockito.anyLong()); } finally { log.close(); } }
Example 11
Source File: TestDelegationTokenRenewer.java From hadoop with Apache License 2.0 | 5 votes |
/**
 * Renders this token as "id=&lt;last 6 hex chars&gt;;k=&lt;kind&gt;;s=&lt;service&gt;".
 *
 * @return a short human-readable summary of the token
 */
@Override
public String toString() {
  StringBuilder sb = new StringBuilder(1024);
  sb.append("id=");
  String id = StringUtils.byteToHexString(this.getIdentifier());
  int idLen = id.length();
  // Fix: clamp the start index so identifiers shorter than 3 bytes
  // (< 6 hex chars) don't make substring() throw
  // StringIndexOutOfBoundsException.
  sb.append(id.substring(Math.max(0, idLen - 6)));
  sb.append(";k=");
  sb.append(this.getKind());
  sb.append(";s=");
  sb.append(this.getService());
  return sb.toString();
}
Example 12
Source File: TestLocalFileSystem.java From hadoop with Apache License 2.0 | 5 votes |
/**
 * Verify that reading {@code toRead} bytes at {@code seekOff} yields the
 * matching slice of {@code fileContents}; on mismatch, fail with hex
 * dumps of the expected and actual bytes.
 *
 * @param stm the open stream to read from
 * @param fileContents the full expected file contents
 * @param seekOff offset to seek to before reading
 * @param toRead number of bytes to read and compare
 * @throws IOException on read errors
 */
private void verifyRead(FSDataInputStream stm, byte[] fileContents,
    int seekOff, int toRead) throws IOException {
  final byte[] got = new byte[toRead];
  stm.seek(seekOff);
  stm.readFully(got);
  final byte[] want =
      Arrays.copyOfRange(fileContents, seekOff, seekOff + toRead);
  if (Arrays.equals(got, want)) {
    return;
  }
  fail("\nExpected: " + StringUtils.byteToHexString(want)
      + "\ngot: " + StringUtils.byteToHexString(got)
      + "\noff=" + seekOff + " len=" + toRead);
}
Example 13
Source File: ActiveStandbyElector.java From hadoop with Apache License 2.0 | 5 votes |
/**
 * @return a debug string with this elector's identity hash, its
 *         hex-encoded application data (or "null"), and its callback.
 */
@Override
public String toString() {
  final String appDataStr;
  if (appData == null) {
    appDataStr = "null";
  } else {
    appDataStr = StringUtils.byteToHexString(appData);
  }
  return "elector id=" + System.identityHashCode(this)
      + " appData=" + appDataStr + " cb=" + appClient;
}
Example 14
Source File: TestEditLog.java From hadoop with Apache License 2.0 | 5 votes |
/** * Regression test for HDFS-1112/HDFS-3020. Ensures that, even if * logSync isn't called periodically, the edit log will sync itself. */ @Test public void testAutoSync() throws Exception { File logDir = new File(TEST_DIR, "testAutoSync"); logDir.mkdirs(); FSEditLog log = FSImageTestUtil.createStandaloneEditLog(logDir); String oneKB = StringUtils.byteToHexString( new byte[500]); try { log.openForWrite(); NameNodeMetrics mockMetrics = Mockito.mock(NameNodeMetrics.class); log.setMetricsForTests(mockMetrics); for (int i = 0; i < 400; i++) { log.logDelete(oneKB, 1L, false); } // After ~400KB, we're still within the 512KB buffer size Mockito.verify(mockMetrics, Mockito.times(0)).addSync(Mockito.anyLong()); // After ~400KB more, we should have done an automatic sync for (int i = 0; i < 400; i++) { log.logDelete(oneKB, 1L, false); } Mockito.verify(mockMetrics, Mockito.times(1)).addSync(Mockito.anyLong()); } finally { log.close(); } }
Example 15
Source File: HdfsVolumeId.java From big-c with Apache License 2.0 | 4 votes |
/**
 * @return this volume identifier, hex-encoded.
 */
@Override
public String toString() {
  final String hex = StringUtils.byteToHexString(id);
  return hex;
}
Example 16
Source File: UTF8.java From hadoop with Apache License 2.0 | 4 votes |
/**
 * Reads {@code nBytes} of UTF-8 data from {@code in} and decodes it into
 * {@code buffer}. Supports 1- to 4-byte sequences; 4-byte sequences are
 * appended as a surrogate pair. 5/6-byte sequences (disallowed since
 * RFC 3629) and any other invalid lead byte raise UTFDataFormatException,
 * as does a multi-byte sequence truncated by the nBytes limit.
 *
 * NOTE(review): the continuation bytes (bytes[i++] & 0x3F) are consumed
 * in strict left-to-right order — the i++ side effects must not be
 * reordered. Also, the truncation branches pass literal 1/2/3 as the
 * third argument to byteToHexString while the invalid-byte branch passes
 * an absolute end index (endForError); confirm against the
 * StringUtils.byteToHexString(bytes, start, end) contract that the
 * literal-argument calls dump the intended range.
 */
private static void readChars(DataInput in, StringBuilder buffer, int nBytes)
    throws UTFDataFormatException, IOException {
  // Copy the raw bytes into a reusable (thread-local factory) buffer.
  DataOutputBuffer obuf = OBUF_FACTORY.get();
  obuf.reset();
  obuf.write(in, nBytes);
  byte[] bytes = obuf.getData();
  int i = 0;
  while (i < nBytes) {
    byte b = bytes[i++];
    if ((b & 0x80) == 0) {
      // 0b0xxxxxxx: 1-byte sequence
      buffer.append((char)(b & 0x7F));
    } else if ((b & 0xE0) == 0xC0) {
      if (i >= nBytes) {
        throw new UTFDataFormatException("Truncated UTF8 at " +
            StringUtils.byteToHexString(bytes, i - 1, 1));
      }
      // 0b110xxxxx: 2-byte sequence
      buffer.append((char)(((b & 0x1F) << 6)
          | (bytes[i++] & 0x3F)));
    } else if ((b & 0xF0) == 0xE0) {
      // 0b1110xxxx: 3-byte sequence
      if (i + 1 >= nBytes) {
        throw new UTFDataFormatException("Truncated UTF8 at " +
            StringUtils.byteToHexString(bytes, i - 1, 2));
      }
      buffer.append((char)(((b & 0x0F) << 12)
          | ((bytes[i++] & 0x3F) << 6)
          | (bytes[i++] & 0x3F)));
    } else if ((b & 0xF8) == 0xF0) {
      if (i + 2 >= nBytes) {
        throw new UTFDataFormatException("Truncated UTF8 at " +
            StringUtils.byteToHexString(bytes, i - 1, 3));
      }
      // 0b11110xxx: 4-byte sequence
      // Codepoints above U+FFFF don't fit in a char; emit them as a
      // high/low surrogate pair.
      int codepoint = ((b & 0x07) << 18)
          | ((bytes[i++] & 0x3F) << 12)
          | ((bytes[i++] & 0x3F) << 6)
          | ((bytes[i++] & 0x3F));
      buffer.append(highSurrogate(codepoint))
          .append(lowSurrogate(codepoint));
    } else {
      // The UTF8 standard describes 5-byte and 6-byte sequences, but
      // these are no longer allowed as of 2003 (see RFC 3629)
      // Only show the next 6 bytes max in the error code - in case the
      // buffer is large, this will prevent an exceedingly large message.
      int endForError = Math.min(i + 5, nBytes);
      throw new UTFDataFormatException("Invalid UTF8 at " +
          StringUtils.byteToHexString(bytes, i - 1, endForError));
    }
  }
}
Example 17
Source File: UTF8.java From big-c with Apache License 2.0 | 4 votes |
/**
 * Reads {@code nBytes} of UTF-8 data from {@code in} and decodes it into
 * {@code buffer}. Supports 1- to 4-byte sequences; 4-byte sequences are
 * appended as a surrogate pair. 5/6-byte sequences (disallowed since
 * RFC 3629) and any other invalid lead byte raise UTFDataFormatException,
 * as does a multi-byte sequence truncated by the nBytes limit.
 *
 * NOTE(review): the continuation bytes (bytes[i++] & 0x3F) are consumed
 * in strict left-to-right order — the i++ side effects must not be
 * reordered. Also, the truncation branches pass literal 1/2/3 as the
 * third argument to byteToHexString while the invalid-byte branch passes
 * an absolute end index (endForError); confirm against the
 * StringUtils.byteToHexString(bytes, start, end) contract that the
 * literal-argument calls dump the intended range.
 */
private static void readChars(DataInput in, StringBuilder buffer, int nBytes)
    throws UTFDataFormatException, IOException {
  // Copy the raw bytes into a reusable (thread-local factory) buffer.
  DataOutputBuffer obuf = OBUF_FACTORY.get();
  obuf.reset();
  obuf.write(in, nBytes);
  byte[] bytes = obuf.getData();
  int i = 0;
  while (i < nBytes) {
    byte b = bytes[i++];
    if ((b & 0x80) == 0) {
      // 0b0xxxxxxx: 1-byte sequence
      buffer.append((char)(b & 0x7F));
    } else if ((b & 0xE0) == 0xC0) {
      if (i >= nBytes) {
        throw new UTFDataFormatException("Truncated UTF8 at " +
            StringUtils.byteToHexString(bytes, i - 1, 1));
      }
      // 0b110xxxxx: 2-byte sequence
      buffer.append((char)(((b & 0x1F) << 6)
          | (bytes[i++] & 0x3F)));
    } else if ((b & 0xF0) == 0xE0) {
      // 0b1110xxxx: 3-byte sequence
      if (i + 1 >= nBytes) {
        throw new UTFDataFormatException("Truncated UTF8 at " +
            StringUtils.byteToHexString(bytes, i - 1, 2));
      }
      buffer.append((char)(((b & 0x0F) << 12)
          | ((bytes[i++] & 0x3F) << 6)
          | (bytes[i++] & 0x3F)));
    } else if ((b & 0xF8) == 0xF0) {
      if (i + 2 >= nBytes) {
        throw new UTFDataFormatException("Truncated UTF8 at " +
            StringUtils.byteToHexString(bytes, i - 1, 3));
      }
      // 0b11110xxx: 4-byte sequence
      // Codepoints above U+FFFF don't fit in a char; emit them as a
      // high/low surrogate pair.
      int codepoint = ((b & 0x07) << 18)
          | ((bytes[i++] & 0x3F) << 12)
          | ((bytes[i++] & 0x3F) << 6)
          | ((bytes[i++] & 0x3F));
      buffer.append(highSurrogate(codepoint))
          .append(lowSurrogate(codepoint));
    } else {
      // The UTF8 standard describes 5-byte and 6-byte sequences, but
      // these are no longer allowed as of 2003 (see RFC 3629)
      // Only show the next 6 bytes max in the error code - in case the
      // buffer is large, this will prevent an exceedingly large message.
      int endForError = Math.min(i + 5, nBytes);
      throw new UTFDataFormatException("Invalid UTF8 at " +
          StringUtils.byteToHexString(bytes, i - 1, endForError));
    }
  }
}
Example 18
Source File: HdfsVolumeId.java From hadoop with Apache License 2.0 | 4 votes |
/**
 * @return the hex-encoded form of this volume id.
 */
@Override
public String toString() {
  final String encoded = StringUtils.byteToHexString(id);
  return encoded;
}
Example 19
Source File: MD5FileUtils.java From big-c with Apache License 2.0 | 2 votes |
/**
 * Save the ".md5" file that lists the md5sum of another file.
 * Hex-encodes the digest and delegates to the String overload.
 *
 * @param dataFile the original file whose md5 was computed
 * @param digest the computed digest
 * @throws IOException if the md5 file cannot be written
 */
public static void saveMD5File(File dataFile, MD5Hash digest)
    throws IOException {
  saveMD5File(dataFile, StringUtils.byteToHexString(digest.getDigest()));
}
Example 20
Source File: MD5FileUtils.java From hadoop with Apache License 2.0 | 2 votes |
/**
 * Save the ".md5" file that lists the md5sum of another file.
 * The digest is hex-encoded and handed to the String-based overload.
 *
 * @param dataFile the original file whose md5 was computed
 * @param digest the computed digest
 * @throws IOException if the md5 file cannot be written
 */
public static void saveMD5File(File dataFile, MD5Hash digest)
    throws IOException {
  final String hexDigest = StringUtils.byteToHexString(digest.getDigest());
  saveMD5File(dataFile, hexDigest);
}