org.apache.hadoop.hdfs.server.namenode.FsImageProto Java Examples
The following examples show how to use
org.apache.hadoop.hdfs.server.namenode.FsImageProto.
You can go to the original project or source file by following the links above each example.
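The generated FsImageProto classes are ordinary protobuf messages, so every section of an fsimage is read the same way: parse delimited messages from an InputStream until parseDelimitedFrom returns null. The sketch below shows that pattern in isolation; the class name and the assumption that in is already positioned at (and limited to) the INodeDirectory section are illustrative, not taken from any example on this page.

import java.io.IOException;
import java.io.InputStream;

import org.apache.hadoop.hdfs.server.namenode.FsImageProto;

public class DirEntryScanner {
  /** Count the DirEntry messages in an INodeDirectory section stream. */
  static long countDirEntries(InputStream in) throws IOException {
    long count = 0;
    while (true) {
      FsImageProto.INodeDirectorySection.DirEntry e =
          FsImageProto.INodeDirectorySection.DirEntry.parseDelimitedFrom(in);
      if (e == null) {
        break; // end of the section
      }
      count++;
    }
    return count;
  }
}

The examples below use exactly this loop, plus project-specific bookkeeping, for the INodeDirectory, INodeReference, INode, and StringTable sections.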
Example #1
Source File: PBImageTextWriter.java From hadoop with Apache License 2.0 | 6 votes |
/**
 * Scan the INodeDirectory section to construct the namespace.
 */
private void buildNamespace(InputStream in, List<Long> refIdList)
    throws IOException {
  int count = 0;
  while (true) {
    FsImageProto.INodeDirectorySection.DirEntry e =
        FsImageProto.INodeDirectorySection.DirEntry.parseDelimitedFrom(in);
    if (e == null) {
      break;
    }
    count++;
    if (LOG.isDebugEnabled() && count % 10000 == 0) {
      LOG.debug("Scanned {} directories.", count);
    }
    long parentId = e.getParent();
    for (int i = 0; i < e.getChildrenCount(); i++) {
      long childId = e.getChildren(i);
      metadataMap.putDirChild(parentId, childId);
    }
    for (int i = e.getChildrenCount();
         i < e.getChildrenCount() + e.getRefChildrenCount(); i++) {
      int refId = e.getRefChildren(i - e.getChildrenCount());
      metadataMap.putDirChild(parentId, refIdList.get(refId));
    }
  }
  LOG.info("Scanned {} INode directories to build namespace.", count);
}
Example #2
Source File: FSImageLoader.java From big-c with Apache License 2.0 | 6 votes |
private FsImageProto.INodeSection.INode fromINodeId(final long id)
    throws IOException {
  // Binary search the serialized inodes, which are sorted by inode id.
  int l = 0, r = inodes.length;
  while (l < r) {
    int mid = l + (r - l) / 2;
    FsImageProto.INodeSection.INode n = FsImageProto.INodeSection.INode
        .parseFrom(inodes[mid]);
    long nid = n.getId();
    if (id > nid) {
      l = mid + 1;
    } else if (id < nid) {
      r = mid;
    } else {
      return n;
    }
  }
  return null;
}
Example #3
Source File: FSImageLoader.java From big-c with Apache License 2.0 | 6 votes |
private PermissionStatus getPermissionStatus(String path) throws IOException {
  long id = lookup(path);
  FsImageProto.INodeSection.INode inode = fromINodeId(id);
  switch (inode.getType()) {
    case FILE: {
      FsImageProto.INodeSection.INodeFile f = inode.getFile();
      return FSImageFormatPBINode.Loader.loadPermission(
          f.getPermission(), stringTable);
    }
    case DIRECTORY: {
      FsImageProto.INodeSection.INodeDirectory d = inode.getDirectory();
      return FSImageFormatPBINode.Loader.loadPermission(
          d.getPermission(), stringTable);
    }
    case SYMLINK: {
      FsImageProto.INodeSection.INodeSymlink s = inode.getSymlink();
      return FSImageFormatPBINode.Loader.loadPermission(
          s.getPermission(), stringTable);
    }
    default: {
      return null;
    }
  }
}
Example #4
Source File: FSImageLoader.java From big-c with Apache License 2.0 | 6 votes |
private List<AclEntry> getAclEntryList(String path) throws IOException {
  long id = lookup(path);
  FsImageProto.INodeSection.INode inode = fromINodeId(id);
  switch (inode.getType()) {
    case FILE: {
      FsImageProto.INodeSection.INodeFile f = inode.getFile();
      return FSImageFormatPBINode.Loader.loadAclEntries(
          f.getAcl(), stringTable);
    }
    case DIRECTORY: {
      FsImageProto.INodeSection.INodeDirectory d = inode.getDirectory();
      return FSImageFormatPBINode.Loader.loadAclEntries(
          d.getAcl(), stringTable);
    }
    default: {
      return new ArrayList<AclEntry>();
    }
  }
}
Example #5
Source File: FSImageLoader.java From big-c with Apache License 2.0 | 6 votes |
private List<Map<String, Object>> getFileStatusList(String path)
    throws IOException {
  List<Map<String, Object>> list = new ArrayList<Map<String, Object>>();
  long id = lookup(path);
  FsImageProto.INodeSection.INode inode = fromINodeId(id);
  if (inode.getType() == FsImageProto.INodeSection.INode.Type.DIRECTORY) {
    if (!dirmap.containsKey(id)) {
      // if the directory is empty, return empty list
      return list;
    }
    long[] children = dirmap.get(id);
    for (long cid : children) {
      list.add(getFileStatus(fromINodeId(cid), true));
    }
  } else {
    list.add(getFileStatus(inode, false));
  }
  return list;
}
Example #6
Source File: FSImageLoader.java From big-c with Apache License 2.0 | 6 votes |
private static byte[][] loadINodeSection(InputStream in)
    throws IOException {
  FsImageProto.INodeSection s = FsImageProto.INodeSection
      .parseDelimitedFrom(in);
  LOG.info("Loading " + s.getNumInodes() + " inodes.");
  final byte[][] inodes = new byte[(int) s.getNumInodes()][];
  for (int i = 0; i < s.getNumInodes(); ++i) {
    // Each INode is stored as a varint length prefix followed by the
    // serialized message bytes.
    int size = CodedInputStream.readRawVarint32(in.read(), in);
    byte[] bytes = new byte[size];
    IOUtils.readFully(in, bytes, 0, size);
    inodes[i] = bytes;
  }
  LOG.debug("Sorting inodes");
  Arrays.sort(inodes, INODE_BYTES_COMPARATOR);
  LOG.debug("Finished sorting inodes");
  return inodes;
}
Example #7
Source File: FSImageLoader.java From big-c with Apache License 2.0 | 6 votes |
private static ImmutableList<Long> loadINodeReferenceSection(InputStream in)
    throws IOException {
  LOG.info("Loading inode references");
  ImmutableList.Builder<Long> builder = ImmutableList.builder();
  long counter = 0;
  while (true) {
    FsImageProto.INodeReferenceSection.INodeReference e =
        FsImageProto.INodeReferenceSection.INodeReference
            .parseDelimitedFrom(in);
    if (e == null) {
      break;
    }
    ++counter;
    builder.add(e.getReferredId());
  }
  LOG.info("Loaded " + counter + " inode references");
  return builder.build();
}
Example #8
Source File: FSImageLoader.java From big-c with Apache License 2.0 | 6 votes |
@Override
public int compare(byte[] o1, byte[] o2) {
  try {
    final FsImageProto.INodeSection.INode l = FsImageProto.INodeSection
        .INode.parseFrom(o1);
    final FsImageProto.INodeSection.INode r = FsImageProto.INodeSection
        .INode.parseFrom(o2);
    if (l.getId() < r.getId()) {
      return -1;
    } else if (l.getId() > r.getId()) {
      return 1;
    } else {
      return 0;
    }
  } catch (InvalidProtocolBufferException e) {
    throw new RuntimeException(e);
  }
}
Example #9
Source File: PBImageTextWriter.java From big-c with Apache License 2.0 | 6 votes |
/**
 * Scan the INodeDirectory section to construct the namespace.
 */
private void buildNamespace(InputStream in) throws IOException {
  int count = 0;
  while (true) {
    FsImageProto.INodeDirectorySection.DirEntry e =
        FsImageProto.INodeDirectorySection.DirEntry.parseDelimitedFrom(in);
    if (e == null) {
      break;
    }
    count++;
    if (LOG.isDebugEnabled() && count % 10000 == 0) {
      LOG.debug("Scanned {} directories.", count);
    }
    long parentId = e.getParent();
    // Referred INodes are not supported for now.
    for (int i = 0; i < e.getChildrenCount(); i++) {
      long childId = e.getChildren(i);
      metadataMap.putDirChild(parentId, childId);
    }
    Preconditions.checkState(e.getRefChildrenCount() == 0);
  }
  LOG.info("Scanned {} INode directories to build namespace.", count);
}
Example #10
Source File: FSImageLoader.java From hadoop with Apache License 2.0 | 6 votes |
private FsImageProto.INodeSection.INode fromINodeId(final long id)
    throws IOException {
  int l = 0, r = inodes.length;
  while (l < r) {
    int mid = l + (r - l) / 2;
    FsImageProto.INodeSection.INode n = FsImageProto.INodeSection.INode
        .parseFrom(inodes[mid]);
    long nid = n.getId();
    if (id > nid) {
      l = mid + 1;
    } else if (id < nid) {
      r = mid;
    } else {
      return n;
    }
  }
  return null;
}
Example #11
Source File: FSImageLoader.java From hadoop with Apache License 2.0 | 6 votes |
private PermissionStatus getPermissionStatus(String path) throws IOException {
  long id = lookup(path);
  FsImageProto.INodeSection.INode inode = fromINodeId(id);
  switch (inode.getType()) {
    case FILE: {
      FsImageProto.INodeSection.INodeFile f = inode.getFile();
      return FSImageFormatPBINode.Loader.loadPermission(
          f.getPermission(), stringTable);
    }
    case DIRECTORY: {
      FsImageProto.INodeSection.INodeDirectory d = inode.getDirectory();
      return FSImageFormatPBINode.Loader.loadPermission(
          d.getPermission(), stringTable);
    }
    case SYMLINK: {
      FsImageProto.INodeSection.INodeSymlink s = inode.getSymlink();
      return FSImageFormatPBINode.Loader.loadPermission(
          s.getPermission(), stringTable);
    }
    default: {
      return null;
    }
  }
}
Example #12
Source File: FSImageLoader.java From hadoop with Apache License 2.0 | 6 votes |
private List<AclEntry> getAclEntryList(String path) throws IOException {
  long id = lookup(path);
  FsImageProto.INodeSection.INode inode = fromINodeId(id);
  switch (inode.getType()) {
    case FILE: {
      FsImageProto.INodeSection.INodeFile f = inode.getFile();
      return FSImageFormatPBINode.Loader.loadAclEntries(
          f.getAcl(), stringTable);
    }
    case DIRECTORY: {
      FsImageProto.INodeSection.INodeDirectory d = inode.getDirectory();
      return FSImageFormatPBINode.Loader.loadAclEntries(
          d.getAcl(), stringTable);
    }
    default: {
      return new ArrayList<AclEntry>();
    }
  }
}
Example #13
Source File: FSImageLoader.java From hadoop with Apache License 2.0 | 6 votes |
static ImmutableList<Long> loadINodeReferenceSection(InputStream in)
    throws IOException {
  LOG.info("Loading inode references");
  ImmutableList.Builder<Long> builder = ImmutableList.builder();
  long counter = 0;
  while (true) {
    FsImageProto.INodeReferenceSection.INodeReference e =
        FsImageProto.INodeReferenceSection.INodeReference
            .parseDelimitedFrom(in);
    if (e == null) {
      break;
    }
    ++counter;
    builder.add(e.getReferredId());
  }
  LOG.info("Loaded " + counter + " inode references");
  return builder.build();
}
Example #14
Source File: FSImageLoader.java From hadoop with Apache License 2.0 | 6 votes |
private List<Map<String, Object>> getFileStatusList(String path)
    throws IOException {
  List<Map<String, Object>> list = new ArrayList<Map<String, Object>>();
  long id = lookup(path);
  FsImageProto.INodeSection.INode inode = fromINodeId(id);
  if (inode.getType() == FsImageProto.INodeSection.INode.Type.DIRECTORY) {
    if (!dirmap.containsKey(id)) {
      // if the directory is empty, return empty list
      return list;
    }
    long[] children = dirmap.get(id);
    for (long cid : children) {
      list.add(getFileStatus(fromINodeId(cid), true));
    }
  } else {
    list.add(getFileStatus(inode, false));
  }
  return list;
}
Example #15
Source File: FSImageLoader.java From hadoop with Apache License 2.0 | 6 votes |
private static byte[][] loadINodeSection(InputStream in)
    throws IOException {
  FsImageProto.INodeSection s = FsImageProto.INodeSection
      .parseDelimitedFrom(in);
  LOG.info("Loading " + s.getNumInodes() + " inodes.");
  final byte[][] inodes = new byte[(int) s.getNumInodes()][];
  for (int i = 0; i < s.getNumInodes(); ++i) {
    int size = CodedInputStream.readRawVarint32(in.read(), in);
    byte[] bytes = new byte[size];
    IOUtils.readFully(in, bytes, 0, size);
    inodes[i] = bytes;
  }
  LOG.debug("Sorting inodes");
  Arrays.sort(inodes, INODE_BYTES_COMPARATOR);
  LOG.debug("Finished sorting inodes");
  return inodes;
}
Example #16
Source File: FSImageLoader.java From hadoop with Apache License 2.0 | 6 votes |
@Override
public int compare(byte[] o1, byte[] o2) {
  try {
    final FsImageProto.INodeSection.INode l = FsImageProto.INodeSection
        .INode.parseFrom(o1);
    final FsImageProto.INodeSection.INode r = FsImageProto.INodeSection
        .INode.parseFrom(o2);
    if (l.getId() < r.getId()) {
      return -1;
    } else if (l.getId() > r.getId()) {
      return 1;
    } else {
      return 0;
    }
  } catch (InvalidProtocolBufferException e) {
    throw new RuntimeException(e);
  }
}
Example #17
Source File: FSImageLoader.java From big-c with Apache License 2.0 | 5 votes |
/**
 * Return the INodeId of the specified path.
 */
private long lookup(String path) throws IOException {
  Preconditions.checkArgument(path.startsWith("/"));
  long id = INodeId.ROOT_INODE_ID;
  for (int offset = 0, next; offset < path.length(); offset = next) {
    next = path.indexOf('/', offset + 1);
    if (next == -1) {
      next = path.length();
    }
    if (offset + 1 > next) {
      break;
    }
    final String component = path.substring(offset + 1, next);
    if (component.isEmpty()) {
      continue;
    }
    final long[] children = dirmap.get(id);
    if (children == null) {
      throw new FileNotFoundException(path);
    }
    boolean found = false;
    for (long cid : children) {
      FsImageProto.INodeSection.INode child = fromINodeId(cid);
      if (component.equals(child.getName().toStringUtf8())) {
        found = true;
        id = child.getId();
        break;
      }
    }
    if (!found) {
      throw new FileNotFoundException(path);
    }
  }
  return id;
}
Example #18
Source File: FSImageLoader.java From hadoop with Apache License 2.0 | 5 votes |
private static Map<Long, long[]> loadINodeDirectorySection(
    InputStream in, List<Long> refIdList) throws IOException {
  LOG.info("Loading inode directory section");
  Map<Long, long[]> dirs = Maps.newHashMap();
  long counter = 0;
  while (true) {
    FsImageProto.INodeDirectorySection.DirEntry e =
        FsImageProto.INodeDirectorySection.DirEntry.parseDelimitedFrom(in);
    // note that in is a LimitedInputStream
    if (e == null) {
      break;
    }
    ++counter;
    long[] l = new long[e.getChildrenCount() + e.getRefChildrenCount()];
    for (int i = 0; i < e.getChildrenCount(); ++i) {
      l[i] = e.getChildren(i);
    }
    for (int i = e.getChildrenCount(); i < l.length; i++) {
      int refId = e.getRefChildren(i - e.getChildrenCount());
      l[i] = refIdList.get(refId);
    }
    dirs.put(e.getParent(), l);
  }
  LOG.info("Loaded " + counter + " directories");
  return dirs;
}
Example #19
Source File: FSImageLoader.java From big-c with Apache License 2.0 | 5 votes |
/**
 * Return the JSON formatted FileStatus of the specified file.
 * @param path a path specifies a file
 * @return JSON formatted FileStatus
 * @throws IOException if failed to serialize fileStatus to JSON.
 */
String getFileStatus(String path) throws IOException {
  ObjectMapper mapper = new ObjectMapper();
  FsImageProto.INodeSection.INode inode = fromINodeId(lookup(path));
  return "{\"FileStatus\":\n"
      + mapper.writeValueAsString(getFileStatus(inode, false)) + "\n}\n";
}
Example #20
Source File: FSImageLoader.java From big-c with Apache License 2.0 | 5 votes |
static String[] loadStringTable(InputStream in) throws IOException {
  FsImageProto.StringTableSection s = FsImageProto.StringTableSection
      .parseDelimitedFrom(in);
  LOG.info("Loading " + s.getNumEntry() + " strings");
  // Entry ids start at 1 (0 means "no entry"), so leave one extra slot.
  String[] stringTable = new String[s.getNumEntry() + 1];
  for (int i = 0; i < s.getNumEntry(); ++i) {
    FsImageProto.StringTableSection.Entry e = FsImageProto
        .StringTableSection.Entry.parseDelimitedFrom(in);
    stringTable[e.getId()] = e.getStr();
  }
  return stringTable;
}
Example #21
Source File: FSImageLoader.java From big-c with Apache License 2.0 | 5 votes |
private static Map<Long, long[]> loadINodeDirectorySection(
    InputStream in, List<Long> refIdList) throws IOException {
  LOG.info("Loading inode directory section");
  Map<Long, long[]> dirs = Maps.newHashMap();
  long counter = 0;
  while (true) {
    FsImageProto.INodeDirectorySection.DirEntry e =
        FsImageProto.INodeDirectorySection.DirEntry.parseDelimitedFrom(in);
    // note that in is a LimitedInputStream
    if (e == null) {
      break;
    }
    ++counter;
    long[] l = new long[e.getChildrenCount() + e.getRefChildrenCount()];
    for (int i = 0; i < e.getChildrenCount(); ++i) {
      l[i] = e.getChildren(i);
    }
    for (int i = e.getChildrenCount(); i < l.length; i++) {
      int refId = e.getRefChildren(i - e.getChildrenCount());
      l[i] = refIdList.get(refId);
    }
    dirs.put(e.getParent(), l);
  }
  LOG.info("Loaded " + counter + " directories");
  return dirs;
}
Example #22
Source File: FSImageLoader.java From hadoop with Apache License 2.0 | 5 votes |
static String[] loadStringTable(InputStream in) throws IOException {
  FsImageProto.StringTableSection s = FsImageProto.StringTableSection
      .parseDelimitedFrom(in);
  LOG.info("Loading " + s.getNumEntry() + " strings");
  String[] stringTable = new String[s.getNumEntry() + 1];
  for (int i = 0; i < s.getNumEntry(); ++i) {
    FsImageProto.StringTableSection.Entry e = FsImageProto
        .StringTableSection.Entry.parseDelimitedFrom(in);
    stringTable[e.getId()] = e.getStr();
  }
  return stringTable;
}
Example #23
Source File: FSImageLoader.java From hadoop with Apache License 2.0 | 5 votes |
/**
 * Return the JSON formatted FileStatus of the specified file.
 * @param path a path specifies a file
 * @return JSON formatted FileStatus
 * @throws IOException if failed to serialize fileStatus to JSON.
 */
String getFileStatus(String path) throws IOException {
  ObjectMapper mapper = new ObjectMapper();
  FsImageProto.INodeSection.INode inode = fromINodeId(lookup(path));
  return "{\"FileStatus\":\n"
      + mapper.writeValueAsString(getFileStatus(inode, false)) + "\n}\n";
}
Example #24
Source File: FSImageLoader.java From big-c with Apache License 2.0 | 5 votes |
static long getFileSize(FsImageProto.INodeSection.INodeFile f) {
  long size = 0;
  for (HdfsProtos.BlockProto p : f.getBlocksList()) {
    size += p.getNumBytes();
  }
  return size;
}
Example #25
Source File: FSImageLoader.java From hadoop with Apache License 2.0 | 5 votes |
private List<XAttr> getXAttrList(String path) throws IOException {
  long id = lookup(path);
  FsImageProto.INodeSection.INode inode = fromINodeId(id);
  switch (inode.getType()) {
    case FILE:
      return FSImageFormatPBINode.Loader.loadXAttrs(
          inode.getFile().getXAttrs(), stringTable);
    case DIRECTORY:
      return FSImageFormatPBINode.Loader.loadXAttrs(
          inode.getDirectory().getXAttrs(), stringTable);
    default:
      return null;
  }
}
Example #26
Source File: FsImageReporter.java From hadoop-hdfs-fsimage-exporter with Apache License 2.0 | 5 votes |
@Override
public void onFile(FsImageProto.INodeSection.INode inode, String path) {
  FsImageProto.INodeSection.INodeFile f = inode.getFile();
  pathStats.sumBlocks.add(f.getBlocksCount());
  final long fileSize = FSImageLoader.getFileSize(f);
  pathStats.fileSize.observe(fileSize);
}
Example #27
Source File: FSImageLoader.java From hadoop with Apache License 2.0 | 5 votes |
static long getFileSize(FsImageProto.INodeSection.INodeFile f) {
  long size = 0;
  for (HdfsProtos.BlockProto p : f.getBlocksList()) {
    size += p.getNumBytes();
  }
  return size;
}
Example #28
Source File: FSImageLoader.java From hadoop with Apache License 2.0 | 5 votes |
/**
 * Return the INodeId of the specified path.
 */
private long lookup(String path) throws IOException {
  Preconditions.checkArgument(path.startsWith("/"));
  long id = INodeId.ROOT_INODE_ID;
  for (int offset = 0, next; offset < path.length(); offset = next) {
    next = path.indexOf('/', offset + 1);
    if (next == -1) {
      next = path.length();
    }
    if (offset + 1 > next) {
      break;
    }
    final String component = path.substring(offset + 1, next);
    if (component.isEmpty()) {
      continue;
    }
    final long[] children = dirmap.get(id);
    if (children == null) {
      throw new FileNotFoundException(path);
    }
    boolean found = false;
    for (long cid : children) {
      FsImageProto.INodeSection.INode child = fromINodeId(cid);
      if (component.equals(child.getName().toStringUtf8())) {
        found = true;
        id = child.getId();
        break;
      }
    }
    if (!found) {
      throw new FileNotFoundException(path);
    }
  }
  return id;
}
Example #29
Source File: FSImageLoader.java From hadoop with Apache License 2.0 | 5 votes |
private Map<String, Object> getContentSummaryMap(String path)
    throws IOException {
  long id = lookup(path);
  INode inode = fromINodeId(id);
  long spaceQuota = 0;
  long nsQuota = 0;
  // data[0] = directory count, data[1] = file count,
  // data[2] = total length, data[3] = space consumed
  long[] data = new long[4];
  FsImageProto.INodeSection.INodeFile f = inode.getFile();
  switch (inode.getType()) {
    case FILE:
      data[0] = 0;
      data[1] = 1;
      data[2] = getFileSize(f);
      nsQuota = -1;
      data[3] = data[2] * f.getReplication();
      spaceQuota = -1;
      return fillSummaryMap(spaceQuota, nsQuota, data);
    case DIRECTORY:
      fillDirSummary(id, data);
      nsQuota = inode.getDirectory().getNsQuota();
      spaceQuota = inode.getDirectory().getDsQuota();
      return fillSummaryMap(spaceQuota, nsQuota, data);
    case SYMLINK:
      data[0] = 0;
      data[1] = 1;
      data[2] = 0;
      nsQuota = -1;
      data[3] = 0;
      spaceQuota = -1;
      return fillSummaryMap(spaceQuota, nsQuota, data);
    default:
      return null;
  }
}
Example #30
Source File: FSImageLoader.java From hadoop with Apache License 2.0 | 5 votes |
private void fillDirSummary(long id, long[] data) throws IOException {
  data[0]++;
  long[] children = dirmap.get(id);
  if (children == null) {
    return;
  }
  for (long cid : children) {
    INode node = fromINodeId(cid);
    switch (node.getType()) {
      case DIRECTORY:
        fillDirSummary(cid, data);
        break;
      case FILE:
        FsImageProto.INodeSection.INodeFile f = node.getFile();
        long curLength = getFileSize(f);
        data[1]++;
        data[2] += curLength;
        data[3] += (curLength) * (f.getReplication());
        break;
      case SYMLINK:
        data[1]++;
        break;
      default:
        break;
    }
  }
}