Java Code Examples for org.apache.hadoop.hdfs.util.LightWeightHashSet#add()
The following examples show how to use
org.apache.hadoop.hdfs.util.LightWeightHashSet#add() .
You can vote up the ones you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example. You may also check out the related API usage in the sidebar.
Example 1
Source File: InvalidateBlocks.java From hadoop with Apache License 2.0 | 6 votes |
/** * Add a block to the block collection * which will be invalidated on the specified datanode. */ synchronized void add(final Block block, final DatanodeInfo datanode, final boolean log) { LightWeightHashSet<Block> set = node2blocks.get(datanode); if (set == null) { set = new LightWeightHashSet<Block>(); node2blocks.put(datanode, set); } if (set.add(block)) { numBlocks++; if (log) { NameNode.blockStateChangeLog.info("BLOCK* {}: add {} to {}", getClass().getSimpleName(), block, datanode); } } }
Example 2
Source File: InvalidateBlocks.java From big-c with Apache License 2.0 | 6 votes |
/** * Add a block to the block collection * which will be invalidated on the specified datanode. */ synchronized void add(final Block block, final DatanodeInfo datanode, final boolean log) { LightWeightHashSet<Block> set = node2blocks.get(datanode); if (set == null) { set = new LightWeightHashSet<Block>(); node2blocks.put(datanode, set); } if (set.add(block)) { numBlocks++; if (log) { NameNode.blockStateChangeLog.info("BLOCK* {}: add {} to {}", getClass().getSimpleName(), block, datanode); } } }
Example 3
Source File: FSDataset.java From RDFS with Apache License 2.0 | 6 votes |
/** * Populate the given blockSet with any child blocks * found at this node. */ public void getBlockInfo(LightWeightHashSet<Block> blockSet) { FSDir[] children = this.getChildren(); if (children != null) { for (int i = 0; i < children.length; i++) { children[i].getBlockInfo(blockSet); } } File blockFiles[] = dir.listFiles(); String[] blockFilesNames = getFileNames(blockFiles); for (int i = 0; i < blockFiles.length; i++) { if (Block.isBlockFilename(blockFilesNames[i])) { long genStamp = FSDataset.getGenerationStampFromFile(blockFilesNames, blockFilesNames[i]); blockSet.add(new Block(blockFiles[i], blockFiles[i].length(), genStamp)); } } }
Example 4
Source File: FSDataset.java From RDFS with Apache License 2.0 | 6 votes |
/** * Populate the given blockSet with any child blocks * found at this node. With each block, return the full path * of the block file. */ void getBlockAndFileInfo(LightWeightHashSet<BlockAndFile> blockSet) { FSDir[] children = this.getChildren(); if (children != null) { for (int i = 0; i < children.length; i++) { children[i].getBlockAndFileInfo(blockSet); } } File blockFiles[] = dir.listFiles(); String[] blockFilesNames = getFileNames(blockFiles); for (int i = 0; i < blockFiles.length; i++) { if (Block.isBlockFilename(blockFilesNames[i])) { long genStamp = FSDataset.getGenerationStampFromFile(blockFilesNames, blockFilesNames[i]); Block block = new Block(blockFiles[i], blockFiles[i].length(), genStamp); blockSet.add(new BlockAndFile(blockFiles[i].getAbsoluteFile(), block)); } } }
Example 5
Source File: FSDataset.java From RDFS with Apache License 2.0 | 5 votes |
void getBlocksBeingWrittenInfo(LightWeightHashSet<Block> blockSet) { if (rbwDir == null) { return; } File[] blockFiles = rbwDir.listFiles(); if (blockFiles == null) { return; } String[] blockFileNames = getFileNames(blockFiles); for (int i = 0; i < blockFiles.length; i++) { if (!blockFiles[i].isDirectory()) { // get each block in the rbwDir direcotry if (Block.isBlockFilename(blockFileNames[i])) { long genStamp = FSDataset.getGenerationStampFromFile( blockFileNames, blockFileNames[i]); Block block = new Block(blockFiles[i], blockFiles[i].length(), genStamp); // add this block to block set blockSet.add(block); if (DataNode.LOG.isDebugEnabled()) { DataNode.LOG.debug("recoverBlocksBeingWritten for block " + block); } } } } }