org.apache.hadoop.fs.CacheFlag Java Examples
The following examples show how to use
org.apache.hadoop.fs.CacheFlag.
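Before diving into the examples, here is a minimal sketch of how CacheFlag is typically passed to the HDFS centralized-caching API: CacheFlag.FORCE tells the NameNode to skip the cache pool's quota check when adding or modifying a directive. The configuration setup, pool name "examplePool", and path "/data/hot" below are illustrative placeholders, not taken from the examples on this page.

import java.util.EnumSet;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CacheFlag;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo;
import org.apache.hadoop.hdfs.protocol.CachePoolInfo;

public class CacheFlagUsageSketch {
  public static void main(String[] args) throws Exception {
    // Illustrative setup; assumes fs.defaultFS points at an HDFS cluster.
    Configuration conf = new Configuration();
    DistributedFileSystem dfs =
        (DistributedFileSystem) new Path("/").getFileSystem(conf);

    // Create a cache pool and ask the NameNode to cache a path under it
    // ("examplePool" and "/data/hot" are hypothetical names).
    dfs.addCachePool(new CachePoolInfo("examplePool"));
    CacheDirectiveInfo directive = new CacheDirectiveInfo.Builder()
        .setPool("examplePool")
        .setPath(new Path("/data/hot"))
        .build();

    // CacheFlag.FORCE skips the pool's free-space (quota) check;
    // passing EnumSet.noneOf(CacheFlag.class) would enforce it.
    long id = dfs.addCacheDirective(directive, EnumSet.of(CacheFlag.FORCE));
    System.out.println("Added cache directive with id " + id);
  }
}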
Example #1
Source File: FSNDNCacheOp.java From big-c with Apache License 2.0 | 6 votes |
static CacheDirectiveInfo addCacheDirective(
    FSNamesystem fsn, CacheManager cacheManager,
    CacheDirectiveInfo directive, EnumSet<CacheFlag> flags,
    boolean logRetryCache)
    throws IOException {
  final FSPermissionChecker pc = getFsPermissionChecker(fsn);
  if (directive.getId() != null) {
    throw new IOException("addDirective: you cannot specify an ID " +
        "for this operation.");
  }
  CacheDirectiveInfo effectiveDirective =
      cacheManager.addDirective(directive, pc, flags);
  fsn.getEditLog().logAddCacheDirectiveInfo(effectiveDirective, logRetryCache);
  return effectiveDirective;
}
Example #2
Source File: TestRetryCacheWithHA.java From big-c with Apache License 2.0 | 6 votes |
/**
 * Add a list of cache directives, list cache directives,
 * switch active NN, and list cache directives again.
 */
@Test (timeout=60000)
public void testListCacheDirectives() throws Exception {
  final int poolCount = 7;
  HashSet<String> poolNames = new HashSet<String>(poolCount);
  Path path = new Path("/p");
  for (int i=0; i<poolCount; i++) {
    String poolName = "testListCacheDirectives-" + i;
    CacheDirectiveInfo directiveInfo = new CacheDirectiveInfo.Builder()
        .setPool(poolName).setPath(path).build();
    dfs.addCachePool(new CachePoolInfo(poolName));
    dfs.addCacheDirective(directiveInfo, EnumSet.of(CacheFlag.FORCE));
    poolNames.add(poolName);
  }
  listCacheDirectives(poolNames, 0);

  cluster.transitionToStandby(0);
  cluster.transitionToActive(1);
  cluster.waitActive(1);

  listCacheDirectives(poolNames, 1);
}
Example #3
Source File: NameNodeRpcServer.java From big-c with Apache License 2.0 | 6 votes |
@Override // ClientProtocol
public long addCacheDirective(
    CacheDirectiveInfo path, EnumSet<CacheFlag> flags) throws IOException {
  checkNNStartup();
  CacheEntryWithPayload cacheEntry =
      RetryCache.waitForCompletion(retryCache, null);
  if (cacheEntry != null && cacheEntry.isSuccess()) {
    return (Long) cacheEntry.getPayload();
  }

  boolean success = false;
  long ret = 0;
  try {
    ret = namesystem.addCacheDirective(path, flags, cacheEntry != null);
    success = true;
  } finally {
    RetryCache.setState(cacheEntry, success, ret);
  }
  return ret;
}
Example #4
Source File: TestRetryCacheWithHA.java From hadoop with Apache License 2.0 | 6 votes |
/**
 * Add a list of cache directives, list cache directives,
 * switch active NN, and list cache directives again.
 */
@Test (timeout=60000)
public void testListCacheDirectives() throws Exception {
  final int poolCount = 7;
  HashSet<String> poolNames = new HashSet<String>(poolCount);
  Path path = new Path("/p");
  for (int i=0; i<poolCount; i++) {
    String poolName = "testListCacheDirectives-" + i;
    CacheDirectiveInfo directiveInfo = new CacheDirectiveInfo.Builder()
        .setPool(poolName).setPath(path).build();
    dfs.addCachePool(new CachePoolInfo(poolName));
    dfs.addCacheDirective(directiveInfo, EnumSet.of(CacheFlag.FORCE));
    poolNames.add(poolName);
  }
  listCacheDirectives(poolNames, 0);

  cluster.transitionToStandby(0);
  cluster.transitionToActive(1);
  cluster.waitActive(1);

  listCacheDirectives(poolNames, 1);
}
Example #5
Source File: NameNodeRpcServer.java From big-c with Apache License 2.0 | 6 votes |
@Override // ClientProtocol
public void modifyCacheDirective(
    CacheDirectiveInfo directive, EnumSet<CacheFlag> flags) throws IOException {
  checkNNStartup();
  CacheEntry cacheEntry = RetryCache.waitForCompletion(retryCache);
  if (cacheEntry != null && cacheEntry.isSuccess()) {
    return;
  }

  boolean success = false;
  try {
    namesystem.modifyCacheDirective(directive, flags, cacheEntry != null);
    success = true;
  } finally {
    RetryCache.setState(cacheEntry, success);
  }
}
Example #6
Source File: FSNDNCacheOp.java From hadoop with Apache License 2.0 | 6 votes |
static CacheDirectiveInfo addCacheDirective(
    FSNamesystem fsn, CacheManager cacheManager,
    CacheDirectiveInfo directive, EnumSet<CacheFlag> flags,
    boolean logRetryCache)
    throws IOException {
  final FSPermissionChecker pc = getFsPermissionChecker(fsn);
  if (directive.getId() != null) {
    throw new IOException("addDirective: you cannot specify an ID " +
        "for this operation.");
  }
  CacheDirectiveInfo effectiveDirective =
      cacheManager.addDirective(directive, pc, flags);
  fsn.getEditLog().logAddCacheDirectiveInfo(effectiveDirective, logRetryCache);
  return effectiveDirective;
}
Example #7
Source File: NameNodeRpcServer.java From hadoop with Apache License 2.0 | 6 votes |
@Override // ClientProtocol
public long addCacheDirective(
    CacheDirectiveInfo path, EnumSet<CacheFlag> flags) throws IOException {
  checkNNStartup();
  CacheEntryWithPayload cacheEntry =
      RetryCache.waitForCompletion(retryCache, null);
  if (cacheEntry != null && cacheEntry.isSuccess()) {
    return (Long) cacheEntry.getPayload();
  }

  boolean success = false;
  long ret = 0;
  try {
    ret = namesystem.addCacheDirective(path, flags, cacheEntry != null);
    success = true;
  } finally {
    RetryCache.setState(cacheEntry, success, ret);
  }
  return ret;
}
Example #8
Source File: NameNodeRpcServer.java From hadoop with Apache License 2.0 | 6 votes |
@Override // ClientProtocol
public void modifyCacheDirective(
    CacheDirectiveInfo directive, EnumSet<CacheFlag> flags) throws IOException {
  checkNNStartup();
  CacheEntry cacheEntry = RetryCache.waitForCompletion(retryCache);
  if (cacheEntry != null && cacheEntry.isSuccess()) {
    return;
  }

  boolean success = false;
  try {
    namesystem.modifyCacheDirective(directive, flags, cacheEntry != null);
    success = true;
  } finally {
    RetryCache.setState(cacheEntry, success);
  }
}
Example #9
Source File: ClientNamenodeProtocolTranslatorPB.java From big-c with Apache License 2.0 | 5 votes |
@Override
public long addCacheDirective(CacheDirectiveInfo directive,
    EnumSet<CacheFlag> flags) throws IOException {
  try {
    AddCacheDirectiveRequestProto.Builder builder =
        AddCacheDirectiveRequestProto.newBuilder().
            setInfo(PBHelper.convert(directive));
    if (!flags.isEmpty()) {
      builder.setCacheFlags(PBHelper.convertCacheFlags(flags));
    }
    return rpcProxy.addCacheDirective(null, builder.build()).getId();
  } catch (ServiceException e) {
    throw ProtobufHelper.getRemoteException(e);
  }
}
Example #10
Source File: ClientNamenodeProtocolTranslatorPB.java From hadoop with Apache License 2.0 | 5 votes |
@Override
public long addCacheDirective(CacheDirectiveInfo directive,
    EnumSet<CacheFlag> flags) throws IOException {
  try {
    AddCacheDirectiveRequestProto.Builder builder =
        AddCacheDirectiveRequestProto.newBuilder().
            setInfo(PBHelper.convert(directive));
    if (!flags.isEmpty()) {
      builder.setCacheFlags(PBHelper.convertCacheFlags(flags));
    }
    return rpcProxy.addCacheDirective(null, builder.build()).getId();
  } catch (ServiceException e) {
    throw ProtobufHelper.getRemoteException(e);
  }
}
Example #11
Source File: TestRetryCacheWithHA.java From hadoop with Apache License 2.0 | 5 votes |
@Override
void invoke() throws Exception {
  client.modifyCacheDirective(
      new CacheDirectiveInfo.Builder().
          setId(id).
          setReplication(newReplication).
          build(), EnumSet.of(CacheFlag.FORCE));
}
Example #12
Source File: PBHelper.java From big-c with Apache License 2.0 | 5 votes |
public static EnumSet<CacheFlag> convertCacheFlags(int flags) {
  EnumSet<CacheFlag> result = EnumSet.noneOf(CacheFlag.class);
  if ((flags & CacheFlagProto.FORCE_VALUE) == CacheFlagProto.FORCE_VALUE) {
    result.add(CacheFlag.FORCE);
  }
  return result;
}
Example #13
Source File: PBHelper.java From big-c with Apache License 2.0 | 5 votes |
public static int convertCacheFlags(EnumSet<CacheFlag> flags) {
  int value = 0;
  if (flags.contains(CacheFlag.FORCE)) {
    value |= CacheFlagProto.FORCE.getNumber();
  }
  return value;
}
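Examples #12 and #13 are the two directions of the same conversion: CacheFlag values travel over the ClientNamenodeProtocol RPC as a protobuf bitmask, and PBHelper maps between that integer and the EnumSet<CacheFlag> used in the Java API. A minimal round-trip sketch, assuming PBHelper from org.apache.hadoop.hdfs.protocolPB is accessible on the classpath as in the big-c and hadoop sources shown here:

import java.util.EnumSet;

import org.apache.hadoop.fs.CacheFlag;
import org.apache.hadoop.hdfs.protocolPB.PBHelper;

public class CacheFlagRoundTrip {
  public static void main(String[] args) {
    // Encode the flag set as the protobuf bitmask, then decode it back.
    EnumSet<CacheFlag> flags = EnumSet.of(CacheFlag.FORCE);
    int wire = PBHelper.convertCacheFlags(flags);       // FORCE bit set
    EnumSet<CacheFlag> decoded = PBHelper.convertCacheFlags(wire);
    System.out.println(wire + " -> " + decoded);        // e.g. "1 -> [FORCE]"
  }
}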
Example #14
Source File: CacheRegistry.java From nnproxy with Apache License 2.0 | 5 votes |
public long addCacheDirective(CacheDirectiveInfo directive,
    EnumSet<CacheFlag> flags) throws IOException {
  UpstreamManager.Upstream upstream = getUpstream(directive.getPool());
  long id = maskDirectiveId(
      upstream.protocol.addCacheDirective(directive, flags), upstream.fsIndex);
  reloadThread.interrupt();
  return id;
}
Example #15
Source File: CacheManager.java From big-c with Apache License 2.0 | 5 votes |
public CacheDirectiveInfo addDirective(
    CacheDirectiveInfo info, FSPermissionChecker pc, EnumSet<CacheFlag> flags)
    throws IOException {
  assert namesystem.hasWriteLock();
  CacheDirective directive;
  try {
    CachePool pool = getCachePool(validatePoolName(info));
    checkWritePermission(pc, pool);
    String path = validatePath(info);
    short replication = validateReplication(info, (short)1);
    long expiryTime = validateExpiryTime(info, pool.getMaxRelativeExpiryMs());
    // Do quota validation if required
    if (!flags.contains(CacheFlag.FORCE)) {
      checkLimit(pool, path, replication);
    }
    // All validation passed
    // Add a new entry with the next available ID.
    long id = getNextDirectiveId();
    directive = new CacheDirective(id, path, replication, expiryTime);
    addInternal(directive, pool);
  } catch (IOException e) {
    LOG.warn("addDirective of " + info + " failed: ", e);
    throw e;
  }
  LOG.info("addDirective of {} successful.", info);
  return directive.toInfo();
}
Example #16
Source File: DFSClient.java From big-c with Apache License 2.0 | 5 votes |
public long addCacheDirective(
    CacheDirectiveInfo info, EnumSet<CacheFlag> flags) throws IOException {
  checkOpen();
  TraceScope scope = Trace.startSpan("addCacheDirective", traceSampler);
  try {
    return namenode.addCacheDirective(info, flags);
  } catch (RemoteException re) {
    throw re.unwrapRemoteException();
  } finally {
    scope.close();
  }
}
Example #17
Source File: DFSClient.java From big-c with Apache License 2.0 | 5 votes |
public void modifyCacheDirective(
    CacheDirectiveInfo info, EnumSet<CacheFlag> flags) throws IOException {
  checkOpen();
  TraceScope scope = Trace.startSpan("modifyCacheDirective", traceSampler);
  try {
    namenode.modifyCacheDirective(info, flags);
  } catch (RemoteException re) {
    throw re.unwrapRemoteException();
  } finally {
    scope.close();
  }
}
Example #18
Source File: FSNDNCacheOp.java From big-c with Apache License 2.0 | 5 votes |
static void modifyCacheDirective(
    FSNamesystem fsn, CacheManager cacheManager, CacheDirectiveInfo directive,
    EnumSet<CacheFlag> flags, boolean logRetryCache) throws IOException {
  final FSPermissionChecker pc = getFsPermissionChecker(fsn);

  cacheManager.modifyDirective(directive, pc, flags);
  fsn.getEditLog().logModifyCacheDirectiveInfo(directive, logRetryCache);
}
Example #19
Source File: DistributedFileSystem.java From big-c with Apache License 2.0 | 5 votes |
/**
 * Add a new CacheDirective.
 *
 * @param info Information about a directive to add.
 * @param flags {@link CacheFlag}s to use for this operation.
 * @return the ID of the directive that was created.
 * @throws IOException if the directive could not be added
 */
public long addCacheDirective(
    CacheDirectiveInfo info, EnumSet<CacheFlag> flags) throws IOException {
  Preconditions.checkNotNull(info.getPath());
  Path path = new Path(getPathName(fixRelativePart(info.getPath()))).
      makeQualified(getUri(), getWorkingDirectory());
  return dfs.addCacheDirective(
      new CacheDirectiveInfo.Builder(info).
          setPath(path).
          build(),
      flags);
}
Example #20
Source File: DistributedFileSystem.java From big-c with Apache License 2.0 | 5 votes |
/**
 * Modify a CacheDirective.
 *
 * @param info Information about the directive to modify. You must set the ID
 *          to indicate which CacheDirective you want to modify.
 * @param flags {@link CacheFlag}s to use for this operation.
 * @throws IOException if the directive could not be modified
 */
public void modifyCacheDirective(
    CacheDirectiveInfo info, EnumSet<CacheFlag> flags) throws IOException {
  if (info.getPath() != null) {
    info = new CacheDirectiveInfo.Builder(info).
        setPath(new Path(getPathName(fixRelativePart(info.getPath()))).
            makeQualified(getUri(), getWorkingDirectory())).build();
  }
  dfs.modifyCacheDirective(info, flags);
}
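As the Javadoc above notes, modifyCacheDirective identifies the target directive by its ID. A common pattern, also visible in the TestRetryCacheWithHA snippets on this page, is to build a CacheDirectiveInfo holding only the ID plus the fields to change and pass CacheFlag.FORCE to bypass the pool quota check. A small sketch under those assumptions; the class, method, and replication value below are illustrative, and directiveId is assumed to come from an earlier addCacheDirective call:

import java.util.EnumSet;

import org.apache.hadoop.fs.CacheFlag;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo;

class ModifyDirectiveSketch {
  // directiveId is assumed to have been returned by an earlier
  // dfs.addCacheDirective(...) call.
  static void bumpReplication(DistributedFileSystem dfs, long directiveId)
      throws java.io.IOException {
    CacheDirectiveInfo update = new CacheDirectiveInfo.Builder()
        .setId(directiveId)           // required: selects the directive to modify
        .setReplication((short) 3)    // illustrative new cache replication factor
        .build();
    // FORCE skips the cache pool's quota check, as in the examples above.
    dfs.modifyCacheDirective(update, EnumSet.of(CacheFlag.FORCE));
  }
}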
Example #21
Source File: ClientNamenodeProtocolTranslatorPB.java From hadoop with Apache License 2.0 | 5 votes |
@Override
public void modifyCacheDirective(CacheDirectiveInfo directive,
    EnumSet<CacheFlag> flags) throws IOException {
  try {
    ModifyCacheDirectiveRequestProto.Builder builder =
        ModifyCacheDirectiveRequestProto.newBuilder().
            setInfo(PBHelper.convert(directive));
    if (!flags.isEmpty()) {
      builder.setCacheFlags(PBHelper.convertCacheFlags(flags));
    }
    rpcProxy.modifyCacheDirective(null, builder.build());
  } catch (ServiceException e) {
    throw ProtobufHelper.getRemoteException(e);
  }
}
Example #22
Source File: DFSClient.java From hadoop with Apache License 2.0 | 5 votes |
public long addCacheDirective(
    CacheDirectiveInfo info, EnumSet<CacheFlag> flags) throws IOException {
  checkOpen();
  TraceScope scope = Trace.startSpan("addCacheDirective", traceSampler);
  try {
    return namenode.addCacheDirective(info, flags);
  } catch (RemoteException re) {
    throw re.unwrapRemoteException();
  } finally {
    scope.close();
  }
}
Example #23
Source File: DFSClient.java From hadoop with Apache License 2.0 | 5 votes |
public void modifyCacheDirective(
    CacheDirectiveInfo info, EnumSet<CacheFlag> flags) throws IOException {
  checkOpen();
  TraceScope scope = Trace.startSpan("modifyCacheDirective", traceSampler);
  try {
    namenode.modifyCacheDirective(info, flags);
  } catch (RemoteException re) {
    throw re.unwrapRemoteException();
  } finally {
    scope.close();
  }
}
Example #24
Source File: TestRetryCacheWithHA.java From big-c with Apache License 2.0 | 5 votes |
@Override
void invoke() throws Exception {
  client.modifyCacheDirective(
      new CacheDirectiveInfo.Builder().
          setId(id).
          setReplication(newReplication).
          build(), EnumSet.of(CacheFlag.FORCE));
}
Example #25
Source File: DistributedFileSystem.java From hadoop with Apache License 2.0 | 5 votes |
/**
 * Add a new CacheDirective.
 *
 * @param info Information about a directive to add.
 * @param flags {@link CacheFlag}s to use for this operation.
 * @return the ID of the directive that was created.
 * @throws IOException if the directive could not be added
 */
public long addCacheDirective(
    CacheDirectiveInfo info, EnumSet<CacheFlag> flags) throws IOException {
  Preconditions.checkNotNull(info.getPath());
  Path path = new Path(getPathName(fixRelativePart(info.getPath()))).
      makeQualified(getUri(), getWorkingDirectory());
  return dfs.addCacheDirective(
      new CacheDirectiveInfo.Builder(info).
          setPath(path).
          build(),
      flags);
}
Example #26
Source File: DistributedFileSystem.java From hadoop with Apache License 2.0 | 5 votes |
/**
 * Modify a CacheDirective.
 *
 * @param info Information about the directive to modify. You must set the ID
 *          to indicate which CacheDirective you want to modify.
 * @param flags {@link CacheFlag}s to use for this operation.
 * @throws IOException if the directive could not be modified
 */
public void modifyCacheDirective(
    CacheDirectiveInfo info, EnumSet<CacheFlag> flags) throws IOException {
  if (info.getPath() != null) {
    info = new CacheDirectiveInfo.Builder(info).
        setPath(new Path(getPathName(fixRelativePart(info.getPath()))).
            makeQualified(getUri(), getWorkingDirectory())).build();
  }
  dfs.modifyCacheDirective(info, flags);
}
Example #27
Source File: FSNDNCacheOp.java From hadoop with Apache License 2.0 | 5 votes |
static void modifyCacheDirective(
    FSNamesystem fsn, CacheManager cacheManager, CacheDirectiveInfo directive,
    EnumSet<CacheFlag> flags, boolean logRetryCache) throws IOException {
  final FSPermissionChecker pc = getFsPermissionChecker(fsn);

  cacheManager.modifyDirective(directive, pc, flags);
  fsn.getEditLog().logModifyCacheDirectiveInfo(directive, logRetryCache);
}
Example #28
Source File: CacheManager.java From hadoop with Apache License 2.0 | 5 votes |
public CacheDirectiveInfo addDirective(
    CacheDirectiveInfo info, FSPermissionChecker pc, EnumSet<CacheFlag> flags)
    throws IOException {
  assert namesystem.hasWriteLock();
  CacheDirective directive;
  try {
    CachePool pool = getCachePool(validatePoolName(info));
    checkWritePermission(pc, pool);
    String path = validatePath(info);
    short replication = validateReplication(info, (short)1);
    long expiryTime = validateExpiryTime(info, pool.getMaxRelativeExpiryMs());
    // Do quota validation if required
    if (!flags.contains(CacheFlag.FORCE)) {
      checkLimit(pool, path, replication);
    }
    // All validation passed
    // Add a new entry with the next available ID.
    long id = getNextDirectiveId();
    directive = new CacheDirective(id, path, replication, expiryTime);
    addInternal(directive, pool);
  } catch (IOException e) {
    LOG.warn("addDirective of " + info + " failed: ", e);
    throw e;
  }
  LOG.info("addDirective of {} successful.", info);
  return directive.toInfo();
}
Example #29
Source File: PBHelper.java From hadoop with Apache License 2.0 | 5 votes |
public static int convertCacheFlags(EnumSet<CacheFlag> flags) {
  int value = 0;
  if (flags.contains(CacheFlag.FORCE)) {
    value |= CacheFlagProto.FORCE.getNumber();
  }
  return value;
}
Example #30
Source File: PBHelper.java From hadoop with Apache License 2.0 | 5 votes |
public static EnumSet<CacheFlag> convertCacheFlags(int flags) {
  EnumSet<CacheFlag> result = EnumSet.noneOf(CacheFlag.class);
  if ((flags & CacheFlagProto.FORCE_VALUE) == CacheFlagProto.FORCE_VALUE) {
    result.add(CacheFlag.FORCE);
  }
  return result;
}