org.apache.hadoop.security.token.TokenIdentifier Java Examples
The following examples show how to use org.apache.hadoop.security.token.TokenIdentifier.
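Most of the examples below follow the same basic pattern: construct or obtain a Token parameterized by a TokenIdentifier subtype, attach it to a UserGroupInformation or Credentials object, and later select it by kind or service. The minimal sketch below illustrates that pattern before the collected examples; the class names are real Hadoop classes, but the identifier bytes, token kind, and service address are placeholder values chosen only for illustration.

import org.apache.hadoop.io.Text;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.TokenIdentifier;

public class TokenIdentifierSketch {
  public static void main(String[] args) throws Exception {
    // A delegation token is parameterized by a TokenIdentifier subtype.
    // Identifier/password bytes, kind, and service below are placeholders.
    Token<TokenIdentifier> token = new Token<>(
        "identifier".getBytes(), "password".getBytes(),
        new Text("EXAMPLE_TOKEN_KIND"), new Text("127.0.0.1:8020"));

    // Tokens are usually attached to the current user's UGI or to a Credentials object.
    UserGroupInformation ugi = UserGroupInformation.createRemoteUser("someone");
    ugi.addToken(token);

    Credentials creds = new Credentials();
    creds.addToken(token.getService(), token);

    // Later, code typically selects a token from the collection by kind and/or service.
    for (Token<? extends TokenIdentifier> t : ugi.getTokens()) {
      if (new Text("EXAMPLE_TOKEN_KIND").equals(t.getKind())) {
        System.out.println("Found token for service " + t.getService());
      }
    }
  }
}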
Example #1
Source File: TestTokenAspect.java From hadoop with Apache License 2.0
@Test
public void testInitWithUGIToken() throws IOException, URISyntaxException {
  Configuration conf = new Configuration();
  DummyFs fs = spy(new DummyFs());
  doReturn(null).when(fs).getDelegationToken(anyString());
  Token<TokenIdentifier> token = new Token<TokenIdentifier>(new byte[0], new byte[0],
      DummyFs.TOKEN_KIND, new Text("127.0.0.1:1234"));
  fs.ugi.addToken(token);
  fs.ugi.addToken(new Token<TokenIdentifier>(new byte[0], new byte[0],
      new Text("Other token"), new Text("127.0.0.1:8021")));
  assertEquals("wrong tokens in user", 2, fs.ugi.getTokens().size());

  fs.emulateSecurityEnabled = true;
  fs.initialize(new URI("dummyfs://127.0.0.1:1234"), conf);
  fs.tokenAspect.ensureTokenInitialized();

  // Select a token from ugi (not from the remote host), store it but don't
  // renew it
  verify(fs).setDelegationToken(token);
  verify(fs, never()).getDelegationToken(anyString());
  assertNull(Whitebox.getInternalState(fs.tokenAspect, "dtRenewer"));
  assertNull(Whitebox.getInternalState(fs.tokenAspect, "action"));
}
Example #2
Source File: TestTokenAspect.java From big-c with Apache License 2.0
@Test
public void testInitWithUGIToken() throws IOException, URISyntaxException {
  Configuration conf = new Configuration();
  DummyFs fs = spy(new DummyFs());
  doReturn(null).when(fs).getDelegationToken(anyString());
  Token<TokenIdentifier> token = new Token<TokenIdentifier>(new byte[0], new byte[0],
      DummyFs.TOKEN_KIND, new Text("127.0.0.1:1234"));
  fs.ugi.addToken(token);
  fs.ugi.addToken(new Token<TokenIdentifier>(new byte[0], new byte[0],
      new Text("Other token"), new Text("127.0.0.1:8021")));
  assertEquals("wrong tokens in user", 2, fs.ugi.getTokens().size());

  fs.emulateSecurityEnabled = true;
  fs.initialize(new URI("dummyfs://127.0.0.1:1234"), conf);
  fs.tokenAspect.ensureTokenInitialized();

  // Select a token from ugi (not from the remote host), store it but don't
  // renew it
  verify(fs).setDelegationToken(token);
  verify(fs, never()).getDelegationToken(anyString());
  assertNull(Whitebox.getInternalState(fs.tokenAspect, "dtRenewer"));
  assertNull(Whitebox.getInternalState(fs.tokenAspect, "action"));
}
Example #3
Source File: LocalizerSecurityInfo.java From big-c with Apache License 2.0
@Override
public TokenInfo getTokenInfo(Class<?> protocol, Configuration conf) {
  if (!protocol.equals(LocalizationProtocolPB.class)) {
    return null;
  }
  return new TokenInfo() {

    @Override
    public Class<? extends Annotation> annotationType() {
      return null;
    }

    @Override
    public Class<? extends TokenSelector<? extends TokenIdentifier>> value() {
      LOG.debug("Using localizerTokenSecurityInfo");
      return LocalizerTokenSelector.class;
    }
  };
}
Example #4
Source File: TestBinaryTokenFile.java From hadoop with Apache License 2.0
private static void createBinaryTokenFile(Configuration conf) {
  // Fetch delegation tokens and store in binary token file.
  try {
    Credentials cred1 = new Credentials();
    Credentials cred2 = new Credentials();
    TokenCache.obtainTokensForNamenodesInternal(cred1, new Path[] { p1 }, conf);
    for (Token<? extends TokenIdentifier> t : cred1.getAllTokens()) {
      cred2.addToken(new Text(DELEGATION_TOKEN_KEY), t);
    }
    DataOutputStream os = new DataOutputStream(new FileOutputStream(
        binaryTokenFileName.toString()));
    try {
      cred2.writeTokenStorageToStream(os);
    } finally {
      os.close();
    }
  } catch (IOException e) {
    Assert.fail("Exception " + e);
  }
}
Example #5
Source File: ClientHSSecurityInfo.java From hadoop with Apache License 2.0
@Override
public TokenInfo getTokenInfo(Class<?> protocol, Configuration conf) {
  if (!protocol.equals(HSClientProtocolPB.class)) {
    return null;
  }
  return new TokenInfo() {

    @Override
    public Class<? extends Annotation> annotationType() {
      return null;
    }

    @Override
    public Class<? extends TokenSelector<? extends TokenIdentifier>> value() {
      return ClientHSTokenSelector.class;
    }
  };
}
Example #6
Source File: TestTezClientUtils.java From tez with Apache License 2.0
@Test
public void testSessionCredentialsMergedBeforeAmConfigCredentials() throws Exception {
  TezConfiguration conf = new TezConfiguration();
  Text tokenType = new Text("TEST_TOKEN_TYPE");
  Text tokenKind = new Text("TEST_TOKEN_KIND");
  Text tokenService = new Text("TEST_TOKEN_SERVICE");

  Credentials amConfigCredentials = new Credentials();
  amConfigCredentials.addToken(tokenType,
      new Token<>("id1".getBytes(), null, tokenKind, tokenService));

  Credentials sessionCredentials = new Credentials();
  Token<TokenIdentifier> sessionToken =
      new Token<>("id2".getBytes(), null, tokenKind, tokenService);
  sessionCredentials.addToken(tokenType, sessionToken);

  AMConfiguration amConfig = new AMConfiguration(conf, null, amConfigCredentials);

  Credentials amLaunchCredentials =
      TezClientUtils.prepareAmLaunchCredentials(amConfig, sessionCredentials, conf, null);

  // if there is another token in am conf creds of the same token type,
  // session token should be applied while creating ContainerLaunchContext
  Assert.assertEquals(sessionToken, amLaunchCredentials.getToken(tokenType));
}
Example #7
Source File: Utils.java From flink with Apache License 2.0
public static void setTokensFor(ContainerLaunchContext amContainer, List<Path> paths,
    Configuration conf) throws IOException {
  Credentials credentials = new Credentials();
  // for HDFS
  TokenCache.obtainTokensForNamenodes(credentials, paths.toArray(new Path[0]), conf);
  // for HBase
  obtainTokenForHBase(credentials, conf);
  // for user
  UserGroupInformation currUsr = UserGroupInformation.getCurrentUser();

  Collection<Token<? extends TokenIdentifier>> usrTok = currUsr.getTokens();
  for (Token<? extends TokenIdentifier> token : usrTok) {
    final Text id = new Text(token.getIdentifier());
    LOG.info("Adding user token " + id + " with " + token);
    credentials.addToken(id, token);
  }
  try (DataOutputBuffer dob = new DataOutputBuffer()) {
    credentials.writeTokenStorageToStream(dob);

    if (LOG.isDebugEnabled()) {
      LOG.debug("Wrote tokens. Credentials buffer length: " + dob.getLength());
    }

    ByteBuffer securityTokens = ByteBuffer.wrap(dob.getData(), 0, dob.getLength());
    amContainer.setTokens(securityTokens);
  }
}
Example #8
Source File: TestBlockToken.java From hadoop with Apache License 2.0
@Override
public GetReplicaVisibleLengthResponseProto answer(InvocationOnMock invocation)
    throws IOException {
  Object args[] = invocation.getArguments();
  assertEquals(2, args.length);
  GetReplicaVisibleLengthRequestProto req =
      (GetReplicaVisibleLengthRequestProto) args[1];
  Set<TokenIdentifier> tokenIds = UserGroupInformation.getCurrentUser()
      .getTokenIdentifiers();
  assertEquals("Only one BlockTokenIdentifier expected", 1, tokenIds.size());
  long result = 0;
  for (TokenIdentifier tokenId : tokenIds) {
    BlockTokenIdentifier id = (BlockTokenIdentifier) tokenId;
    LOG.info("Got: " + id.toString());
    assertTrue("Received BlockTokenIdentifier is wrong", ident.equals(id));
    sm.checkAccess(id, null, PBHelper.convert(req.getBlock()),
        BlockTokenSecretManager.AccessMode.WRITE);
    result = id.getBlockId();
  }
  return GetReplicaVisibleLengthResponseProto.newBuilder()
      .setLength(result).build();
}
Example #9
Source File: TestUserGroupInformation.java From hadoop with Apache License 2.0
@SuppressWarnings("unchecked") // from Mockito mocks
@Test(timeout = 30000)
public <T extends TokenIdentifier> void testGetCreds() throws Exception {
  UserGroupInformation ugi = UserGroupInformation.createRemoteUser("someone");

  Text service = new Text("service");
  Token<T> t1 = mock(Token.class);
  when(t1.getService()).thenReturn(service);
  Token<T> t2 = mock(Token.class);
  when(t2.getService()).thenReturn(new Text("service2"));
  Token<T> t3 = mock(Token.class);
  when(t3.getService()).thenReturn(service);

  // add token to ugi
  ugi.addToken(t1);
  ugi.addToken(t2);
  checkTokens(ugi, t1, t2);

  Credentials creds = ugi.getCredentials();
  creds.addToken(t3.getService(), t3);
  assertSame(t3, creds.getToken(service));
  // check that ugi wasn't modified
  checkTokens(ugi, t1, t2);
}
Example #10
Source File: TestClientToAMTokens.java From big-c with Apache License 2.0
@Override
public TokenInfo getTokenInfo(Class<?> protocol, Configuration conf) {
  return new TokenInfo() {

    @Override
    public Class<? extends Annotation> annotationType() {
      return null;
    }

    @Override
    public Class<? extends TokenSelector<? extends TokenIdentifier>> value() {
      return ClientToAMTokenSelector.class;
    }
  };
}
Example #11
Source File: TestTokenCache.java From tez with Apache License 2.0
private MockFileSystem createFileSystemForServiceName(final String service)
    throws IOException {
  MockFileSystem mockFs = new MockFileSystem();
  when(mockFs.getCanonicalServiceName()).thenReturn(service);
  when(mockFs.getDelegationToken(any(String.class))).thenAnswer(
      new Answer<Token<?>>() {
        int unique = 0;

        @Override
        public Token<?> answer(InvocationOnMock invocation) throws Throwable {
          Token<?> token = new Token<TokenIdentifier>();
          token.setService(new Text(service));
          // use unique value so when we restore from token storage, we can
          // tell if it's really the same token
          token.setKind(new Text("token" + unique++));
          return token;
        }
      });
  return mockFs;
}
Example #12
Source File: TestClientToAMTokens.java From hadoop with Apache License 2.0
@Override
public TokenInfo getTokenInfo(Class<?> protocol, Configuration conf) {
  return new TokenInfo() {

    @Override
    public Class<? extends Annotation> annotationType() {
      return null;
    }

    @Override
    public Class<? extends TokenSelector<? extends TokenIdentifier>> value() {
      return ClientToAMTokenSelector.class;
    }
  };
}
Example #13
Source File: RMDelegationTokenSelector.java From hadoop with Apache License 2.0
@SuppressWarnings("unchecked")
public Token<RMDelegationTokenIdentifier> selectToken(Text service,
    Collection<Token<? extends TokenIdentifier>> tokens) {
  if (service == null) {
    return null;
  }
  LOG.debug("Looking for a token with service " + service.toString());
  for (Token<? extends TokenIdentifier> token : tokens) {
    LOG.debug("Token kind is " + token.getKind().toString()
        + " and the token's service name is " + token.getService());
    if (RMDelegationTokenIdentifier.KIND_NAME.equals(token.getKind())
        && checkService(service, token)) {
      return (Token<RMDelegationTokenIdentifier>) token;
    }
  }
  return null;
}
Example #14
Source File: LaunchContainerRunnable.java From attic-apex-core with Apache License 2.0
public static ByteBuffer getTokens(UserGroupInformation ugi,
    Token<StramDelegationTokenIdentifier> delegationToken) {
  try {
    Collection<Token<? extends TokenIdentifier>> tokens = ugi.getCredentials().getAllTokens();
    Credentials credentials = new Credentials();
    for (Token<? extends TokenIdentifier> token : tokens) {
      if (!token.getKind().equals(AMRMTokenIdentifier.KIND_NAME)) {
        credentials.addToken(token.getService(), token);
        LOG.debug("Passing container token {}", token);
      }
    }
    credentials.addToken(delegationToken.getService(), delegationToken);
    DataOutputBuffer dataOutput = new DataOutputBuffer();
    credentials.writeTokenStorageToStream(dataOutput);
    byte[] tokenBytes = dataOutput.getData();
    ByteBuffer cTokenBuf = ByteBuffer.wrap(tokenBytes);
    return cTokenBuf.duplicate();
  } catch (IOException e) {
    throw new RuntimeException("Error generating delegation token", e);
  }
}
Example #15
Source File: ClientToAMTokenSelector.java From hadoop with Apache License 2.0
@SuppressWarnings("unchecked")
public Token<ClientToAMTokenIdentifier> selectToken(Text service,
    Collection<Token<? extends TokenIdentifier>> tokens) {
  if (service == null) {
    return null;
  }
  LOG.debug("Looking for a token with service " + service.toString());
  for (Token<? extends TokenIdentifier> token : tokens) {
    LOG.debug("Token kind is " + token.getKind().toString()
        + " and the token's service name is " + token.getService());
    if (ClientToAMTokenIdentifier.KIND_NAME.equals(token.getKind())
        && service.equals(token.getService())) {
      return (Token<ClientToAMTokenIdentifier>) token;
    }
  }
  return null;
}
Example #16
Source File: Server.java From hadoop with Apache License 2.0
private UserGroupInformation getAuthorizedUgi(String authorizedId)
    throws InvalidToken, AccessControlException {
  if (authMethod == AuthMethod.TOKEN) {
    TokenIdentifier tokenId = SaslRpcServer.getIdentifier(authorizedId, secretManager);
    UserGroupInformation ugi = tokenId.getUser();
    if (ugi == null) {
      throw new AccessControlException(
          "Can't retrieve username from tokenIdentifier.");
    }
    ugi.addTokenIdentifier(tokenId);
    return ugi;
  } else {
    return UserGroupInformation.createRemoteUser(authorizedId, authMethod);
  }
}
Example #17
Source File: ApplicationMaster.java From stratosphere with Apache License 2.0
public static void main(String[] args) throws Exception {
  final String yarnClientUsername = System.getenv(Client.ENV_CLIENT_USERNAME);
  LOG.info("YARN daemon runs as '"
      + UserGroupInformation.getCurrentUser().getShortUserName()
      + "' setting user to execute Stratosphere ApplicationMaster/JobManager to '"
      + yarnClientUsername + "'");
  UserGroupInformation ugi = UserGroupInformation.createRemoteUser(yarnClientUsername);
  for (Token<? extends TokenIdentifier> toks : UserGroupInformation.getCurrentUser().getTokens()) {
    ugi.addToken(toks);
  }
  ugi.doAs(new PrivilegedAction<Object>() {
    @Override
    public Object run() {
      try {
        new ApplicationMaster().run();
      } catch (Exception e) {
        e.printStackTrace();
      }
      return null;
    }
  });
}
Example #18
Source File: CustomSaslAuthenticationProviderTestBase.java From hbase with Apache License 2.0
@Override
public UserGroupInformation getAuthorizedUgi(String authzId,
    SecretManager<TokenIdentifier> secretManager) throws IOException {
  UserGroupInformation authorizedUgi;
  byte[] encodedId = SaslUtil.decodeIdentifier(authzId);
  PasswordAuthTokenIdentifier tokenId = new PasswordAuthTokenIdentifier();
  try {
    tokenId.readFields(new DataInputStream(new ByteArrayInputStream(encodedId)));
  } catch (IOException e) {
    throw new IOException("Can't de-serialize PasswordAuthTokenIdentifier", e);
  }
  authorizedUgi = tokenId.getUser();
  if (authorizedUgi == null) {
    throw new AccessDeniedException("Can't retrieve username from tokenIdentifier.");
  }
  authorizedUgi.addTokenIdentifier(tokenId);
  authorizedUgi.setAuthenticationMethod(getSaslAuthMethod().getAuthMethod());
  return authorizedUgi;
}
Example #19
Source File: HadoopSecurityManager_H_2_0.java From azkaban-plugins with Apache License 2.0
private void cancelNameNodeToken(final Token<? extends TokenIdentifier> t,
    String userToProxy) throws HadoopSecurityManagerException {
  try {
    getProxiedUser(userToProxy).doAs(new PrivilegedExceptionAction<Void>() {
      @Override
      public Void run() throws Exception {
        cancelToken(t);
        return null;
      }

      private void cancelToken(Token<?> nt) throws IOException, InterruptedException {
        nt.cancel(conf);
      }
    });
  } catch (Exception e) {
    throw new HadoopSecurityManagerException("Failed to cancel token. "
        + e.getMessage() + e.getCause(), e);
  }
}
Example #20
Source File: AMRMTokenSelector.java From big-c with Apache License 2.0
@SuppressWarnings("unchecked")
public Token<AMRMTokenIdentifier> selectToken(Text service,
    Collection<Token<? extends TokenIdentifier>> tokens) {
  if (service == null) {
    return null;
  }
  LOG.debug("Looking for a token with service " + service.toString());
  for (Token<? extends TokenIdentifier> token : tokens) {
    LOG.debug("Token kind is " + token.getKind().toString()
        + " and the token's service name is " + token.getService());
    if (AMRMTokenIdentifier.KIND_NAME.equals(token.getKind())
        && checkService(service, token)) {
      return (Token<AMRMTokenIdentifier>) token;
    }
  }
  return null;
}
Example #21
Source File: YarnRPC.java From big-c with Apache License 2.0
public Server getServer(Class protocol, Object instance, InetSocketAddress addr,
    Configuration conf, SecretManager<? extends TokenIdentifier> secretManager,
    int numHandlers) {
  return getServer(protocol, instance, addr, conf, secretManager, numHandlers, null);
}
Example #22
Source File: TestAMAuthorization.java From hadoop with Apache License 2.0
@SuppressWarnings("unchecked")
public static Token<? extends TokenIdentifier> setupAndReturnAMRMToken(
    InetSocketAddress rmBindAddress,
    Collection<Token<? extends TokenIdentifier>> allTokens) {
  for (Token<? extends TokenIdentifier> token : allTokens) {
    if (token.getKind().equals(AMRMTokenIdentifier.KIND_NAME)) {
      SecurityUtil.setTokenService(token, rmBindAddress);
      return (Token<AMRMTokenIdentifier>) token;
    }
  }
  return null;
}
Example #23
Source File: TestUserGroupInformation.java From big-c with Apache License 2.0
@SuppressWarnings("unchecked") // from Mockito mocks
@Test(timeout = 30000)
public <T extends TokenIdentifier> void testAddToken() throws Exception {
  UserGroupInformation ugi = UserGroupInformation.createRemoteUser("someone");

  Token<T> t1 = mock(Token.class);
  Token<T> t2 = mock(Token.class);
  Token<T> t3 = mock(Token.class);

  // add token to ugi
  ugi.addToken(t1);
  checkTokens(ugi, t1);

  // replace token t1 with t2 - with same key (null)
  ugi.addToken(t2);
  checkTokens(ugi, t2);

  // change t1 service and add token
  when(t1.getService()).thenReturn(new Text("t1"));
  ugi.addToken(t1);
  checkTokens(ugi, t1, t2);

  // overwrite t1 token with t3 - same key (!null)
  when(t3.getService()).thenReturn(new Text("t1"));
  ugi.addToken(t3);
  checkTokens(ugi, t2, t3);

  // just try to re-add with new name
  when(t1.getService()).thenReturn(new Text("t1.1"));
  ugi.addToken(t1);
  checkTokens(ugi, t1, t2, t3);

  // just try to re-add with new name again
  ugi.addToken(t1);
  checkTokens(ugi, t1, t2, t3);
}
Example #24
Source File: TestClientRMTokens.java From hadoop with Apache License 2.0
@Override
public Server getServer(Class protocol, Object instance, InetSocketAddress addr,
    Configuration conf, SecretManager<? extends TokenIdentifier> secretManager,
    int numHandlers, String portRangeConfig) {
  throw new RuntimeException("getServer");
}
Example #25
Source File: Server.java From hadoop with Apache License 2.0
protected Server(String bindAddress, int port,
    Class<? extends Writable> rpcRequestClass, int handlerCount,
    int numReaders, int queueSizePerHandler, Configuration conf,
    String serverName, SecretManager<? extends TokenIdentifier> secretManager)
    throws IOException {
  this(bindAddress, port, rpcRequestClass, handlerCount, numReaders,
      queueSizePerHandler, conf, serverName, secretManager, null);
}
Example #26
Source File: GssSaslServerAuthenticationProvider.java From hbase with Apache License 2.0
@Override
public UserGroupInformation getAuthorizedUgi(String authzId,
    SecretManager<TokenIdentifier> secretManager) throws IOException {
  UserGroupInformation ugi = UserGroupInformation.createRemoteUser(authzId);
  ugi.setAuthenticationMethod(getSaslAuthMethod().getAuthMethod());
  return ugi;
}
Example #27
Source File: NamenodeWebHdfsMethods.java From hadoop with Apache License 2.0
private Token<? extends TokenIdentifier> generateDelegationToken(
    final NameNode namenode, final UserGroupInformation ugi,
    final String renewer) throws IOException {
  final Credentials c = DelegationTokenSecretManager.createCredentials(
      namenode, ugi, renewer != null ? renewer : ugi.getShortUserName());
  if (c == null) {
    return null;
  }
  final Token<? extends TokenIdentifier> t = c.getAllTokens().iterator().next();
  Text kind = request.getScheme().equals("http") ? WebHdfsFileSystem.TOKEN_KIND
      : SWebHdfsFileSystem.TOKEN_KIND;
  t.setKind(kind);
  return t;
}
Example #28
Source File: ProtobufRpcEngine.java From hadoop with Apache License 2.0
/**
 * Construct an RPC server.
 *
 * @param protocolClass the class of protocol
 * @param protocolImpl the protocolImpl whose methods will be called
 * @param conf the configuration to use
 * @param bindAddress the address to bind on to listen for connection
 * @param port the port to listen for connections on
 * @param numHandlers the number of method handler threads to run
 * @param verbose whether each call should be logged
 * @param portRangeConfig A config parameter that can be used to restrict
 *     the range of ports used when port is 0 (an ephemeral port)
 */
public Server(Class<?> protocolClass, Object protocolImpl,
    Configuration conf, String bindAddress, int port, int numHandlers,
    int numReaders, int queueSizePerHandler, boolean verbose,
    SecretManager<? extends TokenIdentifier> secretManager,
    String portRangeConfig) throws IOException {
  super(bindAddress, port, null, numHandlers, numReaders, queueSizePerHandler,
      conf, classNameBase(protocolImpl.getClass().getName()), secretManager,
      portRangeConfig);
  this.verbose = verbose;
  registerProtocolAndImpl(RPC.RpcKind.RPC_PROTOCOL_BUFFER, protocolClass,
      protocolImpl);
}
Example #29
Source File: HadoopSecurityManager_H_1_0.java From azkaban-plugins with Apache License 2.0
private void cancelHiveToken(final Token<? extends TokenIdentifier> t,
    String userToProxy) throws HadoopSecurityManagerException {
  try {
    HiveConf hiveConf = new HiveConf();
    HiveMetaStoreClient hiveClient = new HiveMetaStoreClient(hiveConf);
    hiveClient.cancelDelegationToken(t.encodeToUrlString());
  } catch (Exception e) {
    e.printStackTrace();
    throw new HadoopSecurityManagerException("Failed to cancel Token. "
        + e.getMessage() + e.getCause());
  }
}
Example #30
Source File: SimpleSaslServerAuthenticationProvider.java From hbase with Apache License 2.0
@Override
public UserGroupInformation getAuthorizedUgi(String authzId,
    SecretManager<TokenIdentifier> secretManager) throws IOException {
  UserGroupInformation ugi = UserGroupInformation.createRemoteUser(authzId);
  ugi.setAuthenticationMethod(getSaslAuthMethod().getAuthMethod());
  return ugi;
}