Java Code Examples for org.apache.hadoop.security.UserGroupInformation#setAuthenticationMethod()
The following examples show how to use
org.apache.hadoop.security.UserGroupInformation#setAuthenticationMethod().
This method tags a UserGroupInformation (UGI) instance with the AuthenticationMethod (such as SIMPLE, KERBEROS, TOKEN, or PROXY)
that Hadoop's security layer should report for that user. The source file and project for each example are noted in the header above it.
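Before the project examples, here is a minimal, self-contained sketch of the call pattern most of them share: create a UGI, tag it with an authentication method, and read the tag back. The class name SetAuthenticationMethodSketch and the user name "demo-user" are placeholders chosen for illustration; the Hadoop calls themselves (createRemoteUser, setAuthenticationMethod, getAuthenticationMethod) are the same ones exercised in the examples below.

import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;

public class SetAuthenticationMethodSketch {
  public static void main(String[] args) {
    // Create an in-memory UGI for a remote user; no actual login is performed.
    UserGroupInformation ugi = UserGroupInformation.createRemoteUser("demo-user");

    // Tag the UGI with the authentication method the security layer should report.
    ugi.setAuthenticationMethod(AuthenticationMethod.KERBEROS);

    // The tag can be read back via getAuthenticationMethod().
    System.out.println(ugi.getUserName() + " -> " + ugi.getAuthenticationMethod());
  }
}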
Example 1
Source File: TestDFSClientCache.java From hadoop with Apache License 2.0
@Test
public void testGetUserGroupInformationSecure() throws IOException {
  String userName = "user1";
  String currentUser = "test-user";

  NfsConfiguration conf = new NfsConfiguration();
  UserGroupInformation currentUserUgi =
      UserGroupInformation.createRemoteUser(currentUser);
  currentUserUgi.setAuthenticationMethod(KERBEROS);
  UserGroupInformation.setLoginUser(currentUserUgi);

  DFSClientCache cache = new DFSClientCache(conf);
  UserGroupInformation ugiResult =
      cache.getUserGroupInformation(userName, currentUserUgi);

  assertThat(ugiResult.getUserName(), is(userName));
  assertThat(ugiResult.getRealUser(), is(currentUserUgi));
  assertThat(
      ugiResult.getAuthenticationMethod(),
      is(UserGroupInformation.AuthenticationMethod.PROXY));
}
Example 2
Source File: CustomSaslAuthenticationProviderTestBase.java From hbase with Apache License 2.0
@Override
public UserGroupInformation getAuthorizedUgi(String authzId,
    SecretManager<TokenIdentifier> secretManager) throws IOException {
  UserGroupInformation authorizedUgi;
  byte[] encodedId = SaslUtil.decodeIdentifier(authzId);
  PasswordAuthTokenIdentifier tokenId = new PasswordAuthTokenIdentifier();
  try {
    tokenId.readFields(new DataInputStream(new ByteArrayInputStream(encodedId)));
  } catch (IOException e) {
    throw new IOException("Can't de-serialize PasswordAuthTokenIdentifier", e);
  }
  authorizedUgi = tokenId.getUser();
  if (authorizedUgi == null) {
    throw new AccessDeniedException("Can't retrieve username from tokenIdentifier.");
  }
  authorizedUgi.addTokenIdentifier(tokenId);
  authorizedUgi.setAuthenticationMethod(getSaslAuthMethod().getAuthMethod());
  return authorizedUgi;
}
Example 3
Source File: TestDFSClientCache.java From big-c with Apache License 2.0
@Test
public void testGetUserGroupInformationSecure() throws IOException {
  String userName = "user1";
  String currentUser = "test-user";

  NfsConfiguration conf = new NfsConfiguration();
  UserGroupInformation currentUserUgi =
      UserGroupInformation.createRemoteUser(currentUser);
  currentUserUgi.setAuthenticationMethod(KERBEROS);
  UserGroupInformation.setLoginUser(currentUserUgi);

  DFSClientCache cache = new DFSClientCache(conf);
  UserGroupInformation ugiResult =
      cache.getUserGroupInformation(userName, currentUserUgi);

  assertThat(ugiResult.getUserName(), is(userName));
  assertThat(ugiResult.getRealUser(), is(currentUserUgi));
  assertThat(
      ugiResult.getAuthenticationMethod(),
      is(UserGroupInformation.AuthenticationMethod.PROXY));
}
Example 4
Source File: AbstractDelegationTokenIdentifier.java From big-c with Apache License 2.0
/**
 * Get the username encoded in the token identifier
 *
 * @return the username or owner
 */
@Override
public UserGroupInformation getUser() {
  if ((owner == null) || (owner.toString().isEmpty())) {
    return null;
  }
  final UserGroupInformation realUgi;
  final UserGroupInformation ugi;
  if ((realUser == null) || (realUser.toString().isEmpty())
      || realUser.equals(owner)) {
    ugi = realUgi = UserGroupInformation.createRemoteUser(owner.toString());
  } else {
    realUgi = UserGroupInformation.createRemoteUser(realUser.toString());
    ugi = UserGroupInformation.createProxyUser(owner.toString(), realUgi);
  }
  realUgi.setAuthenticationMethod(AuthenticationMethod.TOKEN);
  return ugi;
}
Example 5
Source File: UGIProvider.java From pxf with Apache License 2.0
/**
 * Wrapper for {@link UserGroupInformation} creation of remote users
 *
 * @param user the name of the remote user
 * @param session session containing information on current configuration and login user
 * @return a remote {@link UserGroupInformation}.
 */
UserGroupInformation createRemoteUser(String user, SessionId session) throws IOException {
  if (Utilities.isSecurityEnabled(session.getConfiguration())) {
    UserGroupInformation proxyUGI = createProxyUGI(user, session.getLoginUser());
    proxyUGI.setAuthenticationMethod(UserGroupInformation.AuthenticationMethod.KERBEROS);
    return proxyUGI;
  }
  return UserGroupInformation.createRemoteUser(user);
}
Example 6
Source File: TestSecureOzoneCluster.java From hadoop-ozone with Apache License 2.0
@Test
public void testSCMSecurityProtocol() throws Exception {
  initSCM();
  scm = HddsTestUtils.getScm(conf);
  // Reads the SCM Info from SCM instance
  try {
    scm.start();

    // Case 1: User with Kerberos credentials should succeed.
    UserGroupInformation ugi =
        UserGroupInformation.loginUserFromKeytabAndReturnUGI(
            testUserPrincipal, testUserKeytab.getCanonicalPath());
    ugi.setAuthenticationMethod(KERBEROS);
    SCMSecurityProtocol scmSecurityProtocolClient =
        HddsServerUtil.getScmSecurityClient(conf, ugi);
    assertNotNull(scmSecurityProtocolClient);
    String caCert = scmSecurityProtocolClient.getCACertificate();
    assertNotNull(caCert);
    LambdaTestUtils.intercept(RemoteException.class, "Certificate not found",
        () -> scmSecurityProtocolClient.getCertificate("1"));

    // Case 2: User without Kerberos credentials should fail.
    ugi = UserGroupInformation.createRemoteUser("test");
    ugi.setAuthenticationMethod(AuthMethod.TOKEN);
    SCMSecurityProtocol finalScmSecurityProtocolClient =
        HddsServerUtil.getScmSecurityClient(conf, ugi);

    String cannotAuthMessage = "Client cannot authenticate via:[KERBEROS]";
    LambdaTestUtils.intercept(IOException.class, cannotAuthMessage,
        finalScmSecurityProtocolClient::getCACertificate);
    LambdaTestUtils.intercept(IOException.class, cannotAuthMessage,
        () -> finalScmSecurityProtocolClient.getCertificate("1"));
  } finally {
    if (scm != null) {
      scm.stop();
    }
  }
}
Example 7
Source File: WebHdfsPersistReader.java From streams with Apache License 2.0
private synchronized void connectToWebHDFS() {
  try {
    LOGGER.info("User : {}", this.hdfsConfiguration.getUser());
    UserGroupInformation ugi =
        UserGroupInformation.createRemoteUser(this.hdfsConfiguration.getUser());
    ugi.setAuthenticationMethod(UserGroupInformation.AuthenticationMethod.SIMPLE);

    ugi.doAs((PrivilegedExceptionAction<Void>) () -> {
      Configuration conf = new Configuration();
      conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
      conf.set("fs.hdfs.impl", DistributedFileSystem.class.getName());
      conf.set("fs.file.impl", LocalFileSystem.class.getName());
      LOGGER.info("WebURI : {}", getURI().toString());
      client = FileSystem.get(getURI(), conf);
      LOGGER.info("Connected to WebHDFS");

      /*
       * ************************************************************************************************
       * This code is an example of how you would work with HDFS if you weren't going over
       * the webHDFS protocol.
       *
       * Smashew: 2013-10-01
       * ************************************************************************************************
      conf.set("fs.defaultFS", "hdfs://hadoop.mdigitallife.com:8020/user/" + userName);
      conf.set("namenode.host", "0.0.0.0");
      conf.set("hadoop.job.ugi", userName);
      conf.set(DFSConfigKeys.DFS_NAMENODE_USER_NAME_KEY, "runner");
      fileSystem.createNewFile(new Path("/user/" + userName + "/test"));
      FileStatus[] status = fs.listStatus(new Path("/user/" + userName));
      for (int i = 0; i < status.length; i++) {
        LOGGER.info("Directory: {}", status[i].getPath());
      }
      */
      return null;
    });
  } catch (Exception ex) {
    LOGGER.error("There was an error connecting to WebHDFS, please check your settings and try again");
    ex.printStackTrace();
  }
}
Example 8
Source File: DigestSaslServerAuthenticationProvider.java From hbase with Apache License 2.0
@Override
public UserGroupInformation getAuthorizedUgi(String authzId,
    SecretManager<TokenIdentifier> secretManager) throws IOException {
  UserGroupInformation authorizedUgi;
  TokenIdentifier tokenId = HBaseSaslRpcServer.getIdentifier(authzId, secretManager);
  authorizedUgi = tokenId.getUser();
  if (authorizedUgi == null) {
    throw new AccessDeniedException(
        "Can't retrieve username from tokenIdentifier.");
  }
  authorizedUgi.addTokenIdentifier(tokenId);
  authorizedUgi.setAuthenticationMethod(getSaslAuthMethod().getAuthMethod());
  return authorizedUgi;
}
Example 9
Source File: GssSaslServerAuthenticationProvider.java From hbase with Apache License 2.0
@Override
public UserGroupInformation getAuthorizedUgi(String authzId,
    SecretManager<TokenIdentifier> secretManager) throws IOException {
  UserGroupInformation ugi = UserGroupInformation.createRemoteUser(authzId);
  ugi.setAuthenticationMethod(getSaslAuthMethod().getAuthMethod());
  return ugi;
}
Example 10
Source File: SimpleSaslServerAuthenticationProvider.java From hbase with Apache License 2.0
@Override
public UserGroupInformation getAuthorizedUgi(String authzId,
    SecretManager<TokenIdentifier> secretManager) throws IOException {
  UserGroupInformation ugi = UserGroupInformation.createRemoteUser(authzId);
  ugi.setAuthenticationMethod(getSaslAuthMethod().getAuthMethod());
  return ugi;
}
Example 11
Source File: WebHdfsPersistWriter.java From streams with Apache License 2.0
private synchronized void connectToWebHDFS() {
  try {
    LOGGER.info("User : {}", this.hdfsConfiguration.getUser());
    UserGroupInformation ugi =
        UserGroupInformation.createRemoteUser(this.hdfsConfiguration.getUser());
    ugi.setAuthenticationMethod(UserGroupInformation.AuthenticationMethod.SIMPLE);

    ugi.doAs(new PrivilegedExceptionAction<Void>() {
      public Void run() throws Exception {
        Configuration conf = new Configuration();
        conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
        LOGGER.info("WebURI : {}", getURI().toString());
        client = FileSystem.get(getURI(), conf);
        LOGGER.info("Connected to WebHDFS");

        /*
         * ************************************************************************************************
         * This code is an example of how you would work with HDFS if you weren't going over
         * the webHDFS protocol.
         *
         * Smashew: 2013-10-01
         * ************************************************************************************************
        conf.set("fs.defaultFS", "hdfs://hadoop.mdigitallife.com:8020/user/" + userName);
        conf.set("namenode.host", "0.0.0.0");
        conf.set("hadoop.job.ugi", userName);
        conf.set(DFSConfigKeys.DFS_NAMENODE_USER_NAME_KEY, "runner");
        fileSystem.createNewFile(new Path("/user/" + userName + "/test"));
        FileStatus[] status = fs.listStatus(new Path("/user/" + userName));
        for (int i = 0; i < status.length; i++) {
          LOGGER.info("Directory: {}", status[i].getPath());
        }
        */
        return null;
      }
    });
  } catch (Exception ex) {
    LOGGER.error("There was an error connecting to WebHDFS, please check your settings and try again", ex);
    throw new RuntimeException(ex);
  }
}
Example 12
Source File: ShadeSaslServerAuthenticationProvider.java From hbase with Apache License 2.0
UserGroupInformation createUgiForRemoteUser(String username) {
  UserGroupInformation ugi = UserGroupInformation.createRemoteUser(username);
  ugi.setAuthenticationMethod(ShadeSaslAuthenticationProvider.METHOD.getAuthMethod());
  return ugi;
}
Example 13
Source File: HadoopUtilsTest.java From flink with Apache License 2.0
private static UserGroupInformation createTestUser(AuthenticationMethod authenticationMethod) {
  UserGroupInformation user = UserGroupInformation.createRemoteUser("test-user");
  user.setAuthenticationMethod(authenticationMethod);
  return user;
}
Example 14
Source File: TestWebHdfsUrl.java From big-c with Apache License 2.0
@Test(timeout=60000)
public void testSecureProxyAuthParamsInUrl() throws IOException {
  Configuration conf = new Configuration();
  // fake turning on security so api thinks it should use tokens
  SecurityUtil.setAuthenticationMethod(KERBEROS, conf);
  UserGroupInformation.setConfiguration(conf);

  UserGroupInformation ugi = UserGroupInformation.createRemoteUser("test-user");
  ugi.setAuthenticationMethod(KERBEROS);
  ugi = UserGroupInformation.createProxyUser("test-proxy-user", ugi);
  UserGroupInformation.setLoginUser(ugi);

  WebHdfsFileSystem webhdfs = getWebHdfsFileSystem(ugi, conf);
  Path fsPath = new Path("/");
  String tokenString = webhdfs.getDelegationToken().encodeToUrlString();

  // send real+effective
  URL getTokenUrl = webhdfs.toUrl(GetOpParam.Op.GETDELEGATIONTOKEN, fsPath);
  checkQueryParams(
      new String[]{
          GetOpParam.Op.GETDELEGATIONTOKEN.toQueryString(),
          new UserParam(ugi.getRealUser().getShortUserName()).toString(),
          new DoAsParam(ugi.getShortUserName()).toString()
      },
      getTokenUrl);

  // send real+effective
  URL renewTokenUrl = webhdfs.toUrl(PutOpParam.Op.RENEWDELEGATIONTOKEN, fsPath,
      new TokenArgumentParam(tokenString));
  checkQueryParams(
      new String[]{
          PutOpParam.Op.RENEWDELEGATIONTOKEN.toQueryString(),
          new UserParam(ugi.getRealUser().getShortUserName()).toString(),
          new DoAsParam(ugi.getShortUserName()).toString(),
          new TokenArgumentParam(tokenString).toString(),
      },
      renewTokenUrl);

  // send token
  URL cancelTokenUrl = webhdfs.toUrl(PutOpParam.Op.CANCELDELEGATIONTOKEN, fsPath,
      new TokenArgumentParam(tokenString));
  checkQueryParams(
      new String[]{
          PutOpParam.Op.CANCELDELEGATIONTOKEN.toQueryString(),
          new UserParam(ugi.getRealUser().getShortUserName()).toString(),
          new DoAsParam(ugi.getShortUserName()).toString(),
          new TokenArgumentParam(tokenString).toString(),
      },
      cancelTokenUrl);

  // send token
  URL fileStatusUrl = webhdfs.toUrl(GetOpParam.Op.GETFILESTATUS, fsPath);
  checkQueryParams(
      new String[]{
          GetOpParam.Op.GETFILESTATUS.toQueryString(),
          new DelegationParam(tokenString).toString()
      },
      fileStatusUrl);

  // wipe out internal token to simulate auth always required
  webhdfs.setDelegationToken(null);

  // send real+effective
  cancelTokenUrl = webhdfs.toUrl(PutOpParam.Op.CANCELDELEGATIONTOKEN, fsPath,
      new TokenArgumentParam(tokenString));
  checkQueryParams(
      new String[]{
          PutOpParam.Op.CANCELDELEGATIONTOKEN.toQueryString(),
          new UserParam(ugi.getRealUser().getShortUserName()).toString(),
          new DoAsParam(ugi.getShortUserName()).toString(),
          new TokenArgumentParam(tokenString).toString()
      },
      cancelTokenUrl);

  // send real+effective
  fileStatusUrl = webhdfs.toUrl(GetOpParam.Op.GETFILESTATUS, fsPath);
  checkQueryParams(
      new String[]{
          GetOpParam.Op.GETFILESTATUS.toQueryString(),
          new UserParam(ugi.getRealUser().getShortUserName()).toString(),
          new DoAsParam(ugi.getShortUserName()).toString()
      },
      fileStatusUrl);
}
Example 15
Source File: TestWebHdfsUrl.java From big-c with Apache License 2.0
@Test(timeout=60000)
public void testSecureAuthParamsInUrl() throws IOException {
  Configuration conf = new Configuration();
  // fake turning on security so api thinks it should use tokens
  SecurityUtil.setAuthenticationMethod(KERBEROS, conf);
  UserGroupInformation.setConfiguration(conf);

  UserGroupInformation ugi = UserGroupInformation.createRemoteUser("test-user");
  ugi.setAuthenticationMethod(KERBEROS);
  UserGroupInformation.setLoginUser(ugi);

  WebHdfsFileSystem webhdfs = getWebHdfsFileSystem(ugi, conf);
  Path fsPath = new Path("/");
  String tokenString = webhdfs.getDelegationToken().encodeToUrlString();

  // send user
  URL getTokenUrl = webhdfs.toUrl(GetOpParam.Op.GETDELEGATIONTOKEN, fsPath);
  checkQueryParams(
      new String[]{
          GetOpParam.Op.GETDELEGATIONTOKEN.toQueryString(),
          new UserParam(ugi.getShortUserName()).toString()
      },
      getTokenUrl);

  // send user
  URL renewTokenUrl = webhdfs.toUrl(PutOpParam.Op.RENEWDELEGATIONTOKEN, fsPath,
      new TokenArgumentParam(tokenString));
  checkQueryParams(
      new String[]{
          PutOpParam.Op.RENEWDELEGATIONTOKEN.toQueryString(),
          new UserParam(ugi.getShortUserName()).toString(),
          new TokenArgumentParam(tokenString).toString(),
      },
      renewTokenUrl);

  // send token
  URL cancelTokenUrl = webhdfs.toUrl(PutOpParam.Op.CANCELDELEGATIONTOKEN, fsPath,
      new TokenArgumentParam(tokenString));
  checkQueryParams(
      new String[]{
          PutOpParam.Op.CANCELDELEGATIONTOKEN.toQueryString(),
          new UserParam(ugi.getShortUserName()).toString(),
          new TokenArgumentParam(tokenString).toString(),
      },
      cancelTokenUrl);

  // send token
  URL fileStatusUrl = webhdfs.toUrl(GetOpParam.Op.GETFILESTATUS, fsPath);
  checkQueryParams(
      new String[]{
          GetOpParam.Op.GETFILESTATUS.toQueryString(),
          new DelegationParam(tokenString).toString()
      },
      fileStatusUrl);

  // wipe out internal token to simulate auth always required
  webhdfs.setDelegationToken(null);

  // send user
  cancelTokenUrl = webhdfs.toUrl(PutOpParam.Op.CANCELDELEGATIONTOKEN, fsPath,
      new TokenArgumentParam(tokenString));
  checkQueryParams(
      new String[]{
          PutOpParam.Op.CANCELDELEGATIONTOKEN.toQueryString(),
          new UserParam(ugi.getShortUserName()).toString(),
          new TokenArgumentParam(tokenString).toString(),
      },
      cancelTokenUrl);

  // send user
  fileStatusUrl = webhdfs.toUrl(GetOpParam.Op.GETFILESTATUS, fsPath);
  checkQueryParams(
      new String[]{
          GetOpParam.Op.GETFILESTATUS.toQueryString(),
          new UserParam(ugi.getShortUserName()).toString()
      },
      fileStatusUrl);
}
Example 16
Source File: JspHelper.java From big-c with Apache License 2.0
/**
 * Get {@link UserGroupInformation} and possibly the delegation token out of
 * the request.
 * @param context the ServletContext that is serving this request.
 * @param request the http request
 * @param conf configuration
 * @param secureAuthMethod the AuthenticationMethod used in secure mode.
 * @param tryUgiParameter Should it try the ugi parameter?
 * @return a new user from the request
 * @throws AccessControlException if the request has no token
 */
public static UserGroupInformation getUGI(ServletContext context,
    HttpServletRequest request, Configuration conf,
    final AuthenticationMethod secureAuthMethod,
    final boolean tryUgiParameter) throws IOException {
  UserGroupInformation ugi = null;
  final String usernameFromQuery = getUsernameFromQuery(request, tryUgiParameter);
  final String doAsUserFromQuery = request.getParameter(DoAsParam.NAME);
  final String remoteUser;

  if (UserGroupInformation.isSecurityEnabled()) {
    remoteUser = request.getRemoteUser();
    final String tokenString = request.getParameter(DELEGATION_PARAMETER_NAME);
    if (tokenString != null) {
      // Token-based connections need only verify the effective user, and
      // disallow proxying to different user.  Proxy authorization checks
      // are not required since the checks apply to issuing a token.
      ugi = getTokenUGI(context, request, tokenString, conf);
      checkUsername(ugi.getShortUserName(), usernameFromQuery);
      checkUsername(ugi.getShortUserName(), doAsUserFromQuery);
    } else if (remoteUser == null) {
      throw new IOException(
          "Security enabled but user not authenticated by filter");
    }
  } else {
    // Security's not on, pull from url or use default web user
    remoteUser = (usernameFromQuery == null)
        ? getDefaultWebUserName(conf) // not specified in request
        : usernameFromQuery;
  }

  if (ugi == null) { // security is off, or there's no token
    ugi = UserGroupInformation.createRemoteUser(remoteUser);
    checkUsername(ugi.getShortUserName(), usernameFromQuery);
    if (UserGroupInformation.isSecurityEnabled()) {
      // This is not necessarily true, could have been auth'ed by user-facing
      // filter
      ugi.setAuthenticationMethod(secureAuthMethod);
    }
    if (doAsUserFromQuery != null) {
      // create and attempt to authorize a proxy user
      ugi = UserGroupInformation.createProxyUser(doAsUserFromQuery, ugi);
      ProxyUsers.authorize(ugi, getRemoteAddr(request));
    }
  }

  if (LOG.isDebugEnabled()) {
    LOG.debug("getUGI is returning: " + ugi.getShortUserName());
  }
  return ugi;
}
Example 17
Source File: TestWebHdfsUrl.java From hadoop with Apache License 2.0
@Test(timeout=60000)
public void testSecureProxyAuthParamsInUrl() throws IOException {
  Configuration conf = new Configuration();
  // fake turning on security so api thinks it should use tokens
  SecurityUtil.setAuthenticationMethod(KERBEROS, conf);
  UserGroupInformation.setConfiguration(conf);

  UserGroupInformation ugi = UserGroupInformation.createRemoteUser("test-user");
  ugi.setAuthenticationMethod(KERBEROS);
  ugi = UserGroupInformation.createProxyUser("test-proxy-user", ugi);
  UserGroupInformation.setLoginUser(ugi);

  WebHdfsFileSystem webhdfs = getWebHdfsFileSystem(ugi, conf);
  Path fsPath = new Path("/");
  String tokenString = webhdfs.getDelegationToken().encodeToUrlString();

  // send real+effective
  URL getTokenUrl = webhdfs.toUrl(GetOpParam.Op.GETDELEGATIONTOKEN, fsPath);
  checkQueryParams(
      new String[]{
          GetOpParam.Op.GETDELEGATIONTOKEN.toQueryString(),
          new UserParam(ugi.getRealUser().getShortUserName()).toString(),
          new DoAsParam(ugi.getShortUserName()).toString()
      },
      getTokenUrl);

  // send real+effective
  URL renewTokenUrl = webhdfs.toUrl(PutOpParam.Op.RENEWDELEGATIONTOKEN, fsPath,
      new TokenArgumentParam(tokenString));
  checkQueryParams(
      new String[]{
          PutOpParam.Op.RENEWDELEGATIONTOKEN.toQueryString(),
          new UserParam(ugi.getRealUser().getShortUserName()).toString(),
          new DoAsParam(ugi.getShortUserName()).toString(),
          new TokenArgumentParam(tokenString).toString(),
      },
      renewTokenUrl);

  // send token
  URL cancelTokenUrl = webhdfs.toUrl(PutOpParam.Op.CANCELDELEGATIONTOKEN, fsPath,
      new TokenArgumentParam(tokenString));
  checkQueryParams(
      new String[]{
          PutOpParam.Op.CANCELDELEGATIONTOKEN.toQueryString(),
          new UserParam(ugi.getRealUser().getShortUserName()).toString(),
          new DoAsParam(ugi.getShortUserName()).toString(),
          new TokenArgumentParam(tokenString).toString(),
      },
      cancelTokenUrl);

  // send token
  URL fileStatusUrl = webhdfs.toUrl(GetOpParam.Op.GETFILESTATUS, fsPath);
  checkQueryParams(
      new String[]{
          GetOpParam.Op.GETFILESTATUS.toQueryString(),
          new DelegationParam(tokenString).toString()
      },
      fileStatusUrl);

  // wipe out internal token to simulate auth always required
  webhdfs.setDelegationToken(null);

  // send real+effective
  cancelTokenUrl = webhdfs.toUrl(PutOpParam.Op.CANCELDELEGATIONTOKEN, fsPath,
      new TokenArgumentParam(tokenString));
  checkQueryParams(
      new String[]{
          PutOpParam.Op.CANCELDELEGATIONTOKEN.toQueryString(),
          new UserParam(ugi.getRealUser().getShortUserName()).toString(),
          new DoAsParam(ugi.getShortUserName()).toString(),
          new TokenArgumentParam(tokenString).toString()
      },
      cancelTokenUrl);

  // send real+effective
  fileStatusUrl = webhdfs.toUrl(GetOpParam.Op.GETFILESTATUS, fsPath);
  checkQueryParams(
      new String[]{
          GetOpParam.Op.GETFILESTATUS.toQueryString(),
          new UserParam(ugi.getRealUser().getShortUserName()).toString(),
          new DoAsParam(ugi.getShortUserName()).toString()
      },
      fileStatusUrl);
}
Example 18
Source File: TestWebHdfsUrl.java From hadoop with Apache License 2.0
@Test(timeout=60000)
public void testSecureAuthParamsInUrl() throws IOException {
  Configuration conf = new Configuration();
  // fake turning on security so api thinks it should use tokens
  SecurityUtil.setAuthenticationMethod(KERBEROS, conf);
  UserGroupInformation.setConfiguration(conf);

  UserGroupInformation ugi = UserGroupInformation.createRemoteUser("test-user");
  ugi.setAuthenticationMethod(KERBEROS);
  UserGroupInformation.setLoginUser(ugi);

  WebHdfsFileSystem webhdfs = getWebHdfsFileSystem(ugi, conf);
  Path fsPath = new Path("/");
  String tokenString = webhdfs.getDelegationToken().encodeToUrlString();

  // send user
  URL getTokenUrl = webhdfs.toUrl(GetOpParam.Op.GETDELEGATIONTOKEN, fsPath);
  checkQueryParams(
      new String[]{
          GetOpParam.Op.GETDELEGATIONTOKEN.toQueryString(),
          new UserParam(ugi.getShortUserName()).toString()
      },
      getTokenUrl);

  // send user
  URL renewTokenUrl = webhdfs.toUrl(PutOpParam.Op.RENEWDELEGATIONTOKEN, fsPath,
      new TokenArgumentParam(tokenString));
  checkQueryParams(
      new String[]{
          PutOpParam.Op.RENEWDELEGATIONTOKEN.toQueryString(),
          new UserParam(ugi.getShortUserName()).toString(),
          new TokenArgumentParam(tokenString).toString(),
      },
      renewTokenUrl);

  // send token
  URL cancelTokenUrl = webhdfs.toUrl(PutOpParam.Op.CANCELDELEGATIONTOKEN, fsPath,
      new TokenArgumentParam(tokenString));
  checkQueryParams(
      new String[]{
          PutOpParam.Op.CANCELDELEGATIONTOKEN.toQueryString(),
          new UserParam(ugi.getShortUserName()).toString(),
          new TokenArgumentParam(tokenString).toString(),
      },
      cancelTokenUrl);

  // send token
  URL fileStatusUrl = webhdfs.toUrl(GetOpParam.Op.GETFILESTATUS, fsPath);
  checkQueryParams(
      new String[]{
          GetOpParam.Op.GETFILESTATUS.toQueryString(),
          new DelegationParam(tokenString).toString()
      },
      fileStatusUrl);

  // wipe out internal token to simulate auth always required
  webhdfs.setDelegationToken(null);

  // send user
  cancelTokenUrl = webhdfs.toUrl(PutOpParam.Op.CANCELDELEGATIONTOKEN, fsPath,
      new TokenArgumentParam(tokenString));
  checkQueryParams(
      new String[]{
          PutOpParam.Op.CANCELDELEGATIONTOKEN.toQueryString(),
          new UserParam(ugi.getShortUserName()).toString(),
          new TokenArgumentParam(tokenString).toString(),
      },
      cancelTokenUrl);

  // send user
  fileStatusUrl = webhdfs.toUrl(GetOpParam.Op.GETFILESTATUS, fsPath);
  checkQueryParams(
      new String[]{
          GetOpParam.Op.GETFILESTATUS.toQueryString(),
          new UserParam(ugi.getShortUserName()).toString()
      },
      fileStatusUrl);
}
Example 19
Source File: JspHelper.java From hadoop with Apache License 2.0
/**
 * Get {@link UserGroupInformation} and possibly the delegation token out of
 * the request.
 * @param context the ServletContext that is serving this request.
 * @param request the http request
 * @param conf configuration
 * @param secureAuthMethod the AuthenticationMethod used in secure mode.
 * @param tryUgiParameter Should it try the ugi parameter?
 * @return a new user from the request
 * @throws AccessControlException if the request has no token
 */
public static UserGroupInformation getUGI(ServletContext context,
    HttpServletRequest request, Configuration conf,
    final AuthenticationMethod secureAuthMethod,
    final boolean tryUgiParameter) throws IOException {
  UserGroupInformation ugi = null;
  final String usernameFromQuery = getUsernameFromQuery(request, tryUgiParameter);
  final String doAsUserFromQuery = request.getParameter(DoAsParam.NAME);
  final String remoteUser;

  if (UserGroupInformation.isSecurityEnabled()) {
    remoteUser = request.getRemoteUser();
    final String tokenString = request.getParameter(DELEGATION_PARAMETER_NAME);
    if (tokenString != null) {
      // Token-based connections need only verify the effective user, and
      // disallow proxying to different user.  Proxy authorization checks
      // are not required since the checks apply to issuing a token.
      ugi = getTokenUGI(context, request, tokenString, conf);
      checkUsername(ugi.getShortUserName(), usernameFromQuery);
      checkUsername(ugi.getShortUserName(), doAsUserFromQuery);
    } else if (remoteUser == null) {
      throw new IOException(
          "Security enabled but user not authenticated by filter");
    }
  } else {
    // Security's not on, pull from url or use default web user
    remoteUser = (usernameFromQuery == null)
        ? getDefaultWebUserName(conf) // not specified in request
        : usernameFromQuery;
  }

  if (ugi == null) { // security is off, or there's no token
    ugi = UserGroupInformation.createRemoteUser(remoteUser);
    checkUsername(ugi.getShortUserName(), usernameFromQuery);
    if (UserGroupInformation.isSecurityEnabled()) {
      // This is not necessarily true, could have been auth'ed by user-facing
      // filter
      ugi.setAuthenticationMethod(secureAuthMethod);
    }
    if (doAsUserFromQuery != null) {
      // create and attempt to authorize a proxy user
      ugi = UserGroupInformation.createProxyUser(doAsUserFromQuery, ugi);
      ProxyUsers.authorize(ugi, getRemoteAddr(request));
    }
  }

  if (LOG.isDebugEnabled()) {
    LOG.debug("getUGI is returning: " + ugi.getShortUserName());
  }
  return ugi;
}
Example 20
Source File: HadoopUtils.java From ignite with Apache License 2.0
/**
 * Create UserGroupInformation for specified user and credentials.
 *
 * @param user User.
 * @param credentialsBytes Credentials byte array.
 */
public static UserGroupInformation createUGI(String user, byte[] credentialsBytes)
    throws IOException {
  Credentials credentials = new Credentials();
  HadoopUtils.deserialize(credentials, credentialsBytes);

  UserGroupInformation ugi = UserGroupInformation.createRemoteUser(user);
  ugi.addCredentials(credentials);

  if (credentials.numberOfTokens() > 0)
    ugi.setAuthenticationMethod(UserGroupInformation.AuthenticationMethod.TOKEN);

  return ugi;
}