Java Code Examples for org.apache.hadoop.security.Credentials#writeTokenStorageFile()
The following examples show how to use
org.apache.hadoop.security.Credentials#writeTokenStorageFile().
You can vote up the ones you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example 1
Source File: TestTokenCache.java From hadoop with Apache License 2.0 | 6 votes |
@SuppressWarnings("deprecation") @Test public void testGetTokensForNamenodes() throws IOException, URISyntaxException { Path TEST_ROOT_DIR = new Path(System.getProperty("test.build.data", "test/build/data")); // ick, but need fq path minus file:/ String binaryTokenFile = FileSystem.getLocal(conf) .makeQualified(new Path(TEST_ROOT_DIR, "tokenFile")).toUri() .getPath(); MockFileSystem fs1 = createFileSystemForServiceName("service1"); Credentials creds = new Credentials(); Token<?> token1 = fs1.getDelegationToken(renewer); creds.addToken(token1.getService(), token1); // wait to set, else the obtain tokens call above will fail with FNF conf.set(MRJobConfig.MAPREDUCE_JOB_CREDENTIALS_BINARY, binaryTokenFile); creds.writeTokenStorageFile(new Path(binaryTokenFile), conf); TokenCache.obtainTokensForNamenodesInternal(fs1, creds, conf); String fs_addr = fs1.getCanonicalServiceName(); Token<?> nnt = TokenCache.getDelegationToken(creds, fs_addr); assertNotNull("Token for nn is null", nnt); }
Example 2
Source File: TestTokenCache.java From big-c with Apache License 2.0 | 6 votes |
@SuppressWarnings("deprecation") @Test public void testGetTokensForNamenodes() throws IOException, URISyntaxException { Path TEST_ROOT_DIR = new Path(System.getProperty("test.build.data", "test/build/data")); // ick, but need fq path minus file:/ String binaryTokenFile = FileSystem.getLocal(conf) .makeQualified(new Path(TEST_ROOT_DIR, "tokenFile")).toUri() .getPath(); MockFileSystem fs1 = createFileSystemForServiceName("service1"); Credentials creds = new Credentials(); Token<?> token1 = fs1.getDelegationToken(renewer); creds.addToken(token1.getService(), token1); // wait to set, else the obtain tokens call above will fail with FNF conf.set(MRJobConfig.MAPREDUCE_JOB_CREDENTIALS_BINARY, binaryTokenFile); creds.writeTokenStorageFile(new Path(binaryTokenFile), conf); TokenCache.obtainTokensForNamenodesInternal(fs1, creds, conf); String fs_addr = fs1.getCanonicalServiceName(); Token<?> nnt = TokenCache.getDelegationToken(creds, fs_addr); assertNotNull("Token for nn is null", nnt); }
Example 3
Source File: TestTokenCache.java From incubator-tez with Apache License 2.0 | 5 votes |
@Test @SuppressWarnings("deprecation") public void testBinaryCredentials() throws Exception { String binaryTokenFile = null; try { Path TEST_ROOT_DIR = new Path("target"); binaryTokenFile = FileSystem.getLocal(conf).makeQualified( new Path(TEST_ROOT_DIR, "tokenFile")).toUri().getPath(); MockFileSystem fs1 = createFileSystemForServiceName("service1"); MockFileSystem fs2 = createFileSystemForServiceName("service2"); MockFileSystem fs3 = createFileSystemForServiceName("service3"); // get the tokens for fs1 & fs2 and write out to binary creds file Credentials creds = new Credentials(); Token<?> token1 = fs1.getDelegationToken(renewer); Token<?> token2 = fs2.getDelegationToken(renewer); creds.addToken(token1.getService(), token1); creds.addToken(token2.getService(), token2); creds.writeTokenStorageFile(new Path(binaryTokenFile), conf); Credentials newCreds = new Credentials(); TokenCache.mergeBinaryTokens(newCreds, conf, binaryTokenFile); Assert.assertTrue(newCreds.getAllTokens().size() > 0); checkTokens(creds, newCreds); } finally { if (binaryTokenFile != null) { try { FileSystem.getLocal(conf).delete(new Path(binaryTokenFile)); } catch (IOException e) { // Ignore } } } }
Example 4
Source File: TestTokenCache.java From tez with Apache License 2.0 | 5 votes |
@Test(timeout = 5000) @SuppressWarnings("deprecation") public void testBinaryCredentials() throws Exception { String binaryTokenFile = null; try { Path TEST_ROOT_DIR = new Path("target"); binaryTokenFile = FileSystem.getLocal(conf).makeQualified( new Path(TEST_ROOT_DIR, "tokenFile")).toUri().getPath(); MockFileSystem fs1 = createFileSystemForServiceName("service1"); MockFileSystem fs2 = createFileSystemForServiceName("service2"); // get the tokens for fs1 & fs2 and write out to binary creds file Credentials creds = new Credentials(); Token<?> token1 = fs1.getDelegationToken(renewer); Token<?> token2 = fs2.getDelegationToken(renewer); creds.addToken(token1.getService(), token1); creds.addToken(token2.getService(), token2); creds.writeTokenStorageFile(new Path(binaryTokenFile), conf); Credentials newCreds = new Credentials(); TokenCache.mergeBinaryTokens(newCreds, conf, binaryTokenFile); Assert.assertTrue(newCreds.getAllTokens().size() > 0); checkTokens(creds, newCreds); } finally { if (binaryTokenFile != null) { try { FileSystem.getLocal(conf).delete(new Path(binaryTokenFile)); } catch (IOException e) { // Ignore } } } }
Example 5
Source File: UtilsTest.java From Flink-CEPplus with Apache License 2.0 | 4 votes |
/**
 * Verifies that {@code Utils.createTaskExecutorContext} filters the YARN AM/RM token
 * out of the credentials handed to the task executor while forwarding the HDFS
 * delegation token.
 */
@Test
public void testCreateTaskExecutorCredentials() throws Exception {
  File root = temporaryFolder.getRoot();
  File home = new File(root, "home");
  boolean created = home.mkdir();
  assertTrue(created);

  Configuration flinkConf = new Configuration();
  YarnConfiguration yarnConf = new YarnConfiguration();

  // Minimal container environment required by createTaskExecutorContext.
  Map<String, String> env = new HashMap<>();
  env.put(YarnConfigKeys.ENV_APP_ID, "foo");
  env.put(YarnConfigKeys.ENV_CLIENT_HOME_DIR, home.getAbsolutePath());
  env.put(YarnConfigKeys.ENV_CLIENT_SHIP_FILES, "");
  env.put(YarnConfigKeys.ENV_FLINK_CLASSPATH, "");
  env.put(YarnConfigKeys.ENV_HADOOP_USER_NAME, "foo");
  env.put(YarnConfigKeys.FLINK_JAR_PATH, root.toURI().toString());
  env = Collections.unmodifiableMap(env);

  File credentialFile = temporaryFolder.newFile("container_tokens");
  final Text amRmTokenKind = AMRMTokenIdentifier.KIND_NAME;
  final Text hdfsDelegationTokenKind = new Text("HDFS_DELEGATION_TOKEN");
  final Text service = new Text("test-service");

  // Write one AM/RM token and one HDFS delegation token into a credentials file,
  // the same format a YARN AM would receive.
  Credentials amCredentials = new Credentials();
  amCredentials.addToken(amRmTokenKind, new Token<>(new byte[4], new byte[4], amRmTokenKind, service));
  amCredentials.addToken(hdfsDelegationTokenKind,
      new Token<>(new byte[4], new byte[4], hdfsDelegationTokenKind, service));
  amCredentials.writeTokenStorageFile(
      new org.apache.hadoop.fs.Path(credentialFile.getAbsolutePath()), yarnConf);

  ContaineredTaskManagerParameters tmParams =
      new ContaineredTaskManagerParameters(64, 64, 16, 1, new HashMap<>(1));
  Configuration taskManagerConf = new Configuration();

  String workingDirectory = root.getAbsolutePath();
  Class<?> taskManagerMainClass = YarnTaskExecutorRunner.class;
  ContainerLaunchContext ctx;

  // Point HADOOP_TOKEN_FILE_LOCATION at the credentials file while the launch
  // context is built; always restore the original process environment afterwards.
  final Map<String, String> originalEnv = System.getenv();
  try {
    Map<String, String> systemEnv = new HashMap<>(originalEnv);
    systemEnv.put("HADOOP_TOKEN_FILE_LOCATION", credentialFile.getAbsolutePath());
    CommonTestUtils.setEnv(systemEnv);
    ctx = Utils.createTaskExecutorContext(flinkConf, yarnConf, env, tmParams,
        taskManagerConf, workingDirectory, taskManagerMainClass, LOG);
  } finally {
    CommonTestUtils.setEnv(originalEnv);
  }

  // Deserialize the tokens embedded in the resulting launch context.
  Credentials credentials = new Credentials();
  try (DataInputStream dis = new DataInputStream(new ByteArrayInputStream(ctx.getTokens().array()))) {
    credentials.readTokenStorageStream(dis);
  }

  Collection<Token<? extends TokenIdentifier>> tokens = credentials.getAllTokens();
  boolean hasHdfsDelegationToken = false;
  boolean hasAmRmToken = false;
  for (Token<? extends TokenIdentifier> token : tokens) {
    if (token.getKind().equals(amRmTokenKind)) {
      hasAmRmToken = true;
    } else if (token.getKind().equals(hdfsDelegationTokenKind)) {
      hasHdfsDelegationToken = true;
    }
  }
  // HDFS delegation token must be forwarded; the AM/RM token must be filtered out.
  assertTrue(hasHdfsDelegationToken);
  assertFalse(hasAmRmToken);
}
Example 6
Source File: UtilsTest.java From flink with Apache License 2.0 | 4 votes |
/**
 * Verifies that {@code Utils.createTaskExecutorContext} filters the YARN AM/RM token
 * out of the credentials handed to the task executor while forwarding the HDFS
 * delegation token.
 */
@Test
public void testCreateTaskExecutorCredentials() throws Exception {
  File root = temporaryFolder.getRoot();
  File home = new File(root, "home");
  boolean created = home.mkdir();
  assertTrue(created);

  Configuration flinkConf = new Configuration();
  YarnConfiguration yarnConf = new YarnConfiguration();

  // Minimal container environment required by createTaskExecutorContext.
  Map<String, String> env = new HashMap<>();
  env.put(YarnConfigKeys.ENV_APP_ID, "foo");
  env.put(YarnConfigKeys.ENV_CLIENT_HOME_DIR, home.getAbsolutePath());
  env.put(YarnConfigKeys.ENV_CLIENT_SHIP_FILES, "");
  env.put(YarnConfigKeys.ENV_FLINK_CLASSPATH, "");
  env.put(YarnConfigKeys.ENV_HADOOP_USER_NAME, "foo");
  env.put(YarnConfigKeys.FLINK_JAR_PATH, root.toURI().toString());
  env = Collections.unmodifiableMap(env);

  File credentialFile = temporaryFolder.newFile("container_tokens");
  final Text amRmTokenKind = AMRMTokenIdentifier.KIND_NAME;
  final Text hdfsDelegationTokenKind = new Text("HDFS_DELEGATION_TOKEN");
  final Text service = new Text("test-service");

  // Write one AM/RM token and one HDFS delegation token into a credentials file,
  // the same format a YARN AM would receive.
  Credentials amCredentials = new Credentials();
  amCredentials.addToken(amRmTokenKind, new Token<>(new byte[4], new byte[4], amRmTokenKind, service));
  amCredentials.addToken(hdfsDelegationTokenKind,
      new Token<>(new byte[4], new byte[4], hdfsDelegationTokenKind, service));
  amCredentials.writeTokenStorageFile(
      new org.apache.hadoop.fs.Path(credentialFile.getAbsolutePath()), yarnConf);

  ContaineredTaskManagerParameters tmParams =
      new ContaineredTaskManagerParameters(64, 64, 16, 1, new HashMap<>(1));
  Configuration taskManagerConf = new Configuration();

  String workingDirectory = root.getAbsolutePath();
  Class<?> taskManagerMainClass = YarnTaskExecutorRunner.class;
  ContainerLaunchContext ctx;

  // Point HADOOP_TOKEN_FILE_LOCATION at the credentials file while the launch
  // context is built; always restore the original process environment afterwards.
  final Map<String, String> originalEnv = System.getenv();
  try {
    Map<String, String> systemEnv = new HashMap<>(originalEnv);
    systemEnv.put("HADOOP_TOKEN_FILE_LOCATION", credentialFile.getAbsolutePath());
    CommonTestUtils.setEnv(systemEnv);
    ctx = Utils.createTaskExecutorContext(flinkConf, yarnConf, env, tmParams,
        taskManagerConf, workingDirectory, taskManagerMainClass, LOG);
  } finally {
    CommonTestUtils.setEnv(originalEnv);
  }

  // Deserialize the tokens embedded in the resulting launch context.
  Credentials credentials = new Credentials();
  try (DataInputStream dis = new DataInputStream(new ByteArrayInputStream(ctx.getTokens().array()))) {
    credentials.readTokenStorageStream(dis);
  }

  Collection<Token<? extends TokenIdentifier>> tokens = credentials.getAllTokens();
  boolean hasHdfsDelegationToken = false;
  boolean hasAmRmToken = false;
  for (Token<? extends TokenIdentifier> token : tokens) {
    if (token.getKind().equals(amRmTokenKind)) {
      hasAmRmToken = true;
    } else if (token.getKind().equals(hdfsDelegationTokenKind)) {
      hasHdfsDelegationToken = true;
    }
  }
  // HDFS delegation token must be forwarded; the AM/RM token must be filtered out.
  assertTrue(hasHdfsDelegationToken);
  assertFalse(hasAmRmToken);
}
Example 7
Source File: TestMRAppMaster.java From hadoop with Apache License 2.0 | 4 votes |
/**
 * Verifies how credentials supplied to the MR AppMaster via the token file are
 * propagated: the user token and secret key must reach the AM credentials and the
 * job conf, while the AM/RM app token must be kept only in the AM's UGI.
 */
@Test
public void testMRAppMasterCredentials() throws Exception {
  Logger rootLogger = LogManager.getRootLogger();
  rootLogger.setLevel(Level.DEBUG);

  // Simulate credentials passed to AM via client->RM->NM
  Credentials credentials = new Credentials();
  byte[] identifier = "MyIdentifier".getBytes();
  byte[] password = "MyPassword".getBytes();
  Text kind = new Text("MyTokenKind");
  Text service = new Text("host:port");
  Token<? extends TokenIdentifier> myToken =
      new Token<TokenIdentifier>(identifier, password, kind, service);
  Text tokenAlias = new Text("myToken");
  credentials.addToken(tokenAlias, myToken);

  // An AM/RM token, which the AM is expected to strip from task credentials.
  Text appTokenService = new Text("localhost:0");
  Token<AMRMTokenIdentifier> appToken =
      new Token<AMRMTokenIdentifier>(identifier, password,
          AMRMTokenIdentifier.KIND_NAME, appTokenService);
  credentials.addToken(appTokenService, appToken);

  Text keyAlias = new Text("mySecretKeyAlias");
  credentials.addSecretKey(keyAlias, "mySecretKey".getBytes());
  Token<? extends TokenIdentifier> storedToken = credentials.getToken(tokenAlias);

  JobConf conf = new JobConf();

  // Write the credentials to the token file that UGI reads at login time, and
  // point HADOOP_TOKEN_FILE_LOCATION at it via the environment hack.
  Path tokenFilePath = new Path(testDir.getAbsolutePath(), "tokens-file");
  Map<String, String> newEnv = new HashMap<String, String>();
  newEnv.put(UserGroupInformation.HADOOP_TOKEN_FILE_LOCATION, tokenFilePath
      .toUri().getPath());
  setNewEnvironmentHack(newEnv);
  credentials.writeTokenStorageFile(tokenFilePath, conf);

  ApplicationId appId = ApplicationId.newInstance(12345, 56);
  ApplicationAttemptId applicationAttemptId =
      ApplicationAttemptId.newInstance(appId, 1);
  ContainerId containerId = ContainerId.newContainerId(applicationAttemptId, 546);
  String userName = UserGroupInformation.getCurrentUser().getShortUserName();

  // Create staging dir, so MRAppMaster doesn't barf.
  File stagingDir =
      new File(MRApps.getStagingAreaDir(conf, userName).toString());
  stagingDir.mkdirs();

  // Set login-user to null as that is how real world MRApp starts with.
  // This is null is the reason why token-file is read by UGI.
  UserGroupInformation.setLoginUser(null);

  MRAppMasterTest appMaster =
      new MRAppMasterTest(applicationAttemptId, containerId, "host", -1, -1,
          System.currentTimeMillis(), false, true);
  MRAppMaster.initAndStartAppMaster(appMaster, conf, userName);

  // Now validate the task credentials
  Credentials appMasterCreds = appMaster.getCredentials();
  Assert.assertNotNull(appMasterCreds);
  Assert.assertEquals(1, appMasterCreds.numberOfSecretKeys());
  Assert.assertEquals(1, appMasterCreds.numberOfTokens());

  // Validate the tokens - app token should not be present
  Token<? extends TokenIdentifier> usedToken = appMasterCreds.getToken(tokenAlias);
  Assert.assertNotNull(usedToken);
  Assert.assertEquals(storedToken, usedToken);

  // Validate the keys
  byte[] usedKey = appMasterCreds.getSecretKey(keyAlias);
  Assert.assertNotNull(usedKey);
  Assert.assertEquals("mySecretKey", new String(usedKey));

  // The credentials should also be added to conf so that OuputCommitter can
  // access it - app token should not be present
  Credentials confCredentials = conf.getCredentials();
  Assert.assertEquals(1, confCredentials.numberOfSecretKeys());
  Assert.assertEquals(1, confCredentials.numberOfTokens());
  Assert.assertEquals(storedToken, confCredentials.getToken(tokenAlias));
  Assert.assertEquals("mySecretKey",
      new String(confCredentials.getSecretKey(keyAlias)));

  // Verify the AM's ugi - app token should be present
  Credentials ugiCredentials = appMaster.getUgi().getCredentials();
  Assert.assertEquals(1, ugiCredentials.numberOfSecretKeys());
  Assert.assertEquals(2, ugiCredentials.numberOfTokens());
  Assert.assertEquals(storedToken, ugiCredentials.getToken(tokenAlias));
  Assert.assertEquals(appToken, ugiCredentials.getToken(appTokenService));
  Assert.assertEquals("mySecretKey",
      new String(ugiCredentials.getSecretKey(keyAlias)));
}
Example 8
Source File: TestGenericOptionsParser.java From hadoop with Apache License 2.0 | 4 votes |
/**
 * Tests the {@code -tokenCacheFile} option of the generic options parser: a missing
 * file must be rejected, and a valid credentials file must be registered in the conf
 * and its tokens loaded into the current UGI.
 * @throws IOException
 */
public void testTokenCacheOption() throws IOException {
  FileSystem localFs = FileSystem.getLocal(conf);

  File tmpFile = new File(testDir, "tokenCacheFile");
  if(tmpFile.exists()) {
    tmpFile.delete();
  }
  String[] args = new String[2];
  // pass a files option
  args[0] = "-tokenCacheFile";
  args[1] = tmpFile.toURI().toString();

  // test non existing file
  Throwable th = null;
  try {
    new GenericOptionsParser(conf, args);
  } catch (Exception e) {
    th = e;
  }
  assertNotNull(th);
  assertTrue("FileNotFoundException is not thrown",
      th instanceof FileNotFoundException);

  // create file
  Path tmpPath = localFs.makeQualified(new Path(tmpFile.toString()));
  Token<?> token = new Token<AbstractDelegationTokenIdentifier>(
      "identifier".getBytes(), "password".getBytes(),
      new Text("token-kind"), new Text("token-service"));
  Credentials creds = new Credentials();
  creds.addToken(new Text("token-alias"), token);
  creds.writeTokenStorageFile(tmpPath, conf);

  // Parsing again with the file present must record it in the job conf.
  new GenericOptionsParser(conf, args);
  String fileName = conf.get("mapreduce.job.credentials.binary");
  assertNotNull("files is null", fileName);
  assertEquals("files option does not match", tmpPath.toString(), fileName);

  // The token from the cache file must have been loaded into the current UGI.
  Credentials ugiCreds =
      UserGroupInformation.getCurrentUser().getCredentials();
  assertEquals(1, ugiCreds.numberOfTokens());
  Token<?> ugiToken = ugiCreds.getToken(new Text("token-alias"));
  assertNotNull(ugiToken);
  assertEquals(token, ugiToken);

  localFs.delete(new Path(testDir.getAbsolutePath()), true);
}
Example 9
Source File: TestMRAppMaster.java From big-c with Apache License 2.0 | 4 votes |
/**
 * Verifies how credentials supplied to the MR AppMaster via the token file are
 * propagated: the user token and secret key must reach the AM credentials and the
 * job conf, while the AM/RM app token must be kept only in the AM's UGI.
 */
@Test
public void testMRAppMasterCredentials() throws Exception {
  Logger rootLogger = LogManager.getRootLogger();
  rootLogger.setLevel(Level.DEBUG);

  // Simulate credentials passed to AM via client->RM->NM
  Credentials credentials = new Credentials();
  byte[] identifier = "MyIdentifier".getBytes();
  byte[] password = "MyPassword".getBytes();
  Text kind = new Text("MyTokenKind");
  Text service = new Text("host:port");
  Token<? extends TokenIdentifier> myToken =
      new Token<TokenIdentifier>(identifier, password, kind, service);
  Text tokenAlias = new Text("myToken");
  credentials.addToken(tokenAlias, myToken);

  // An AM/RM token, which the AM is expected to strip from task credentials.
  Text appTokenService = new Text("localhost:0");
  Token<AMRMTokenIdentifier> appToken =
      new Token<AMRMTokenIdentifier>(identifier, password,
          AMRMTokenIdentifier.KIND_NAME, appTokenService);
  credentials.addToken(appTokenService, appToken);

  Text keyAlias = new Text("mySecretKeyAlias");
  credentials.addSecretKey(keyAlias, "mySecretKey".getBytes());
  Token<? extends TokenIdentifier> storedToken = credentials.getToken(tokenAlias);

  JobConf conf = new JobConf();

  // Write the credentials to the token file that UGI reads at login time, and
  // point HADOOP_TOKEN_FILE_LOCATION at it via the environment hack.
  Path tokenFilePath = new Path(testDir.getAbsolutePath(), "tokens-file");
  Map<String, String> newEnv = new HashMap<String, String>();
  newEnv.put(UserGroupInformation.HADOOP_TOKEN_FILE_LOCATION, tokenFilePath
      .toUri().getPath());
  setNewEnvironmentHack(newEnv);
  credentials.writeTokenStorageFile(tokenFilePath, conf);

  ApplicationId appId = ApplicationId.newInstance(12345, 56);
  ApplicationAttemptId applicationAttemptId =
      ApplicationAttemptId.newInstance(appId, 1);
  ContainerId containerId = ContainerId.newContainerId(applicationAttemptId, 546);
  String userName = UserGroupInformation.getCurrentUser().getShortUserName();

  // Create staging dir, so MRAppMaster doesn't barf.
  File stagingDir =
      new File(MRApps.getStagingAreaDir(conf, userName).toString());
  stagingDir.mkdirs();

  // Set login-user to null as that is how real world MRApp starts with.
  // This is null is the reason why token-file is read by UGI.
  UserGroupInformation.setLoginUser(null);

  MRAppMasterTest appMaster =
      new MRAppMasterTest(applicationAttemptId, containerId, "host", -1, -1,
          System.currentTimeMillis(), false, true);
  MRAppMaster.initAndStartAppMaster(appMaster, conf, userName);

  // Now validate the task credentials
  Credentials appMasterCreds = appMaster.getCredentials();
  Assert.assertNotNull(appMasterCreds);
  Assert.assertEquals(1, appMasterCreds.numberOfSecretKeys());
  Assert.assertEquals(1, appMasterCreds.numberOfTokens());

  // Validate the tokens - app token should not be present
  Token<? extends TokenIdentifier> usedToken = appMasterCreds.getToken(tokenAlias);
  Assert.assertNotNull(usedToken);
  Assert.assertEquals(storedToken, usedToken);

  // Validate the keys
  byte[] usedKey = appMasterCreds.getSecretKey(keyAlias);
  Assert.assertNotNull(usedKey);
  Assert.assertEquals("mySecretKey", new String(usedKey));

  // The credentials should also be added to conf so that OuputCommitter can
  // access it - app token should not be present
  Credentials confCredentials = conf.getCredentials();
  Assert.assertEquals(1, confCredentials.numberOfSecretKeys());
  Assert.assertEquals(1, confCredentials.numberOfTokens());
  Assert.assertEquals(storedToken, confCredentials.getToken(tokenAlias));
  Assert.assertEquals("mySecretKey",
      new String(confCredentials.getSecretKey(keyAlias)));

  // Verify the AM's ugi - app token should be present
  Credentials ugiCredentials = appMaster.getUgi().getCredentials();
  Assert.assertEquals(1, ugiCredentials.numberOfSecretKeys());
  Assert.assertEquals(2, ugiCredentials.numberOfTokens());
  Assert.assertEquals(storedToken, ugiCredentials.getToken(tokenAlias));
  Assert.assertEquals(appToken, ugiCredentials.getToken(appTokenService));
  Assert.assertEquals("mySecretKey",
      new String(ugiCredentials.getSecretKey(keyAlias)));
}
Example 10
Source File: TestGenericOptionsParser.java From big-c with Apache License 2.0 | 4 votes |
/**
 * Tests the {@code -tokenCacheFile} option of the generic options parser: a missing
 * file must be rejected, and a valid credentials file must be registered in the conf
 * and its tokens loaded into the current UGI.
 * @throws IOException
 */
public void testTokenCacheOption() throws IOException {
  FileSystem localFs = FileSystem.getLocal(conf);

  File tmpFile = new File(testDir, "tokenCacheFile");
  if(tmpFile.exists()) {
    tmpFile.delete();
  }
  String[] args = new String[2];
  // pass a files option
  args[0] = "-tokenCacheFile";
  args[1] = tmpFile.toURI().toString();

  // test non existing file
  Throwable th = null;
  try {
    new GenericOptionsParser(conf, args);
  } catch (Exception e) {
    th = e;
  }
  assertNotNull(th);
  assertTrue("FileNotFoundException is not thrown",
      th instanceof FileNotFoundException);

  // create file
  Path tmpPath = localFs.makeQualified(new Path(tmpFile.toString()));
  Token<?> token = new Token<AbstractDelegationTokenIdentifier>(
      "identifier".getBytes(), "password".getBytes(),
      new Text("token-kind"), new Text("token-service"));
  Credentials creds = new Credentials();
  creds.addToken(new Text("token-alias"), token);
  creds.writeTokenStorageFile(tmpPath, conf);

  // Parsing again with the file present must record it in the job conf.
  new GenericOptionsParser(conf, args);
  String fileName = conf.get("mapreduce.job.credentials.binary");
  assertNotNull("files is null", fileName);
  assertEquals("files option does not match", tmpPath.toString(), fileName);

  // The token from the cache file must have been loaded into the current UGI.
  Credentials ugiCreds =
      UserGroupInformation.getCurrentUser().getCredentials();
  assertEquals(1, ugiCreds.numberOfTokens());
  Token<?> ugiToken = ugiCreds.getToken(new Text("token-alias"));
  assertNotNull(ugiToken);
  assertEquals(token, ugiToken);

  localFs.delete(new Path(testDir.getAbsolutePath()), true);
}
Example 11
Source File: UtilsTest.java From flink with Apache License 2.0 | 4 votes |
/**
 * Verifies that {@code Utils.createTaskExecutorContext} filters the YARN AM/RM token
 * out of the credentials handed to the task executor while forwarding the HDFS
 * delegation token.
 */
@Test
public void testCreateTaskExecutorCredentials() throws Exception {
  File root = temporaryFolder.getRoot();
  File home = new File(root, "home");
  boolean created = home.mkdir();
  assertTrue(created);

  Configuration flinkConf = new Configuration();
  YarnConfiguration yarnConf = new YarnConfiguration();

  // Minimal container environment required by createTaskExecutorContext,
  // including a local-resource descriptor for the Flink dist jar.
  Map<String, String> env = new HashMap<>();
  env.put(YarnConfigKeys.ENV_APP_ID, "foo");
  env.put(YarnConfigKeys.ENV_CLIENT_HOME_DIR, home.getAbsolutePath());
  env.put(YarnConfigKeys.ENV_CLIENT_SHIP_FILES, "");
  env.put(YarnConfigKeys.ENV_FLINK_CLASSPATH, "");
  env.put(YarnConfigKeys.ENV_HADOOP_USER_NAME, "foo");
  env.put(YarnConfigKeys.FLINK_DIST_JAR, new YarnLocalResourceDescriptor(
      "flink.jar",
      new Path(root.toURI()),
      0,
      System.currentTimeMillis(),
      LocalResourceVisibility.APPLICATION).toString());
  env = Collections.unmodifiableMap(env);

  File credentialFile = temporaryFolder.newFile("container_tokens");
  final Text amRmTokenKind = AMRMTokenIdentifier.KIND_NAME;
  final Text hdfsDelegationTokenKind = new Text("HDFS_DELEGATION_TOKEN");
  final Text service = new Text("test-service");

  // Write one AM/RM token and one HDFS delegation token into a credentials file,
  // the same format a YARN AM would receive.
  Credentials amCredentials = new Credentials();
  amCredentials.addToken(amRmTokenKind, new Token<>(new byte[4], new byte[4], amRmTokenKind, service));
  amCredentials.addToken(hdfsDelegationTokenKind,
      new Token<>(new byte[4], new byte[4], hdfsDelegationTokenKind, service));
  amCredentials.writeTokenStorageFile(
      new org.apache.hadoop.fs.Path(credentialFile.getAbsolutePath()), yarnConf);

  TaskExecutorProcessSpec spec = TaskExecutorProcessUtils
      .newProcessSpecBuilder(flinkConf)
      .withTotalProcessMemory(MemorySize.parse("1g"))
      .build();
  ContaineredTaskManagerParameters tmParams =
      new ContaineredTaskManagerParameters(spec, new HashMap<>(1));
  Configuration taskManagerConf = new Configuration();

  String workingDirectory = root.getAbsolutePath();
  Class<?> taskManagerMainClass = YarnTaskExecutorRunner.class;
  ContainerLaunchContext ctx;

  // Point HADOOP_TOKEN_FILE_LOCATION at the credentials file while the launch
  // context is built; always restore the original process environment afterwards.
  final Map<String, String> originalEnv = System.getenv();
  try {
    Map<String, String> systemEnv = new HashMap<>(originalEnv);
    systemEnv.put("HADOOP_TOKEN_FILE_LOCATION", credentialFile.getAbsolutePath());
    CommonTestUtils.setEnv(systemEnv);
    ctx = Utils.createTaskExecutorContext(flinkConf, yarnConf, env, tmParams,
        "", workingDirectory, taskManagerMainClass, LOG);
  } finally {
    CommonTestUtils.setEnv(originalEnv);
  }

  // Deserialize the tokens embedded in the resulting launch context.
  Credentials credentials = new Credentials();
  try (DataInputStream dis = new DataInputStream(new ByteArrayInputStream(ctx.getTokens().array()))) {
    credentials.readTokenStorageStream(dis);
  }

  Collection<Token<? extends TokenIdentifier>> tokens = credentials.getAllTokens();
  boolean hasHdfsDelegationToken = false;
  boolean hasAmRmToken = false;
  for (Token<? extends TokenIdentifier> token : tokens) {
    if (token.getKind().equals(amRmTokenKind)) {
      hasAmRmToken = true;
    } else if (token.getKind().equals(hdfsDelegationTokenKind)) {
      hasHdfsDelegationToken = true;
    }
  }
  // HDFS delegation token must be forwarded; the AM/RM token must be filtered out.
  assertTrue(hasHdfsDelegationToken);
  assertFalse(hasAmRmToken);
}
Example 12
Source File: YarnHelixUtils.java From incubator-gobblin with Apache License 2.0 | 3 votes |
/**
 * Persists a single {@link Token} to the given file as a Hadoop credentials store.
 *
 * <p>The token is wrapped in a fresh {@link Credentials} instance, keyed by its own
 * service name, and then serialized with {@link Credentials#writeTokenStorageFile}.
 *
 * @param token the token to write
 * @param tokenFilePath the token file path
 * @param configuration a {@link Configuration} object carrying Hadoop configuration properties
 * @throws IOException if the token file cannot be written
 */
public static void writeTokenToFile(Token<? extends TokenIdentifier> token, Path tokenFilePath,
    Configuration configuration) throws IOException {
  final Credentials creds = new Credentials();
  creds.addToken(token.getService(), token);
  creds.writeTokenStorageFile(tokenFilePath, configuration);
}