org.apache.flink.test.util.TestingSecurityContext Java Examples
The following examples show how to use
org.apache.flink.test.util.TestingSecurityContext.
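Each setup below follows the same basic pattern: prepare the Kerberos test environment, point Flink's Kerberos options at the generated keytab and principal, install a TestingSecurityContext, and then run the actual cluster bootstrap under the installed context. The following is a minimal sketch of that shared pattern, distilled from the examples rather than taken from any one of them; the helper name installTestingSecurityContext is made up for illustration, the cluster-specific setup is omitted, and it uses the SecurityConfiguration constructor style of Examples #1 and #2 (import paths reflect the Flink versions those examples target and may differ in other releases).

import java.util.Collections;

import org.apache.flink.configuration.Configuration;
import org.apache.flink.configuration.SecurityOptions;
import org.apache.flink.runtime.security.SecurityConfiguration;
import org.apache.flink.runtime.security.SecurityUtils;
import org.apache.flink.runtime.security.modules.HadoopModule;
import org.apache.flink.test.util.SecureTestEnvironment;
import org.apache.flink.test.util.TestingSecurityContext;

import org.junit.rules.TemporaryFolder;

final class SecureContextSetupSketch {

    // Hypothetical helper distilled from the examples below.
    static void installTestingSecurityContext(
            TemporaryFolder tmp,
            org.apache.hadoop.conf.Configuration hadoopConf) throws Exception {

        // Prepare the Kerberos test environment (keytab and principals) under the temporary folder.
        SecureTestEnvironment.prepare(tmp);

        // Point Flink's Kerberos settings at the generated credentials.
        Configuration flinkConfig = new Configuration();
        flinkConfig.setString(SecurityOptions.KERBEROS_LOGIN_KEYTAB, SecureTestEnvironment.getTestKeytab());
        flinkConfig.setString(SecurityOptions.KERBEROS_LOGIN_PRINCIPAL, SecureTestEnvironment.getHadoopServicePrincipal());

        // Build a security configuration whose Hadoop module uses the test cluster's Hadoop configuration.
        SecurityConfiguration securityConfig = new SecurityConfiguration(
                flinkConfig,
                Collections.singletonList(sc -> new HadoopModule(sc, hadoopConf)));

        // Install the testing security context together with the per-client Kerberos settings.
        TestingSecurityContext.install(securityConfig, SecureTestEnvironment.getClientSecurityConfigurationMap());

        // Cluster bootstrap (HDFS, YARN, Flink mini cluster, ...) then runs under the installed context.
        SecurityUtils.getInstalledContext().runSecured(() -> {
            // cluster-specific startup would go here
            return null;
        });
    }
}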
Example #1
Source File: RollingSinkSecuredITCase.java (from the Flink-CEPplus project, Apache License 2.0)
@BeforeClass
public static void setup() throws Exception {
    skipIfHadoopVersionIsNotAppropriate();

    LOG.info("starting secure cluster environment for testing");

    dataDir = tempFolder.newFolder();
    conf.set(MiniDFSCluster.HDFS_MINIDFS_BASEDIR, dataDir.getAbsolutePath());

    SecureTestEnvironment.prepare(tempFolder);
    populateSecureConfigurations();

    Configuration flinkConfig = new Configuration();
    flinkConfig.setString(SecurityOptions.KERBEROS_LOGIN_KEYTAB,
            SecureTestEnvironment.getTestKeytab());
    flinkConfig.setString(SecurityOptions.KERBEROS_LOGIN_PRINCIPAL,
            SecureTestEnvironment.getHadoopServicePrincipal());

    SecurityConfiguration ctx = new SecurityConfiguration(
            flinkConfig,
            Collections.singletonList(securityConfig -> new HadoopModule(securityConfig, conf)));
    try {
        TestingSecurityContext.install(ctx, SecureTestEnvironment.getClientSecurityConfigurationMap());
    } catch (Exception e) {
        throw new RuntimeException("Exception occurred while setting up secure test context.", e);
    }

    // Write the generated hdfs-site.xml and point HADOOP_CONF_DIR at it so that
    // both the HDFS mini cluster and Flink pick up the secure configuration.
    File hdfsSiteXML = new File(dataDir.getAbsolutePath() + "/hdfs-site.xml");
    FileWriter writer = new FileWriter(hdfsSiteXML);
    conf.writeXml(writer);
    writer.flush();
    writer.close();

    Map<String, String> map = new HashMap<String, String>(System.getenv());
    map.put("HADOOP_CONF_DIR", hdfsSiteXML.getParentFile().getAbsolutePath());
    TestBaseUtils.setEnv(map);

    MiniDFSCluster.Builder builder = new MiniDFSCluster.Builder(conf);
    builder.checkDataNodeAddrConfig(true);
    builder.checkDataNodeHostConfig(true);
    hdfsCluster = builder.build();
    dfs = hdfsCluster.getFileSystem();
    hdfsURI = "hdfs://"
            + NetUtils.hostAndPortToUrlString(hdfsCluster.getURI().getHost(), hdfsCluster.getNameNodePort())
            + "/";

    Configuration configuration = startSecureFlinkClusterWithRecoveryModeEnabled();

    miniClusterResource = new MiniClusterResource(
            new MiniClusterResourceConfiguration.Builder()
                    .setConfiguration(configuration)
                    .setNumberTaskManagers(1)
                    .setNumberSlotsPerTaskManager(4)
                    .build());
    miniClusterResource.before();
}
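This setup combines a Kerberized MiniDFSCluster with a secured Flink mini cluster: the secure Hadoop configuration is written out as hdfs-site.xml and exported through HADOOP_CONF_DIR so that both the HDFS test cluster and the Flink cluster resolve the same settings, and the MiniClusterResource is started only after the TestingSecurityContext has been installed.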
Example #2
Source File: YARNSessionFIFOSecuredITCase.java (from the Flink-CEPplus project, Apache License 2.0)
@BeforeClass
public static void setup() {
    LOG.info("starting secure cluster environment for testing");

    YARN_CONFIGURATION.setClass(YarnConfiguration.RM_SCHEDULER, FifoScheduler.class, ResourceScheduler.class);
    YARN_CONFIGURATION.setInt(YarnConfiguration.NM_PMEM_MB, 768);
    YARN_CONFIGURATION.setInt(YarnConfiguration.RM_SCHEDULER_MINIMUM_ALLOCATION_MB, 512);
    YARN_CONFIGURATION.set(YarnTestBase.TEST_CLUSTER_NAME_KEY, "flink-yarn-tests-fifo-secured");

    SecureTestEnvironment.prepare(tmp);

    populateYarnSecureConfigurations(
            YARN_CONFIGURATION,
            SecureTestEnvironment.getHadoopServicePrincipal(),
            SecureTestEnvironment.getTestKeytab());

    Configuration flinkConfig = new Configuration();
    flinkConfig.setString(SecurityOptions.KERBEROS_LOGIN_KEYTAB,
            SecureTestEnvironment.getTestKeytab());
    flinkConfig.setString(SecurityOptions.KERBEROS_LOGIN_PRINCIPAL,
            SecureTestEnvironment.getHadoopServicePrincipal());

    SecurityConfiguration securityConfig = new SecurityConfiguration(
            flinkConfig,
            Collections.singletonList(securityConfig1 -> {
                // manually override the Hadoop Configuration
                return new HadoopModule(securityConfig1, YARN_CONFIGURATION);
            }));

    try {
        TestingSecurityContext.install(securityConfig, SecureTestEnvironment.getClientSecurityConfigurationMap());

        // Start the secure YARN session under the installed security context.
        SecurityUtils.getInstalledContext().runSecured(new Callable<Object>() {
            @Override
            public Integer call() {
                startYARNSecureMode(
                        YARN_CONFIGURATION,
                        SecureTestEnvironment.getHadoopServicePrincipal(),
                        SecureTestEnvironment.getTestKeytab());
                return null;
            }
        });
    } catch (Exception e) {
        throw new RuntimeException("Exception occurred while setting up secure test context.", e);
    }
}
Example #3
Source File: YARNSessionFIFOSecuredITCase.java (from the flink project, Apache License 2.0)
@BeforeClass
public static void setup() {
    LOG.info("starting secure cluster environment for testing");

    YARN_CONFIGURATION.setClass(YarnConfiguration.RM_SCHEDULER, FifoScheduler.class, ResourceScheduler.class);
    YARN_CONFIGURATION.setInt(YarnConfiguration.NM_PMEM_MB, 768);
    YARN_CONFIGURATION.setInt(YarnConfiguration.RM_SCHEDULER_MINIMUM_ALLOCATION_MB, 512);
    YARN_CONFIGURATION.set(YarnTestBase.TEST_CLUSTER_NAME_KEY, "flink-yarn-tests-fifo-secured");

    SecureTestEnvironment.prepare(tmp);

    populateYarnSecureConfigurations(
            YARN_CONFIGURATION,
            SecureTestEnvironment.getHadoopServicePrincipal(),
            SecureTestEnvironment.getTestKeytab());

    Configuration flinkConfig = new Configuration();
    flinkConfig.setString(SecurityOptions.KERBEROS_LOGIN_KEYTAB,
            SecureTestEnvironment.getTestKeytab());
    flinkConfig.setString(SecurityOptions.KERBEROS_LOGIN_PRINCIPAL,
            SecureTestEnvironment.getHadoopServicePrincipal());

    // Setting customized security module class.
    TestHadoopModuleFactory.hadoopConfiguration = YARN_CONFIGURATION;
    flinkConfig.set(SecurityOptions.SECURITY_MODULE_FACTORY_CLASSES,
            Collections.singletonList("org.apache.flink.yarn.util.TestHadoopModuleFactory"));
    flinkConfig.set(SecurityOptions.SECURITY_CONTEXT_FACTORY_CLASSES,
            Collections.singletonList("org.apache.flink.yarn.util.TestHadoopSecurityContextFactory"));

    SecurityConfiguration securityConfig = new SecurityConfiguration(flinkConfig);

    try {
        TestingSecurityContext.install(securityConfig, SecureTestEnvironment.getClientSecurityConfigurationMap());

        // This is needed to ensure that SecurityUtils are run within a ugi.doAs section.
        // Since we already logged in here in @BeforeClass, even a no-op security context will still work.
        Assert.assertTrue("HadoopSecurityContext must be installed",
                SecurityUtils.getInstalledContext() instanceof HadoopSecurityContext);

        SecurityUtils.getInstalledContext().runSecured(new Callable<Object>() {
            @Override
            public Integer call() {
                startYARNSecureMode(
                        YARN_CONFIGURATION,
                        SecureTestEnvironment.getHadoopServicePrincipal(),
                        SecureTestEnvironment.getTestKeytab());
                return null;
            }
        });
    } catch (Exception e) {
        throw new RuntimeException("Exception occurred while setting up secure test context.", e);
    }
}
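Unlike the previous examples, this variant wires the security module and security context in through configuration options (SecurityOptions.SECURITY_MODULE_FACTORY_CLASSES and SecurityOptions.SECURITY_CONTEXT_FACTORY_CLASSES pointing at test factory classes) rather than passing module factories to the SecurityConfiguration constructor. The assertion then verifies that a HadoopSecurityContext was actually installed, so that startYARNSecureMode runs inside a UserGroupInformation doAs section via runSecured().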