Java Code Examples for org.apache.hadoop.hive.conf.HiveConf#getBoolVar()
The following examples show how to use org.apache.hadoop.hive.conf.HiveConf#getBoolVar(). Each example is taken from an open-source project; the source file, project, and license are noted above the code.
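HiveConf#getBoolVar() reads a boolean-typed configuration property identified by a HiveConf.ConfVars constant, falling back to that constant's built-in default when the property is unset. It comes in two forms, both visible in the examples below: an instance method on HiveConf, and a static helper that accepts any Hadoop Configuration (handy when only a JobConf or plain Configuration is in scope). A minimal sketch of both forms, using a default HiveConf; the two properties read here were picked arbitrarily from the examples:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;

public class GetBoolVarSketch {
    public static void main(String[] args) {
        // Instance form: read a boolean ConfVars entry from this HiveConf.
        HiveConf hiveConf = new HiveConf();
        boolean doAs = hiveConf.getBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS);

        // Static form: read a boolean ConfVars entry from a plain Hadoop
        // Configuration (e.g. a JobConf), as Example 1 below does.
        Configuration conf = new Configuration();
        boolean setUgi = HiveConf.getBoolVar(conf, ConfVars.METASTORE_EXECUTE_SET_UGI);

        System.out.println("doAs=" + doAs + ", setUgi=" + setUgi);
    }
}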
Example 1
Source File: HiveWriteUtils.java From presto with Apache License 2.0
public static RecordWriter createRecordWriter(Path target, JobConf conf, Properties properties, String outputFormatName, ConnectorSession session)
{
    try {
        boolean compress = HiveConf.getBoolVar(conf, COMPRESSRESULT);
        if (outputFormatName.equals(MapredParquetOutputFormat.class.getName())) {
            return createParquetWriter(target, conf, properties, session);
        }
        if (outputFormatName.equals(HiveIgnoreKeyTextOutputFormat.class.getName())) {
            return new TextRecordWriter(target, conf, properties, compress);
        }
        if (outputFormatName.equals(HiveSequenceFileOutputFormat.class.getName())) {
            return new SequenceFileRecordWriter(target, conf, Text.class, compress);
        }
        if (outputFormatName.equals(AvroContainerOutputFormat.class.getName())) {
            return new AvroRecordWriter(target, conf, compress, properties);
        }
        Object writer = Class.forName(outputFormatName).getConstructor().newInstance();
        return ((HiveOutputFormat<?, ?>) writer).getHiveRecordWriter(conf, target, Text.class, compress, properties, Reporter.NULL);
    }
    catch (IOException | ReflectiveOperationException e) {
        throw new PrestoException(HIVE_WRITER_DATA_ERROR, e);
    }
}
Example 2
Source File: HiveStoragePlugin.java From dremio-oss with Apache License 2.0
public HiveStoragePlugin(HiveConf hiveConf, PluginManager pf4jManager, SabotContext context, String name) {
  super(context, name);
  this.isCoordinator = context.isCoordinator();
  this.hiveConf = hiveConf;
  this.pf4jManager = pf4jManager;
  this.sabotConfig = context.getConfig();
  this.hiveSettings = new HiveSettings(context.getOptionManager());
  this.optionManager = context.getOptionManager();
  this.dremioConfig = context.getDremioConfig();
  storageImpersonationEnabled = hiveConf.getBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS);

  // Hive Metastore impersonation is enabled if:
  // - "hive.security.authorization.enabled" is set to true,
  // - "hive.metastore.execute.setugi" is set to true (in SASL disabled scenarios) or
  // - "hive.metastore.sasl.enabled" is set to true, in which case all metastore calls
  //   are impersonated as the authenticated user.
  this.metastoreImpersonationEnabled =
      hiveConf.getBoolVar(ConfVars.HIVE_AUTHORIZATION_ENABLED)
          || hiveConf.getBoolVar(ConfVars.METASTORE_EXECUTE_SET_UGI)
          || hiveConf.getBoolVar(ConfVars.METASTORE_USE_THRIFT_SASL);
}
Example 3
Source File: Hive3StoragePlugin.java From dremio-oss with Apache License 2.0
public Hive3StoragePlugin(HiveConf hiveConf, PluginManager pf4jManager, SabotContext context, String name) {
  super(context, name);
  this.isCoordinator = context.isCoordinator();
  this.hiveConf = hiveConf;
  this.pf4jManager = pf4jManager;
  this.sabotConfig = context.getConfig();
  this.hiveSettings = new HiveSettings(context.getOptionManager());
  this.optionManager = context.getOptionManager();
  this.dremioConfig = context.getDremioConfig();
  storageImpersonationEnabled = hiveConf.getBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS);

  // Hive Metastore impersonation is enabled if:
  // - "hive.security.authorization.enabled" is set to true,
  // - "hive.metastore.execute.setugi" is set to true (in SASL disabled scenarios) or
  // - "hive.metastore.sasl.enabled" is set to true, in which case all metastore calls
  //   are impersonated as the authenticated user.
  this.metastoreImpersonationEnabled =
      hiveConf.getBoolVar(ConfVars.HIVE_AUTHORIZATION_ENABLED)
          || hiveConf.getBoolVar(ConfVars.METASTORE_EXECUTE_SET_UGI)
          || hiveConf.getBoolVar(ConfVars.METASTORE_USE_THRIFT_SASL);
}
Example 4
Source File: HiveAuthzBinding.java From incubator-sentry with Apache License 2.0
private void validateHiveMetaStoreConfig(HiveConf hiveConf, HiveAuthzConf authzConf)
    throws InvalidConfigurationException {
  boolean isTestingMode = Boolean.parseBoolean(Strings.nullToEmpty(
      authzConf.get(AuthzConfVars.SENTRY_TESTING_MODE.getVar())).trim());
  LOG.debug("Testing mode is " + isTestingMode);
  if (!isTestingMode) {
    boolean sasl = hiveConf.getBoolVar(ConfVars.METASTORE_USE_THRIFT_SASL);
    if (!sasl) {
      throw new InvalidConfigurationException(
          ConfVars.METASTORE_USE_THRIFT_SASL + " can't be false in non-testing mode");
    }
  } else {
    boolean setUgi = hiveConf.getBoolVar(ConfVars.METASTORE_EXECUTE_SET_UGI);
    if (!setUgi) {
      throw new InvalidConfigurationException(
          ConfVars.METASTORE_EXECUTE_SET_UGI.toString() + " can't be false in non secure mode");
    }
  }
}
Example 5
Source File: HiveAuthzBinding.java From incubator-sentry with Apache License 2.0
private void validateHiveServer2Config(HiveConf hiveConf, HiveAuthzConf authzConf)
    throws InvalidConfigurationException {
  boolean isTestingMode = Boolean.parseBoolean(Strings.nullToEmpty(
      authzConf.get(AuthzConfVars.SENTRY_TESTING_MODE.getVar())).trim());
  LOG.debug("Testing mode is " + isTestingMode);
  if (!isTestingMode) {
    String authMethod = Strings.nullToEmpty(
        hiveConf.getVar(ConfVars.HIVE_SERVER2_AUTHENTICATION)).trim();
    if ("none".equalsIgnoreCase(authMethod)) {
      throw new InvalidConfigurationException(ConfVars.HIVE_SERVER2_AUTHENTICATION
          + " can't be none in non-testing mode");
    }
    boolean impersonation = hiveConf.getBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS);
    boolean allowImpersonation = Boolean.parseBoolean(Strings.nullToEmpty(
        authzConf.get(AuthzConfVars.AUTHZ_ALLOW_HIVE_IMPERSONATION.getVar())).trim());
    if (impersonation && !allowImpersonation) {
      LOG.error("Role based authorization does not work with HiveServer2 impersonation");
      throw new InvalidConfigurationException(ConfVars.HIVE_SERVER2_ENABLE_DOAS
          + " can't be set to true in non-testing mode");
    }
  }
  String defaultUmask = hiveConf.get(CommonConfigurationKeys.FS_PERMISSIONS_UMASK_KEY);
  if ("077".equalsIgnoreCase(defaultUmask)) {
    LOG.error("HiveServer2 required a default umask of 077");
    throw new InvalidConfigurationException(CommonConfigurationKeys.FS_PERMISSIONS_UMASK_KEY
        + " should be 077 in non-testing mode");
  }
}
Example 6
Source File: SentryAuthorizerFactory.java From incubator-sentry with Apache License 2.0
private HiveAuthzSessionContext applyTestSettings(HiveAuthzSessionContext ctx, HiveConf conf) {
  if (conf.getBoolVar(ConfVars.HIVE_TEST_AUTHORIZATION_SQLSTD_HS2_MODE)
      && ctx.getClientType() == CLIENT_TYPE.HIVECLI) {
    // create new session ctx object with HS2 as client type
    HiveAuthzSessionContext.Builder ctxBuilder = new HiveAuthzSessionContext.Builder(ctx);
    ctxBuilder.setClientType(CLIENT_TYPE.HIVESERVER2);
    return ctxBuilder.build();
  }
  return ctx;
}
Example 7
Source File: SentryAuthorizerFactory.java From incubator-sentry with Apache License 2.0
private void assertHiveCliAuthDisabled(HiveConf conf, HiveAuthzSessionContext ctx)
    throws HiveAuthzPluginException {
  if (ctx.getClientType() == CLIENT_TYPE.HIVECLI
      && conf.getBoolVar(ConfVars.HIVE_AUTHORIZATION_ENABLED)) {
    throw new HiveAuthzPluginException(
        "SQL standards based authorization should not be enabled from hive cli. "
            + "Instead the use of storage based authorization in hive metastore is recommended. Set "
            + ConfVars.HIVE_AUTHORIZATION_ENABLED.varname + "=false to disable authz within cli");
  }
}
Example 8
Source File: DefaultSentryAccessController.java From incubator-sentry with Apache License 2.0
@Override
public void applyAuthorizationConfigPolicy(HiveConf hiveConf) throws HiveAuthzPluginException {
  // Apply the rest of the configuration only to HiveServer2
  if (ctx.getClientType() != CLIENT_TYPE.HIVESERVER2
      || !hiveConf.getBoolVar(ConfVars.HIVE_AUTHORIZATION_ENABLED)) {
    throw new HiveAuthzPluginException("Sentry only supports hiveserver2");
  }
}
Example 9
Source File: NiFiOrcUtils.java From localization_nifi with Apache License 2.0
public static OrcFlowFileWriter createWriter(OutputStream flowFileOutputStream,
                                             Path path,
                                             Configuration conf,
                                             TypeInfo orcSchema,
                                             long stripeSize,
                                             CompressionKind compress,
                                             int bufferSize) throws IOException {

    int rowIndexStride = HiveConf.getIntVar(conf, HIVE_ORC_DEFAULT_ROW_INDEX_STRIDE);
    boolean addBlockPadding = HiveConf.getBoolVar(conf, HIVE_ORC_DEFAULT_BLOCK_PADDING);

    String versionName = HiveConf.getVar(conf, HIVE_ORC_WRITE_FORMAT);
    OrcFile.Version versionValue = (versionName == null)
            ? OrcFile.Version.CURRENT
            : OrcFile.Version.byName(versionName);

    OrcFile.EncodingStrategy encodingStrategy;
    String enString = conf.get(HiveConf.ConfVars.HIVE_ORC_ENCODING_STRATEGY.varname);
    if (enString == null) {
        encodingStrategy = OrcFile.EncodingStrategy.SPEED;
    } else {
        encodingStrategy = OrcFile.EncodingStrategy.valueOf(enString);
    }

    OrcFile.CompressionStrategy compressionStrategy;
    String compString = conf.get(HiveConf.ConfVars.HIVE_ORC_COMPRESSION_STRATEGY.varname);
    if (compString == null) {
        compressionStrategy = OrcFile.CompressionStrategy.SPEED;
    } else {
        compressionStrategy = OrcFile.CompressionStrategy.valueOf(compString);
    }

    float paddingTolerance;
    paddingTolerance = conf.getFloat(HiveConf.ConfVars.HIVE_ORC_BLOCK_PADDING_TOLERANCE.varname,
            HiveConf.ConfVars.HIVE_ORC_BLOCK_PADDING_TOLERANCE.defaultFloatVal);

    long blockSizeValue = HiveConf.getLongVar(conf, HIVE_ORC_DEFAULT_BLOCK_SIZE);
    double bloomFilterFpp = BloomFilterIO.DEFAULT_FPP;
    ObjectInspector inspector = OrcStruct.createObjectInspector(orcSchema);

    return new OrcFlowFileWriter(flowFileOutputStream,
            path,
            conf,
            inspector,
            stripeSize,
            compress,
            bufferSize,
            rowIndexStride,
            getMemoryManager(conf),
            addBlockPadding,
            versionValue,
            null, // no callback
            encodingStrategy,
            compressionStrategy,
            paddingTolerance,
            blockSizeValue,
            null, // no Bloom Filter column names
            bloomFilterFpp);
}
Example 10
Source File: HiveAuthorizationHelper.java From dremio-oss with Apache License 2.0
public HiveAuthorizationHelper(final IMetaStoreClient mClient, final HiveConf hiveConf, final String user) {
  authzEnabled = hiveConf.getBoolVar(ConfVars.HIVE_AUTHORIZATION_ENABLED);
  if (!authzEnabled) {
    authorizerV2 = null;
    return;
  }

  try (final ContextClassLoaderSwapper cls = ContextClassLoaderSwapper.newInstance()) {
    final HiveConf hiveConfCopy = new HiveConf(hiveConf);
    hiveConfCopy.set("user.name", user);
    hiveConfCopy.set("proxy.user.name", user);

    final HiveAuthenticationProvider authenticator = HiveUtils.getAuthenticator(hiveConfCopy,
        HiveConf.ConfVars.HIVE_AUTHENTICATOR_MANAGER);

    // This must be retrieved before creating the session state, because creation of the
    // session state changes the given HiveConf's classloader to a UDF ClassLoader.
    final HiveAuthorizerFactory authorizerFactory = HiveUtils.getAuthorizerFactory(hiveConfCopy,
        HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER);

    SessionState ss = new SessionState(hiveConfCopy, user);
    authenticator.setSessionState(ss);

    HiveAuthzSessionContext.Builder authzContextBuilder = new HiveAuthzSessionContext.Builder();
    authzContextBuilder.setClientType(CLIENT_TYPE.HIVESERVER2); // Dremio is emulating HS2 here

    authorizerV2 = authorizerFactory.createHiveAuthorizer(
        new HiveMetastoreClientFactory() {
          @Override
          public IMetaStoreClient getHiveMetastoreClient() throws HiveAuthzPluginException {
            return mClient;
          }
        },
        hiveConf, authenticator, authzContextBuilder.build());

    authorizerV2.applyAuthorizationConfigPolicy(hiveConfCopy);
  } catch (final HiveException e) {
    throw new RuntimeException("Failed to initialize Hive authorization components: " + e.getMessage(), e);
  }

  logger.trace("Hive authorization enabled");
}
Example 11
Source File: HiveTestService.java From hudi with Apache License 2.0
public TServer startMetaStore(String forceBindIP, int port, HiveConf conf) throws IOException {
  try {
    // Server will create new threads up to max as necessary. After an idle
    // period, it will destroy threads to keep the number of threads in the
    // pool to min.
    int minWorkerThreads = conf.getIntVar(HiveConf.ConfVars.METASTORESERVERMINTHREADS);
    int maxWorkerThreads = conf.getIntVar(HiveConf.ConfVars.METASTORESERVERMAXTHREADS);
    boolean tcpKeepAlive = conf.getBoolVar(HiveConf.ConfVars.METASTORE_TCP_KEEP_ALIVE);
    boolean useFramedTransport = conf.getBoolVar(HiveConf.ConfVars.METASTORE_USE_THRIFT_FRAMED_TRANSPORT);

    // don't support SASL yet
    // boolean useSasl = conf.getBoolVar(HiveConf.ConfVars.METASTORE_USE_THRIFT_SASL);

    TServerTransport serverTransport;
    if (forceBindIP != null) {
      InetSocketAddress address = new InetSocketAddress(forceBindIP, port);
      serverTransport = tcpKeepAlive ? new TServerSocketKeepAlive(address) : new TServerSocket(address);
    } else {
      serverTransport = tcpKeepAlive ? new TServerSocketKeepAlive(port) : new TServerSocket(port);
    }

    TProcessor processor;
    TTransportFactory transFactory;

    HiveMetaStore.HMSHandler baseHandler = new HiveMetaStore.HMSHandler("new db based metaserver", conf, false);
    IHMSHandler handler = RetryingHMSHandler.getProxy(conf, baseHandler, true);

    if (conf.getBoolVar(HiveConf.ConfVars.METASTORE_EXECUTE_SET_UGI)) {
      transFactory = useFramedTransport
          ? new ChainedTTransportFactory(new TFramedTransport.Factory(), new TUGIContainingTransport.Factory())
          : new TUGIContainingTransport.Factory();
      processor = new TUGIBasedProcessor<>(handler);
      LOG.info("Starting DB backed MetaStore Server with SetUGI enabled");
    } else {
      transFactory = useFramedTransport ? new TFramedTransport.Factory() : new TTransportFactory();
      processor = new TSetIpAddressProcessor<>(handler);
      LOG.info("Starting DB backed MetaStore Server");
    }

    TThreadPoolServer.Args args = new TThreadPoolServer.Args(serverTransport)
        .processor(processor)
        .transportFactory(transFactory)
        .protocolFactory(new TBinaryProtocol.Factory())
        .minWorkerThreads(minWorkerThreads)
        .maxWorkerThreads(maxWorkerThreads);

    final TServer tServer = new TThreadPoolServer(args);
    executorService.submit(tServer::serve);
    return tServer;
  } catch (Throwable x) {
    throw new IOException(x);
  }
}
Example 12
Source File: NiFiOrcUtils.java From nifi with Apache License 2.0
public static OrcFlowFileWriter createWriter(OutputStream flowFileOutputStream,
                                             Path path,
                                             Configuration conf,
                                             TypeInfo orcSchema,
                                             long stripeSize,
                                             CompressionKind compress,
                                             int bufferSize) throws IOException {

    int rowIndexStride = HiveConf.getIntVar(conf, HIVE_ORC_DEFAULT_ROW_INDEX_STRIDE);
    boolean addBlockPadding = HiveConf.getBoolVar(conf, HIVE_ORC_DEFAULT_BLOCK_PADDING);

    String versionName = HiveConf.getVar(conf, HIVE_ORC_WRITE_FORMAT);
    OrcFile.Version versionValue = (versionName == null)
            ? OrcFile.Version.CURRENT
            : OrcFile.Version.byName(versionName);

    OrcFile.EncodingStrategy encodingStrategy;
    String enString = conf.get(HiveConf.ConfVars.HIVE_ORC_ENCODING_STRATEGY.varname);
    if (enString == null) {
        encodingStrategy = OrcFile.EncodingStrategy.SPEED;
    } else {
        encodingStrategy = OrcFile.EncodingStrategy.valueOf(enString);
    }

    OrcFile.CompressionStrategy compressionStrategy;
    String compString = conf.get(HiveConf.ConfVars.HIVE_ORC_COMPRESSION_STRATEGY.varname);
    if (compString == null) {
        compressionStrategy = OrcFile.CompressionStrategy.SPEED;
    } else {
        compressionStrategy = OrcFile.CompressionStrategy.valueOf(compString);
    }

    float paddingTolerance;
    paddingTolerance = conf.getFloat(HiveConf.ConfVars.HIVE_ORC_BLOCK_PADDING_TOLERANCE.varname,
            HiveConf.ConfVars.HIVE_ORC_BLOCK_PADDING_TOLERANCE.defaultFloatVal);

    long blockSizeValue = HiveConf.getLongVar(conf, HIVE_ORC_DEFAULT_BLOCK_SIZE);
    double bloomFilterFpp = BloomFilterIO.DEFAULT_FPP;
    ObjectInspector inspector = OrcStruct.createObjectInspector(orcSchema);

    return new OrcFlowFileWriter(flowFileOutputStream,
            path,
            conf,
            inspector,
            stripeSize,
            compress,
            bufferSize,
            rowIndexStride,
            getMemoryManager(conf),
            addBlockPadding,
            versionValue,
            null, // no callback
            encodingStrategy,
            compressionStrategy,
            paddingTolerance,
            blockSizeValue,
            null, // no Bloom Filter column names
            bloomFilterFpp);
}
Example 13
Source File: HiveService.java From kite with Apache License 2.0
public TServer startMetaStore(String forceBindIP, int port, HiveConf conf) throws IOException {
  try {
    // Server will create new threads up to max as necessary. After an idle
    // period, it will destroy threads to keep the number of threads in the
    // pool to min.
    int minWorkerThreads = conf.getIntVar(HiveConf.ConfVars.METASTORESERVERMINTHREADS);
    int maxWorkerThreads = conf.getIntVar(HiveConf.ConfVars.METASTORESERVERMAXTHREADS);
    boolean tcpKeepAlive = conf.getBoolVar(HiveConf.ConfVars.METASTORE_TCP_KEEP_ALIVE);
    boolean useFramedTransport = conf.getBoolVar(HiveConf.ConfVars.METASTORE_USE_THRIFT_FRAMED_TRANSPORT);

    // don't support SASL yet
    // boolean useSasl = conf.getBoolVar(HiveConf.ConfVars.METASTORE_USE_THRIFT_SASL);

    TServerTransport serverTransport;
    if (forceBindIP != null) {
      InetSocketAddress address = new InetSocketAddress(forceBindIP, port);
      serverTransport = tcpKeepAlive ? new TServerSocketKeepAlive(address) : new TServerSocket(address);
    } else {
      serverTransport = tcpKeepAlive ? new TServerSocketKeepAlive(port) : new TServerSocket(port);
    }

    TProcessor processor;
    TTransportFactory transFactory;

    IHMSHandler handler = (IHMSHandler) HiveMetaStore
        .newRetryingHMSHandler("new db based metaserver", conf, true);

    if (conf.getBoolVar(HiveConf.ConfVars.METASTORE_EXECUTE_SET_UGI)) {
      transFactory = useFramedTransport
          ? new ChainedTTransportFactory(new TFramedTransport.Factory(), new TUGIContainingTransport.Factory())
          : new TUGIContainingTransport.Factory();
      processor = new TUGIBasedProcessor<IHMSHandler>(handler);
      LOG.info("Starting DB backed MetaStore Server with SetUGI enabled");
    } else {
      transFactory = useFramedTransport ? new TFramedTransport.Factory() : new TTransportFactory();
      processor = new TSetIpAddressProcessor<IHMSHandler>(handler);
      LOG.info("Starting DB backed MetaStore Server");
    }

    TThreadPoolServer.Args args = new TThreadPoolServer.Args(serverTransport)
        .processor(processor)
        .transportFactory(transFactory)
        .protocolFactory(new TBinaryProtocol.Factory())
        .minWorkerThreads(minWorkerThreads)
        .maxWorkerThreads(maxWorkerThreads);

    final TServer tServer = new TThreadPoolServer(args);
    executorService.submit(new Runnable() {
      @Override
      public void run() {
        tServer.serve();
      }
    });
    return tServer;
  } catch (Throwable x) {
    throw new IOException(x);
  }
}