Java Code Examples for org.apache.kylin.common.KylinConfig#isZKLocal()
The following examples show how to use
org.apache.kylin.common.KylinConfig#isZKLocal().
You can vote up the ones you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example. You may also check out the related API usage on the sidebar.
Example 1
Source File: ZKUtil.java From kylin-on-parquet-v2 with Apache License 2.0 | 5 votes |
/**
 * Creates a Curator ZooKeeper client for the given Kylin configuration.
 *
 * <p>When the configuration marks ZooKeeper as local, an embedded testing
 * server is started first so the connect string resolves to something live.
 *
 * @param config the active Kylin configuration
 * @return a {@link CuratorFramework} built from the configured connect string
 */
public static CuratorFramework getZookeeperClient(KylinConfig config) {
    final RetryPolicy policy = getRetryPolicy(config);
    if (config.isZKLocal()) {
        // Local/sandbox mode: spin up the in-process ZK testing server.
        startTestingServer();
    }
    return getZookeeperClient(getZKConnectString(config), policy);
}
Example 2
Source File: NSparkExecutable.java From kylin-on-parquet-v2 with Apache License 2.0 | 4 votes |
@Override protected ExecuteResult doWork(ExecutableContext context) throws ExecuteException { //context.setLogPath(getSparkDriverLogHdfsPath(context.getConfig())); final KylinConfig config = wrapConfig(context); String sparkHome = KylinConfig.getSparkHome(); if (StringUtils.isEmpty(sparkHome) && !config.isUTEnv()) { throw new RuntimeException("Missing spark home"); } String kylinJobJar = config.getKylinParquetJobJarPath(); if (!config.isUTEnv() && StringUtils.isEmpty(kylinJobJar) && !config.isUTEnv()) { throw new RuntimeException("Missing kylin parquet job jar"); } String hadoopConf = System.getProperty("kylin.hadoop.conf.dir"); logger.info("write hadoop conf is {} ", config.getBuildConf()); if (!config.getBuildConf().isEmpty()) { logger.info("write hadoop conf is {} ", config.getBuildConf()); hadoopConf = config.getBuildConf(); } if (StringUtils.isEmpty(hadoopConf) && !config.isUTEnv() && !config.isZKLocal()) { throw new RuntimeException( "kylin_hadoop_conf_dir is empty, check if there's error in the output of 'kylin.sh start'"); } File hiveConfFile = new File(hadoopConf, "hive-site.xml"); if (!hiveConfFile.exists() && !config.isUTEnv() && !config.isZKLocal()) { throw new RuntimeException("Cannot find hive-site.xml in kylin_hadoop_conf_dir: " + hadoopConf + // ". 
In order to enable spark cubing, you must set kylin.env.hadoop-conf-dir to a dir which contains at least core-site.xml, hdfs-site.xml, hive-site.xml, mapred-site.xml, yarn-site.xml"); } String jars = getJars(); if (StringUtils.isEmpty(jars)) { jars = kylinJobJar; } deleteJobTmpDirectoryOnExists(); onExecuteStart(context); try { attachMetadataAndKylinProps(config); } catch (IOException e) { throw new ExecuteException("meta dump failed", e); } String filePath = dumpArgs(); if (config.isUTEnv() || config.isZKLocal()) { return runLocalMode(filePath, config); } else { logger.info("Task id: {}", getId()); killOrphanApplicationIfExists(config, getId()); return runSparkSubmit(config, hadoopConf, jars, kylinJobJar, "-className " + getSparkSubmitClassName() + " " + filePath, getParent().getId()); } }