Java Code Examples for org.apache.spark.SparkConf#get()
The following examples show how to use
org.apache.spark.SparkConf#get().
You can vote up the examples you find useful or vote down the ones you don't,
and you can navigate to the original project or source file via the links above each example. You may also check out the related API usage on the sidebar.
Example 1
Source File: UtilHelpers.java From hudi with Apache License 2.0 | 6 votes |
/**
 * Builds a {@link SparkConf} for Hudi utility jobs: resolves the master, enables YARN
 * event logging when applicable, applies Kryo/compression defaults, overlays any
 * caller-supplied configs, and registers Hudi's serializable classes.
 *
 * @param appName           Spark application name.
 * @param defaultMaster     master URL to use when none was supplied externally.
 * @param additionalConfigs extra key/value pairs applied last (they win over the defaults set here).
 * @return the fully configured SparkConf.
 */
private static SparkConf buildSparkConf(String appName, String defaultMaster, Map<String, String> additionalConfigs) {
  final SparkConf sparkConf = new SparkConf().setAppName(appName);
  // Respect an externally supplied master (e.g. via spark-submit) before falling back to the default.
  String master = sparkConf.get("spark.master", defaultMaster);
  sparkConf.setMaster(master);
  if (master.startsWith("yarn")) {
    sparkConf.set("spark.eventLog.overwrite", "true");
    sparkConf.set("spark.eventLog.enabled", "true");
  }
  sparkConf.setIfMissing("spark.driver.maxResultSize", "2g");
  sparkConf.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer");
  sparkConf.set("spark.hadoop.mapred.output.compress", "true");
  // Fix: the original first set this codec key to the bogus value "true" and then
  // immediately overwrote it with the real codec class; the dead assignment is removed.
  sparkConf.set("spark.hadoop.mapred.output.compression.codec", "org.apache.hadoop.io.compress.GzipCodec");
  sparkConf.set("spark.hadoop.mapred.output.compression.type", "BLOCK");
  // Caller-provided configs are applied last so they override the defaults above.
  additionalConfigs.forEach(sparkConf::set);
  return HoodieWriteClient.registerClasses(sparkConf);
}
Example 2
Source File: Word2VecVariables.java From deeplearning4j with Apache License 2.0 | 6 votes |
/**
 * Reads a typed configuration value from the given {@link SparkConf}, falling back to the
 * registered default (via {@code getDefault}) when the key is absent.
 *
 * @param variableName the configuration key to read.
 * @param conf         the SparkConf to read from.
 * @param clazz        the expected value type; one of Integer, Double, Boolean, String or Long.
 * @param <T>          the return type, matching {@code clazz}.
 * @return the configured or default value, cast to {@code T}.
 * @throws Exception if {@code clazz} is not one of the supported types.
 */
@SuppressWarnings("unchecked")
public static <T> T assignVar(String variableName, SparkConf conf, Class<?> clazz) throws Exception {
  Object ret;
  if (clazz.equals(Integer.class)) {
    ret = conf.getInt(variableName, (Integer) getDefault(variableName));
  } else if (clazz.equals(Double.class)) {
    ret = conf.getDouble(variableName, (Double) getDefault(variableName));
  } else if (clazz.equals(Boolean.class)) {
    ret = conf.getBoolean(variableName, (Boolean) getDefault(variableName));
  } else if (clazz.equals(String.class)) {
    ret = conf.get(variableName, (String) getDefault(variableName));
  } else if (clazz.equals(Long.class)) {
    ret = conf.getLong(variableName, (Long) getDefault(variableName));
  } else {
    // Fix: the original message omitted "long" even though Long is handled above.
    throw new Exception("Variable Type not supported. Only boolean, int, long, double and String supported.");
  }
  return (T) ret;
}
Example 3
Source File: PSRpcFactory.java From systemds with Apache License 2.0 | 5 votes |
/**
 * Creates a parameter-server RPC proxy connected to the Spark driver.
 *
 * @param conf Spark configuration supplying the driver host and RPC timeouts.
 * @param port port the parameter server listens on.
 * @param aRPC accumulator used to count RPC calls.
 * @return a proxy wrapping a transport client to the driver.
 * @throws IOException if the client connection cannot be established.
 */
public static SparkPSProxy createSparkPSProxy(SparkConf conf, int port, LongAccumulator aRPC) throws IOException {
  // Prefer the explicit RPC ask timeout; otherwise fall back to the general
  // network timeout with Spark's documented 120s default.
  final long timeoutMs;
  if (conf.contains("spark.rpc.askTimeout")) {
    timeoutMs = conf.getTimeAsMs("spark.rpc.askTimeout");
  } else {
    timeoutMs = conf.getTimeAsMs("spark.network.timeout", "120s");
  }
  final String driverHost = conf.get("spark.driver.host");
  final TransportContext transportContext = createTransportContext(conf, new LocalParamServer());
  return new SparkPSProxy(transportContext.createClientFactory().createClient(driverHost, port), timeoutMs, aRPC);
}
Example 4
Source File: EmbeddedTimelineService.java From hudi with Apache License 2.0 | 5 votes |
/**
 * Overrides this service's host address with the driver bind address published in the
 * Spark configuration, when one is available; otherwise leaves the current value intact.
 *
 * @param sparkConf the Spark configuration to read the driver host from.
 */
private void setHostAddrFromSparkConf(SparkConf sparkConf) {
  final String driverHost = sparkConf.get("spark.driver.host", null);
  // Guard clause: nothing to override when Spark did not publish a driver host.
  if (driverHost == null) {
    LOG.warn("Unable to find driver bind address from spark config");
    return;
  }
  LOG.info("Overriding hostIp to (" + driverHost + ") found in spark-conf. It was " + this.hostAddr);
  this.hostAddr = driverHost;
}
Example 5
Source File: TimelineServerPerf.java From hudi with Apache License 2.0 | 5 votes |
/**
 * Replaces the current host address with the driver bind address from the Spark
 * configuration, if present; logs a warning and keeps the existing value otherwise.
 *
 * @param sparkConf configuration expected to carry {@code spark.driver.host}.
 */
private void setHostAddrFromSparkConf(SparkConf sparkConf) {
  final String configuredHost = sparkConf.get("spark.driver.host", null);
  if (configuredHost != null) {
    LOG.info("Overriding hostIp to (" + configuredHost + ") found in spark-conf. It was " + this.hostAddr);
    this.hostAddr = configuredHost;
  } else {
    LOG.warn("Unable to find driver bind address from spark config");
  }
}
Example 6
Source File: PSRpcFactory.java From systemds with Apache License 2.0 | 5 votes |
/**
 * Builds a {@link SparkPSProxy} whose transport client targets the Spark driver host
 * on the given port, using the configured RPC timeout.
 *
 * @param conf Spark configuration providing host and timeout settings.
 * @param port parameter-server port on the driver host.
 * @param aRPC accumulator for RPC bookkeeping.
 * @return the connected proxy.
 * @throws IOException on connection failure.
 */
public static SparkPSProxy createSparkPSProxy(SparkConf conf, int port, LongAccumulator aRPC) throws IOException {
  // Resolve the effective RPC timeout: explicit ask timeout wins over the
  // general network timeout (120s default).
  final boolean hasAskTimeout = conf.contains("spark.rpc.askTimeout");
  final long rpcTimeout = hasAskTimeout
      ? conf.getTimeAsMs("spark.rpc.askTimeout")
      : conf.getTimeAsMs("spark.network.timeout", "120s");
  final String host = conf.get("spark.driver.host");
  final TransportContext context = createTransportContext(conf, new LocalParamServer());
  final TransportClient client = context.createClientFactory().createClient(host, port);
  return new SparkPSProxy(client, rpcTimeout, aRPC);
}
Example 7
Source File: SparkEngineBase.java From beakerx with Apache License 2.0 | 4 votes |
/**
 * Returns {@code true} when the configured Spark master denotes a local execution
 * mode (a value starting with "local").
 *
 * @param sparkConf the Spark configuration to inspect.
 * @return whether the master is set, non-null, and local.
 */
private static boolean isLocalSpark(SparkConf sparkConf) {
  if (!sparkConf.contains(SPARK_MASTER)) {
    return false;
  }
  // Fix: the original mixed the SPARK_MASTER constant with the literal
  // "spark.master" for the same key, and read the config twice; use the
  // constant consistently and read the value once.
  String master = sparkConf.get(SPARK_MASTER);
  return master != null && master.startsWith("local");
}