Java Code Examples for org.apache.spark.SparkConf#getDouble()
The following examples show how to use org.apache.spark.SparkConf#getDouble().
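Before the project examples, here is a minimal, self-contained sketch of the call itself: SparkConf#getDouble(String key, double defaultValue) parses the configured value as a double, or returns the supplied default when the key is not set. The key names and values below are illustrative only.

import org.apache.spark.SparkConf;

public class GetDoubleSketch {
    public static void main(String[] args) {
        SparkConf conf = new SparkConf();
        conf.set("spark.memory.fraction", "0.7");

        // Key is present: the string "0.7" is parsed and returned as 0.7.
        double fraction = conf.getDouble("spark.memory.fraction", 0.6);

        // Key is absent: the supplied default 0.5 is returned instead.
        double storageFraction = conf.getDouble("spark.memory.storageFraction", 0.5);

        System.out.println(fraction + " " + storageFraction);
    }
}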
Example 1
Source File: SparkExecutionContext.java (from systemds, Apache License 2.0)
public void analyzeSparkConfiguationLegacy(SparkConf conf) {
	//ensure allocated spark conf
	SparkConf sconf = (conf == null) ? createSystemDSSparkConf() : conf;

	//parse absolute executor memory
	_memExecutor = UtilFunctions.parseMemorySize(
		sconf.get("spark.executor.memory", "1g"));

	//get data and shuffle memory ratios (defaults not specified in job conf)
	double dataFrac = sconf.getDouble("spark.storage.memoryFraction", 0.6); //default 60%
	_memDataMinFrac = dataFrac;
	_memDataMaxFrac = dataFrac;
	_memBroadcastFrac = dataFrac * BROADCAST_DATA_FRACTION; //default 18%

	//analyze spark degree of parallelism
	analyzeSparkParallelismConfiguation(sconf);
}
Example 2
Source File: SparkExecutionContext.java (from systemds, Apache License 2.0)
public void analyzeSparkConfiguation(SparkConf conf) {
	//ensure allocated spark conf
	SparkConf sconf = (conf == null) ? createSystemDSSparkConf() : conf;

	//parse absolute executor memory, incl fixed cut off
	_memExecutor = UtilFunctions.parseMemorySize(
		sconf.get("spark.executor.memory", "1g")) - RESERVED_SYSTEM_MEMORY_BYTES;

	//get data and shuffle memory ratios (defaults not specified in job conf)
	_memDataMinFrac = sconf.getDouble("spark.memory.storageFraction", 0.5); //default 50%
	_memDataMaxFrac = sconf.getDouble("spark.memory.fraction", 0.6); //default 60%
	_memBroadcastFrac = _memDataMaxFrac * BROADCAST_DATA_FRACTION; //default 21%

	//analyze spark degree of parallelism
	analyzeSparkParallelismConfiguation(sconf);
}
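Note the difference between the two SystemDS variants: the legacy method in Example 1 reads the pre-1.6 key spark.storage.memoryFraction, while the method above reads spark.memory.fraction and spark.memory.storageFraction, the keys used by Spark's unified memory manager (Spark 1.6 and later). The defaults passed to getDouble() (0.6 and 0.5) mirror Spark's own documented defaults for recent releases, so the code behaves sensibly when the keys are absent from the job configuration.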
Example 3
Source File: Word2VecVariables.java (from deeplearning4j, Apache License 2.0)
@SuppressWarnings("unchecked")
public static <T> T assignVar(String variableName, SparkConf conf, Class clazz) throws Exception {
    Object ret;
    if (clazz.equals(Integer.class)) {
        ret = conf.getInt(variableName, (Integer) getDefault(variableName));
    } else if (clazz.equals(Double.class)) {
        ret = conf.getDouble(variableName, (Double) getDefault(variableName));
    } else if (clazz.equals(Boolean.class)) {
        ret = conf.getBoolean(variableName, (Boolean) getDefault(variableName));
    } else if (clazz.equals(String.class)) {
        ret = conf.get(variableName, (String) getDefault(variableName));
    } else if (clazz.equals(Long.class)) {
        ret = conf.getLong(variableName, (Long) getDefault(variableName));
    } else {
        throw new Exception("Variable Type not supported. Only boolean, int, double and String supported.");
    }
    return (T) ret;
}
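The helper above dispatches on the requested type and routes Double-typed variables through conf.getDouble(). A hypothetical call site might look like the sketch below; the constant names (NEGATIVE, ALPHA) appear in the later examples, but the surrounding setup is assumed for illustration, and since assignVar declares throws Exception a real caller would handle or propagate that.

SparkConf conf = new SparkConf()
        .set(Word2VecVariables.NEGATIVE, "10")
        .set(Word2VecVariables.ALPHA, "0.05");

// Resolved via conf.getDouble(); falls back to getDefault(NEGATIVE) when the key is unset.
double negative = Word2VecVariables.assignVar(Word2VecVariables.NEGATIVE, conf, Double.class);
double alpha = Word2VecVariables.assignVar(Word2VecVariables.ALPHA, conf, Double.class);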
Example 4
Source File: Word2VecPerformerVoid.java (from deeplearning4j, Apache License 2.0)
public void setup(SparkConf conf) {
    useAdaGrad = conf.getBoolean(ADAGRAD, false);
    negative = conf.getDouble(NEGATIVE, 5);
    numWords = conf.getInt(NUM_WORDS, 1);
    window = conf.getInt(WINDOW, 5);
    alpha = conf.getDouble(ALPHA, 0.025f);
    minAlpha = conf.getDouble(MIN_ALPHA, 1e-2f);
    totalWords = conf.getInt(NUM_WORDS, 1);
    iterations = conf.getInt(ITERATIONS, 5);
    vectorLength = conf.getInt(VECTOR_LENGTH, 100);

    initExpTable();

    if (negative > 0 && conf.contains(TABLE)) {
        ByteArrayInputStream bis = new ByteArrayInputStream(conf.get(TABLE).getBytes());
        DataInputStream dis = new DataInputStream(bis);
        table = Nd4j.read(dis);
    }
}
Example 5
Source File: Word2VecPerformer.java (from deeplearning4j, Apache License 2.0)
public void setup(SparkConf conf) {
    useAdaGrad = conf.getBoolean(Word2VecVariables.ADAGRAD, false);
    negative = conf.getDouble(Word2VecVariables.NEGATIVE, 5);
    numWords = conf.getInt(Word2VecVariables.NUM_WORDS, 1);
    window = conf.getInt(Word2VecVariables.WINDOW, 5);
    alpha = conf.getDouble(Word2VecVariables.ALPHA, 0.025f);
    minAlpha = conf.getDouble(Word2VecVariables.MIN_ALPHA, 1e-2f);
    totalWords = conf.getInt(Word2VecVariables.NUM_WORDS, 1);
    vectorLength = conf.getInt(Word2VecVariables.VECTOR_LENGTH, 100);

    initExpTable();

    if (negative > 0 && conf.contains(Word2VecVariables.TABLE)) {
        ByteArrayInputStream bis = new ByteArrayInputStream(conf.get(Word2VecVariables.TABLE).getBytes());
        DataInputStream dis = new DataInputStream(bis);
        table = Nd4j.read(dis);
    }
}
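A small design note on the two setup() variants: getDouble() takes a double default, so the float literals 0.025f and 1e-2f are simply widened at the call site, as is the integer literal 5 passed for NEGATIVE. The serialized exp table is only read back through Nd4j.read() when negative sampling is enabled and the table was actually placed in the configuration under the TABLE key.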