Java Code Examples for org.apache.spark.storage.StorageLevel#MEMORY_ONLY
The following examples show how to use org.apache.spark.storage.StorageLevel#MEMORY_ONLY. The examples are taken from open source projects; the project and source file are noted above each example.
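Before the project examples, here is a minimal, self-contained sketch of the most common use of this storage level: passing it to persist() so an RDD is cached deserialized in executor memory. The application name and input path below are illustrative and not taken from any of the projects that follow.

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.storage.StorageLevel;

public class MemoryOnlyPersistExample {
    public static void main(String[] args) {
        SparkConf conf = new SparkConf().setAppName("MemoryOnlyPersistExample").setMaster("local[*]");
        JavaSparkContext sc = new JavaSparkContext(conf);

        // MEMORY_ONLY caches partitions deserialized in executor memory;
        // partitions that do not fit are recomputed from the lineage instead of spilling to disk.
        JavaRDD<String> lines = sc.textFile("input.txt").persist(StorageLevel.MEMORY_ONLY());

        System.out.println("Line count: " + lines.count());
        sc.close();
    }
}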
Example 1
Source File: GraphXGraphGenerator.java From rya with Apache License 2.0
public Graph<RyaTypeWritable, RyaTypeWritable> createGraph(SparkContext sc, Configuration conf) throws IOException, AccumuloSecurityException {
    // Both the vertex and edge RDDs of the resulting graph are cached in memory only.
    StorageLevel storageLvl1 = StorageLevel.MEMORY_ONLY();
    StorageLevel storageLvl2 = StorageLevel.MEMORY_ONLY();
    ClassTag<RyaTypeWritable> RTWTag = ClassTag$.MODULE$.apply(RyaTypeWritable.class);
    RyaTypeWritable rtw = null;

    RDD<Tuple2<Object, RyaTypeWritable>> vertexRDD = getVertexRDD(sc, conf);

    // Convert the raw edge tuples into GraphX Edge objects before building the graph.
    RDD<Tuple2<Object, Edge>> edgeRDD = getEdgeRDD(sc, conf);
    JavaRDD<Tuple2<Object, Edge>> jrddTuple = edgeRDD.toJavaRDD();
    JavaRDD<Edge<RyaTypeWritable>> jrdd = jrddTuple.map(tuple -> tuple._2);
    RDD<Edge<RyaTypeWritable>> goodERDD = JavaRDD.toRDD(jrdd);

    return Graph.apply(vertexRDD, goodERDD, rtw, storageLvl1, storageLvl2, RTWTag, RTWTag);
}
Example 2
Source File: GrepCaching.java From flink-perf with Apache License 2.0
public static void main(String[] args) {
    String master = args[0];
    String inFile = args[1];
    String outFile = args[2];
    String storageLevel = args[3];

    String[] patterns = new String[args.length - 4];
    System.arraycopy(args, 4, patterns, 0, args.length - 4);
    System.err.println("Starting spark with master=" + master + " in=" + inFile);
    System.err.println("Using patterns: " + Arrays.toString(patterns));

    SparkConf conf = new SparkConf().setAppName("Grep job").setMaster(master).set("spark.hadoop.validateOutputSpecs", "false");
    JavaSparkContext sc = new JavaSparkContext(conf);

    // Map the storage-level name passed on the command line to the corresponding StorageLevel.
    StorageLevel sl;
    switch (storageLevel) {
        case "MEMORY_ONLY":
            sl = StorageLevel.MEMORY_ONLY();
            break;
        case "MEMORY_AND_DISK":
            sl = StorageLevel.MEMORY_AND_DISK();
            break;
        case "MEMORY_ONLY_SER":
            sl = StorageLevel.MEMORY_ONLY_SER();
            break;
        case "MEMORY_AND_DISK_SER":
            sl = StorageLevel.MEMORY_AND_DISK_SER();
            break;
        case "NONE":
            sl = StorageLevel.NONE();
            break;
        default:
            throw new RuntimeException("Unknown storage level " + storageLevel);
    }

    // Persist the input once so every pattern scan reuses the cached data.
    JavaRDD<String> file = sc.textFile(inFile).persist(sl);

    for (int p = 0; p < patterns.length; p++) {
        final String pattern = patterns[p];
        JavaRDD<String> res = file.filter(new Function<String, Boolean>() {
            private static final long serialVersionUID = 1L;
            Pattern p = Pattern.compile(pattern);

            @Override
            public Boolean call(String value) throws Exception {
                if (value == null || value.length() == 0) {
                    return false;
                }
                final Matcher m = p.matcher(value);
                if (m.find()) {
                    return true;
                }
                return false;
            }
        });
        res.saveAsTextFile(outFile + "_" + pattern);
    }
}
Example 3
Source File: Throughput.java From flink-perf with Apache License 2.0
@Override
public StorageLevel storageLevel() {
    return StorageLevel.MEMORY_ONLY();
}
Example 4
Source File: StringListReceiver.java From cxf with Apache License 2.0
public StringListReceiver(List<String> inputStrings) {
    super(StorageLevel.MEMORY_ONLY());
    this.inputStrings = inputStrings;
}
Example 5
Source File: KafkaReceiver.java From kafka-spark-consumer with Apache License 2.0
public KafkaReceiver(KafkaConfig config, int partitionId, KafkaMessageHandler messageHandler) {
    this(config, partitionId, StorageLevel.MEMORY_ONLY(), messageHandler);
}
Example 6
Source File: KafkaRangeReceiver.java From kafka-spark-consumer with Apache License 2.0
public KafkaRangeReceiver(KafkaConfig config, Set<Integer> partitionSet, KafkaMessageHandler messageHandler) {
    this(config, partitionSet, StorageLevel.MEMORY_ONLY(), messageHandler);
}
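Examples 3 through 6 all hand MEMORY_ONLY to a Spark Streaming Receiver, either by overriding storageLevel() or by passing it up through the constructor. As a rough, self-contained sketch of that pattern (the class SingleStringReceiver and its behavior are invented for illustration and are not taken from the projects above), a minimal custom receiver that keeps its blocks in memory only could look like this:

import org.apache.spark.storage.StorageLevel;
import org.apache.spark.streaming.receiver.Receiver;

// Hypothetical receiver that stores a single record using memory-only storage.
public class SingleStringReceiver extends Receiver<String> {
    private final String message;

    public SingleStringReceiver(String message) {
        // The storage level passed here controls how Spark Streaming persists
        // the blocks produced by store().
        super(StorageLevel.MEMORY_ONLY());
        this.message = message;
    }

    @Override
    public void onStart() {
        // Push one record and stop; a real receiver would read from its source
        // in a loop on a background thread.
        store(message);
        stop("Done");
    }

    @Override
    public void onStop() {
        // Nothing to clean up in this sketch.
    }
}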