Java Code Examples for org.codehaus.jackson.map.ObjectMapper#readValues()
The following examples show how to use org.codehaus.jackson.map.ObjectMapper#readValues(). Given a JsonParser and a target type, readValues() returns an iterator (a MappingIterator) over a sequence of root-level JSON values, so a file of concatenated JSON objects can be streamed one object at a time instead of being loaded whole.
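Before the project examples, here is a minimal, self-contained sketch of the call itself. The class name ReadValuesDemo and the inline JSON are illustrative, not taken from any project below; the API calls (JsonFactory#createJsonParser and ObjectMapper#readValues) are the same ones the examples use.

import java.io.StringReader;
import java.util.Iterator;
import java.util.Map;

import org.codehaus.jackson.JsonFactory;
import org.codehaus.jackson.map.ObjectMapper;

public class ReadValuesDemo {
  public static void main(String[] args) throws Exception {
    // Two root-level JSON objects, back to back, as readValues() expects.
    String json = "{\"name\":\"a\"} {\"name\":\"b\"}";
    ObjectMapper mapper = new ObjectMapper();
    // readValues() binds each root-level value to a Map, one per iteration
    Iterator<Map> it = mapper.readValues(
        new JsonFactory().createJsonParser(new StringReader(json)), Map.class);
    while (it.hasNext()) {
      System.out.println(it.next().get("name"));  // prints "a", then "b"
    }
  }
}

The same pattern drives every example below: create a parser over a file of concatenated JSON objects, then iterate.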
Example 1
Source File: SLSUtils.java, from the hadoop project, Apache License 2.0. An identical copy of this method appears in the big-c project.
/**
 * Parse the SLS trace file, return each host name.
 */
public static Set<String> parseNodesFromSLSTrace(String jobTrace)
    throws IOException {
  Set<String> nodeSet = new HashSet<String>();
  JsonFactory jsonF = new JsonFactory();
  ObjectMapper mapper = new ObjectMapper();
  Reader input = new FileReader(jobTrace);
  try {
    // readValues() iterates over the root-level JSON job objects in the trace
    Iterator<Map> i = mapper.readValues(
        jsonF.createJsonParser(input), Map.class);
    while (i.hasNext()) {
      Map jsonE = i.next();
      List tasks = (List) jsonE.get("job.tasks");
      for (Object o : tasks) {
        Map jsonTask = (Map) o;
        String hostname = jsonTask.get("container.host").toString();
        nodeSet.add(hostname);
      }
    }
  } finally {
    input.close();
  }
  return nodeSet;
}
Example 2
Source File: SLSUtils.java, from the hadoop project, Apache License 2.0. An identical copy of this method appears in the big-c project.
/**
 * Parse the input node file, return each host name.
 */
public static Set<String> parseNodesFromNodeFile(String nodeFile)
    throws IOException {
  Set<String> nodeSet = new HashSet<String>();
  JsonFactory jsonF = new JsonFactory();
  ObjectMapper mapper = new ObjectMapper();
  Reader input = new FileReader(nodeFile);
  try {
    // each root-level JSON object describes one rack and its nodes
    Iterator<Map> i = mapper.readValues(
        jsonF.createJsonParser(input), Map.class);
    while (i.hasNext()) {
      Map jsonE = i.next();
      String rack = "/" + jsonE.get("rack");
      List tasks = (List) jsonE.get("nodes");
      for (Object o : tasks) {
        Map jsonNode = (Map) o;
        nodeSet.add(rack + "/" + jsonNode.get("node"));
      }
    }
  } finally {
    input.close();
  }
  return nodeSet;
}
Example 3
Source File: RumenToSLSConverter.java, from the hadoop project, Apache License 2.0. An identical copy of this method appears in the big-c project.
private static void generateSLSLoadFile(String inputFile, String outputFile)
    throws IOException {
  Reader input = new FileReader(inputFile);
  try {
    Writer output = new FileWriter(outputFile);
    try {
      ObjectMapper mapper = new ObjectMapper();
      ObjectWriter writer = mapper.writerWithDefaultPrettyPrinter();
      // stream Rumen job objects in one at a time, converting and
      // writing each SLS job before reading the next
      Iterator<Map> i = mapper.readValues(
          new JsonFactory().createJsonParser(input), Map.class);
      while (i.hasNext()) {
        Map m = i.next();
        output.write(writer.writeValueAsString(createSLSJob(m)) + EOL);
      }
    } finally {
      output.close();
    }
  } finally {
    input.close();
  }
}
Example 4
Source File: SLSRunner.java, from the hadoop project, Apache License 2.0. An identical copy of this method appears in the big-c project.
/**
 * Parse workload information from SLS trace files.
 */
@SuppressWarnings("unchecked")
private void startAMFromSLSTraces(Resource containerResource,
    int heartbeatInterval) throws IOException {
  // parse from sls traces
  JsonFactory jsonF = new JsonFactory();
  ObjectMapper mapper = new ObjectMapper();
  for (String inputTrace : inputTraces) {
    Reader input = new FileReader(inputTrace);
    try {
      // one root-level JSON object per job in the trace file
      Iterator<Map> i = mapper.readValues(jsonF.createJsonParser(input),
          Map.class);
      while (i.hasNext()) {
        Map jsonJob = i.next();

        // load job information
        long jobStartTime = Long.parseLong(
            jsonJob.get("job.start.ms").toString());
        long jobFinishTime = Long.parseLong(
            jsonJob.get("job.end.ms").toString());

        String user = (String) jsonJob.get("job.user");
        if (user == null) {
          user = "default";
        }
        String queue = jsonJob.get("job.queue.name").toString();

        String oldAppId = jsonJob.get("job.id").toString();
        boolean isTracked = trackedApps.contains(oldAppId);
        int queueSize = queueAppNumMap.containsKey(queue) ?
            queueAppNumMap.get(queue) : 0;
        queueSize++;
        queueAppNumMap.put(queue, queueSize);

        // tasks
        List tasks = (List) jsonJob.get("job.tasks");
        if (tasks == null || tasks.size() == 0) {
          continue;
        }
        List<ContainerSimulator> containerList =
            new ArrayList<ContainerSimulator>();
        for (Object o : tasks) {
          Map jsonTask = (Map) o;
          String hostname = jsonTask.get("container.host").toString();
          long taskStart = Long.parseLong(
              jsonTask.get("container.start.ms").toString());
          long taskFinish = Long.parseLong(
              jsonTask.get("container.end.ms").toString());
          long lifeTime = taskFinish - taskStart;
          int priority = Integer.parseInt(
              jsonTask.get("container.priority").toString());
          String type = jsonTask.get("container.type").toString();
          containerList.add(new ContainerSimulator(containerResource,
              lifeTime, hostname, priority, type));
        }

        // create a new AM
        String amType = jsonJob.get("am.type").toString();
        AMSimulator amSim = (AMSimulator) ReflectionUtils.newInstance(
            amClassMap.get(amType), new Configuration());
        if (amSim != null) {
          amSim.init(AM_ID++, heartbeatInterval, containerList, rm,
              this, jobStartTime, jobFinishTime, user, queue,
              isTracked, oldAppId);
          runner.schedule(amSim);
          maxRuntime = Math.max(maxRuntime, jobFinishTime);
          numTasks += containerList.size();
          amMap.put(oldAppId, amSim);
        }
      }
    } finally {
      input.close();
    }
  }
}