Java Code Examples for org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.ObjectMapper#readTree()
The following examples show how to use org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.ObjectMapper#readTree().
The examples are drawn from open-source projects; the source file, originating project, and license are noted above each example.
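Before the project-specific examples, here is a minimal, self-contained sketch of the readTree() pattern that most of the snippets below follow. The JSON payload and field names are made up for illustration; only the ObjectMapper/JsonNode calls are the point.

import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.JsonNode;
import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.ObjectMapper;

public class ReadTreeSketch {

    public static void main(String[] args) throws Exception {
        // Illustrative payload; the field names are invented for this sketch.
        String json = "{\"taskmanagers\": [{\"id\": \"tm-1\", \"slotsNumber\": 4}]}";

        ObjectMapper mapper = new ObjectMapper();

        // readTree() parses the document into a navigable JsonNode tree
        // without binding it to a Java class.
        JsonNode root = mapper.readTree(json);

        // Navigate the tree with get()/asText()/asInt(), as the examples below do.
        JsonNode firstTaskManager = root.get("taskmanagers").get(0);
        System.out.println(firstTaskManager.get("id").asText());         // tm-1
        System.out.println(firstTaskManager.get("slotsNumber").asInt()); // 4
    }
}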
Example 1
Source File: WebFrontendITCase.java From flink with Apache License 2.0
@Test
public void getTaskmanagers() throws Exception {
    String json = TestBaseUtils.getFromHTTP("http://localhost:" + getRestPort() + "/taskmanagers/");

    ObjectMapper mapper = new ObjectMapper();
    JsonNode parsed = mapper.readTree(json);
    ArrayNode taskManagers = (ArrayNode) parsed.get("taskmanagers");
    assertNotNull(taskManagers);
    assertEquals(NUM_TASK_MANAGERS, taskManagers.size());

    JsonNode taskManager = taskManagers.get(0);
    assertNotNull(taskManager);
    assertEquals(NUM_SLOTS, taskManager.get("slotsNumber").asInt());
    assertTrue(taskManager.get("freeSlots").asInt() <= NUM_SLOTS);
}
Example 2
Source File: JsonRowSchemaConverter.java From flink with Apache License 2.0
/**
 * Converts a JSON schema into Flink's type information. Throws an exception if the schema
 * cannot be converted because of loss of precision or a too flexible schema.
 *
 * <p>The converter can resolve simple schema references to solve those cases where entities
 * are defined at the beginning and then used throughout a document.
 */
@SuppressWarnings("unchecked")
public static <T> TypeInformation<T> convert(String jsonSchema) {
    Preconditions.checkNotNull(jsonSchema, "JSON schema");
    final ObjectMapper mapper = new ObjectMapper();
    mapper.getFactory()
        .enable(JsonParser.Feature.ALLOW_COMMENTS)
        .enable(JsonParser.Feature.ALLOW_UNQUOTED_FIELD_NAMES)
        .enable(JsonParser.Feature.ALLOW_SINGLE_QUOTES);
    final JsonNode node;
    try {
        node = mapper.readTree(jsonSchema);
    } catch (IOException e) {
        throw new IllegalArgumentException("Invalid JSON schema.", e);
    }
    return (TypeInformation<T>) convertType("<root>", node, node);
}
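For orientation, a hypothetical call to the converter above might look like the following fragment; the schema string is invented for illustration, and the exact shape of the returned type information is an assumption rather than something taken from the original tests.

// Hypothetical usage; ALLOW_UNQUOTED_FIELD_NAMES and ALLOW_SINGLE_QUOTES are enabled above,
// so this relaxed schema string parses.
String schema = "{ type: 'object', properties: { id: { type: 'integer' }, name: { type: 'string' } } }";

// For an object schema, the converter is expected to produce row-style type information.
TypeInformation<Row> rowType = JsonRowSchemaConverter.convert(schema);
System.out.println(rowType);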
Example 3
Source File: WebMonitorUtils.java From flink with Apache License 2.0
public static Map<String, String> fromKeyValueJsonArray(String jsonString) {
    try {
        Map<String, String> map = new HashMap<>();
        ObjectMapper m = new ObjectMapper();
        ArrayNode array = (ArrayNode) m.readTree(jsonString);

        Iterator<JsonNode> elements = array.elements();
        while (elements.hasNext()) {
            JsonNode node = elements.next();
            String key = node.get("key").asText();
            String value = node.get("value").asText();
            map.put(key, value);
        }

        return map;
    } catch (Exception e) {
        throw new RuntimeException(e.getMessage(), e);
    }
}
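To make the expected input shape concrete, here is a hypothetical invocation of fromKeyValueJsonArray; the payload is invented for illustration, but it matches the array of {"key": ..., "value": ...} objects that the loop above reads.

// Hypothetical input: a JSON array of {"key": ..., "value": ...} objects.
String payload = "[{\"key\": \"host\", \"value\": \"localhost\"},"
        + " {\"key\": \"port\", \"value\": \"8081\"}]";

Map<String, String> config = WebMonitorUtils.fromKeyValueJsonArray(payload);
System.out.println(config.get("host")); // localhost
System.out.println(config.get("port")); // 8081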
Example 4
Source File: WebFrontendITCase.java From flink with Apache License 2.0
@Test
public void getTaskManagerLogAndStdoutFiles() {
    try {
        String json = TestBaseUtils.getFromHTTP("http://localhost:" + getRestPort() + "/taskmanagers/");

        ObjectMapper mapper = new ObjectMapper();
        JsonNode parsed = mapper.readTree(json);
        ArrayNode taskManagers = (ArrayNode) parsed.get("taskmanagers");
        JsonNode taskManager = taskManagers.get(0);
        String id = taskManager.get("id").asText();

        WebMonitorUtils.LogFileLocation logFiles = WebMonitorUtils.LogFileLocation.find(CLUSTER_CONFIGURATION);

        // we check for job manager log files, since no separate taskmanager logs exist
        FileUtils.writeStringToFile(logFiles.logFile, "job manager log");
        String logs = TestBaseUtils.getFromHTTP("http://localhost:" + getRestPort() + "/taskmanagers/" + id + "/log");
        assertTrue(logs.contains("job manager log"));

        FileUtils.writeStringToFile(logFiles.stdOutFile, "job manager out");
        logs = TestBaseUtils.getFromHTTP("http://localhost:" + getRestPort() + "/taskmanagers/" + id + "/stdout");
        assertTrue(logs.contains("job manager out"));
    } catch (Exception e) {
        e.printStackTrace();
        fail(e.getMessage());
    }
}
Example 5
Source File: JsonRowSchemaConverter.java From Flink-CEPplus with Apache License 2.0
/**
 * Converts a JSON schema into Flink's type information. Throws an exception if the schema
 * cannot be converted because of loss of precision or a too flexible schema.
 *
 * <p>The converter can resolve simple schema references to solve those cases where entities
 * are defined at the beginning and then used throughout a document.
 */
@SuppressWarnings("unchecked")
public static <T> TypeInformation<T> convert(String jsonSchema) {
    Preconditions.checkNotNull(jsonSchema, "JSON schema");
    final ObjectMapper mapper = new ObjectMapper();
    mapper.getFactory()
        .enable(JsonParser.Feature.ALLOW_COMMENTS)
        .enable(JsonParser.Feature.ALLOW_UNQUOTED_FIELD_NAMES)
        .enable(JsonParser.Feature.ALLOW_SINGLE_QUOTES);
    final JsonNode node;
    try {
        node = mapper.readTree(jsonSchema);
    } catch (IOException e) {
        throw new IllegalArgumentException("Invalid JSON schema.", e);
    }
    return (TypeInformation<T>) convertType("<root>", node, node);
}
Example 6
Source File: WebFrontendITCase.java From flink with Apache License 2.0
@Test
public void getNumberOfTaskManagers() {
    try {
        String json = TestBaseUtils.getFromHTTP("http://localhost:" + getRestPort() + "/taskmanagers/");

        ObjectMapper mapper = new ObjectMapper();
        JsonNode response = mapper.readTree(json);

        ArrayNode taskManagers = (ArrayNode) response.get("taskmanagers");
        assertNotNull(taskManagers);
        assertEquals(NUM_TASK_MANAGERS, taskManagers.size());
    } catch (Exception e) {
        e.printStackTrace();
        fail(e.getMessage());
    }
}
Example 7
Source File: JsonRowSchemaConverter.java From flink with Apache License 2.0
/**
 * Converts a JSON schema into Flink's type information. Throws an exception if the schema
 * cannot be converted because of loss of precision or a too flexible schema.
 *
 * <p>The converter can resolve simple schema references to solve those cases where entities
 * are defined at the beginning and then used throughout a document.
 */
@SuppressWarnings("unchecked")
public static <T> TypeInformation<T> convert(String jsonSchema) {
    Preconditions.checkNotNull(jsonSchema, "JSON schema");
    final ObjectMapper mapper = new ObjectMapper();
    mapper.getFactory()
        .enable(JsonParser.Feature.ALLOW_COMMENTS)
        .enable(JsonParser.Feature.ALLOW_UNQUOTED_FIELD_NAMES)
        .enable(JsonParser.Feature.ALLOW_SINGLE_QUOTES);
    final JsonNode node;
    try {
        node = mapper.readTree(jsonSchema);
    } catch (IOException e) {
        throw new IllegalArgumentException("Invalid JSON schema.", e);
    }
    return (TypeInformation<T>) convertType("<root>", node, node);
}
Example 8
Source File: WebFrontendITCase.java From flink with Apache License 2.0
@Test
public void getTaskManagers() throws Exception {
    String json = TestBaseUtils.getFromHTTP("http://localhost:" + getRestPort() + "/taskmanagers/");

    ObjectMapper mapper = new ObjectMapper();
    JsonNode parsed = mapper.readTree(json);
    ArrayNode taskManagers = (ArrayNode) parsed.get("taskmanagers");
    assertNotNull(taskManagers);
    assertEquals(NUM_TASK_MANAGERS, taskManagers.size());

    JsonNode taskManager = taskManagers.get(0);
    assertNotNull(taskManager);
    assertEquals(NUM_SLOTS, taskManager.get("slotsNumber").asInt());
    assertTrue(taskManager.get("freeSlots").asInt() <= NUM_SLOTS);
}
Example 9
Source File: WebMonitorUtils.java From Flink-CEPplus with Apache License 2.0
public static Map<String, String> fromKeyValueJsonArray(String jsonString) {
    try {
        Map<String, String> map = new HashMap<>();
        ObjectMapper m = new ObjectMapper();
        ArrayNode array = (ArrayNode) m.readTree(jsonString);

        Iterator<JsonNode> elements = array.elements();
        while (elements.hasNext()) {
            JsonNode node = elements.next();
            String key = node.get("key").asText();
            String value = node.get("value").asText();
            map.put(key, value);
        }

        return map;
    } catch (Exception e) {
        throw new RuntimeException(e.getMessage(), e);
    }
}
Example 10
Source File: WebFrontendITCase.java From Flink-CEPplus with Apache License 2.0
@Test
public void getTaskManagerLogAndStdoutFiles() {
    try {
        String json = TestBaseUtils.getFromHTTP("http://localhost:" + getRestPort() + "/taskmanagers/");

        ObjectMapper mapper = new ObjectMapper();
        JsonNode parsed = mapper.readTree(json);
        ArrayNode taskManagers = (ArrayNode) parsed.get("taskmanagers");
        JsonNode taskManager = taskManagers.get(0);
        String id = taskManager.get("id").asText();

        WebMonitorUtils.LogFileLocation logFiles = WebMonitorUtils.LogFileLocation.find(CLUSTER_CONFIGURATION);

        // we check for job manager log files, since no separate taskmanager logs exist
        FileUtils.writeStringToFile(logFiles.logFile, "job manager log");
        String logs = TestBaseUtils.getFromHTTP("http://localhost:" + getRestPort() + "/taskmanagers/" + id + "/log");
        assertTrue(logs.contains("job manager log"));

        FileUtils.writeStringToFile(logFiles.stdOutFile, "job manager out");
        logs = TestBaseUtils.getFromHTTP("http://localhost:" + getRestPort() + "/taskmanagers/" + id + "/stdout");
        assertTrue(logs.contains("job manager out"));
    } catch (Exception e) {
        e.printStackTrace();
        fail(e.getMessage());
    }
}
Example 11
Source File: WebFrontendITCase.java From Flink-CEPplus with Apache License 2.0
@Test
public void getTaskmanagers() throws Exception {
    String json = TestBaseUtils.getFromHTTP("http://localhost:" + getRestPort() + "/taskmanagers/");

    ObjectMapper mapper = new ObjectMapper();
    JsonNode parsed = mapper.readTree(json);
    ArrayNode taskManagers = (ArrayNode) parsed.get("taskmanagers");
    assertNotNull(taskManagers);
    assertEquals(NUM_TASK_MANAGERS, taskManagers.size());

    JsonNode taskManager = taskManagers.get(0);
    assertNotNull(taskManager);
    assertEquals(NUM_SLOTS, taskManager.get("slotsNumber").asInt());
    assertTrue(taskManager.get("freeSlots").asInt() <= NUM_SLOTS);
}
Example 12
Source File: WebFrontendITCase.java From Flink-CEPplus with Apache License 2.0
@Test
public void getNumberOfTaskManagers() {
    try {
        String json = TestBaseUtils.getFromHTTP("http://localhost:" + getRestPort() + "/taskmanagers/");

        ObjectMapper mapper = new ObjectMapper();
        JsonNode response = mapper.readTree(json);

        ArrayNode taskManagers = (ArrayNode) response.get("taskmanagers");
        assertNotNull(taskManagers);
        assertEquals(NUM_TASK_MANAGERS, taskManagers.size());
    } catch (Exception e) {
        e.printStackTrace();
        fail(e.getMessage());
    }
}
Example 13
Source File: JsonModuleTest.java From stateful-functions with Apache License 2.0
private static StatefulFunctionModule fromPath(String path) {
    URL moduleUrl = JsonModuleTest.class.getClassLoader().getResource(path);
    ObjectMapper mapper = JsonServiceLoader.mapper();
    final JsonNode json;
    try {
        json = mapper.readTree(moduleUrl);
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
    JsonNode spec = json.at("/module/spec");
    return new JsonModule(spec, moduleUrl);
}
Example 14
Source File: ProtobufKafkaSourceProviderTest.java From stateful-functions with Apache License 2.0
private static JsonNode fromPath(String path) {
    URL moduleUrl = ProtobufKafkaSourceProvider.class.getClassLoader().getResource(path);
    ObjectMapper mapper = new ObjectMapper(new YAMLFactory());
    try {
        return mapper.readTree(moduleUrl);
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
}
Example 15
Source File: YamlUtils.java From flink-statefun with Apache License 2.0
public static JsonNode loadAsJsonFromClassResource(ClassLoader classLoader, String pathToYaml) {
    URL moduleUrl = classLoader.getResource(pathToYaml);
    ObjectMapper mapper = new ObjectMapper(new YAMLFactory());
    try {
        return mapper.readTree(moduleUrl);
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
}
Example 16
Source File: JsonServiceLoader.java From flink-statefun with Apache License 2.0
/**
 * Read a {@code StatefulFunction} module definition.
 *
 * <p>A valid resource module definition has to contain the metadata associated with this module,
 * such as its type.
 */
private static JsonNode readAndValidateModuleTree(ObjectMapper mapper, URL moduleYamlFile)
        throws IOException {
    JsonNode root = mapper.readTree(moduleYamlFile);
    validateMeta(moduleYamlFile, root);
    return root;
}
Example 17
Source File: JsonJobGraphGenerationTest.java From flink with Apache License 2.0
@Override
public void validateJson(String json) throws Exception {
    final Map<String, JsonNode> idToNode = new HashMap<>();

    // validate the produced JSON
    ObjectMapper m = new ObjectMapper();
    JsonNode rootNode = m.readTree(json);

    JsonNode idField = rootNode.get("jid");
    JsonNode nameField = rootNode.get("name");
    JsonNode arrayField = rootNode.get("nodes");

    assertNotNull(idField);
    assertNotNull(nameField);
    assertNotNull(arrayField);

    assertTrue(idField.isTextual());
    assertTrue(nameField.isTextual());
    assertTrue(arrayField.isArray());

    ArrayNode array = (ArrayNode) arrayField;
    Iterator<JsonNode> iter = array.elements();
    while (iter.hasNext()) {
        JsonNode vertex = iter.next();

        JsonNode vertexIdField = vertex.get("id");
        JsonNode parallelismField = vertex.get("parallelism");
        JsonNode contentsFields = vertex.get("description");
        JsonNode operatorField = vertex.get("operator");

        assertNotNull(vertexIdField);
        assertTrue(vertexIdField.isTextual());
        assertNotNull(parallelismField);
        assertTrue(parallelismField.isNumber());
        assertNotNull(contentsFields);
        assertTrue(contentsFields.isTextual());
        assertNotNull(operatorField);
        assertTrue(operatorField.isTextual());

        if (contentsFields.asText().startsWith("Sync")) {
            assertEquals(1, parallelismField.asInt());
        } else {
            assertEquals(expectedParallelism, parallelismField.asInt());
        }

        idToNode.put(vertexIdField.asText(), vertex);
    }

    assertEquals(numNodes, idToNode.size());

    // check that all inputs are contained
    for (JsonNode node : idToNode.values()) {
        JsonNode inputsField = node.get("inputs");
        if (inputsField != null) {
            Iterator<JsonNode> inputsIter = inputsField.elements();
            while (inputsIter.hasNext()) {
                JsonNode inputNode = inputsIter.next();
                JsonNode inputIdField = inputNode.get("id");

                assertNotNull(inputIdField);
                assertTrue(inputIdField.isTextual());
                String inputIdString = inputIdField.asText();
                assertTrue(idToNode.containsKey(inputIdString));
            }
        }
    }
}
Example 18
Source File: JsonGeneratorTest.java From flink with Apache License 2.0
@Test
public void testGeneratorWithoutAnyAttachements() {
    try {
        JobVertex source1 = new JobVertex("source 1");

        JobVertex source2 = new JobVertex("source 2");
        source2.setInvokableClass(DummyInvokable.class);

        JobVertex source3 = new JobVertex("source 3");

        JobVertex intermediate1 = new JobVertex("intermediate 1");
        JobVertex intermediate2 = new JobVertex("intermediate 2");

        JobVertex join1 = new JobVertex("join 1");
        JobVertex join2 = new JobVertex("join 2");

        JobVertex sink1 = new JobVertex("sink 1");
        JobVertex sink2 = new JobVertex("sink 2");

        intermediate1.connectNewDataSetAsInput(source1, DistributionPattern.POINTWISE, ResultPartitionType.PIPELINED);
        intermediate2.connectNewDataSetAsInput(source2, DistributionPattern.ALL_TO_ALL, ResultPartitionType.PIPELINED);

        join1.connectNewDataSetAsInput(intermediate1, DistributionPattern.POINTWISE, ResultPartitionType.BLOCKING);
        join1.connectNewDataSetAsInput(intermediate2, DistributionPattern.ALL_TO_ALL, ResultPartitionType.BLOCKING);

        join2.connectNewDataSetAsInput(join1, DistributionPattern.POINTWISE, ResultPartitionType.PIPELINED);
        join2.connectNewDataSetAsInput(source3, DistributionPattern.POINTWISE, ResultPartitionType.BLOCKING);

        sink1.connectNewDataSetAsInput(join2, DistributionPattern.POINTWISE, ResultPartitionType.PIPELINED);
        sink2.connectNewDataSetAsInput(join1, DistributionPattern.ALL_TO_ALL, ResultPartitionType.PIPELINED);

        JobGraph jg = new JobGraph("my job", source1, source2, source3,
                intermediate1, intermediate2, join1, join2, sink1, sink2);

        String plan = JsonPlanGenerator.generatePlan(jg);
        assertNotNull(plan);

        // validate the produced JSON
        ObjectMapper m = new ObjectMapper();
        JsonNode rootNode = m.readTree(plan);

        // core fields
        assertEquals(new TextNode(jg.getJobID().toString()), rootNode.get("jid"));
        assertEquals(new TextNode(jg.getName()), rootNode.get("name"));

        assertTrue(rootNode.path("nodes").isArray());

        for (Iterator<JsonNode> iter = rootNode.path("nodes").elements(); iter.hasNext(); ) {
            JsonNode next = iter.next();

            JsonNode idNode = next.get("id");
            assertNotNull(idNode);
            assertTrue(idNode.isTextual());
            checkVertexExists(idNode.asText(), jg);

            String description = next.get("description").asText();
            assertTrue(description.startsWith("source") ||
                    description.startsWith("sink") ||
                    description.startsWith("intermediate") ||
                    description.startsWith("join"));
        }
    } catch (Exception e) {
        e.printStackTrace();
        fail(e.getMessage());
    }
}
Example 19
Source File: JsonJobGraphGenerationTest.java From Flink-CEPplus with Apache License 2.0
@Override
public void validateJson(String json) throws Exception {
    final Map<String, JsonNode> idToNode = new HashMap<>();

    // validate the produced JSON
    ObjectMapper m = new ObjectMapper();
    JsonNode rootNode = m.readTree(json);

    JsonNode idField = rootNode.get("jid");
    JsonNode nameField = rootNode.get("name");
    JsonNode arrayField = rootNode.get("nodes");

    assertNotNull(idField);
    assertNotNull(nameField);
    assertNotNull(arrayField);

    assertTrue(idField.isTextual());
    assertTrue(nameField.isTextual());
    assertTrue(arrayField.isArray());

    ArrayNode array = (ArrayNode) arrayField;
    Iterator<JsonNode> iter = array.elements();
    while (iter.hasNext()) {
        JsonNode vertex = iter.next();

        JsonNode vertexIdField = vertex.get("id");
        JsonNode parallelismField = vertex.get("parallelism");
        JsonNode contentsFields = vertex.get("description");
        JsonNode operatorField = vertex.get("operator");

        assertNotNull(vertexIdField);
        assertTrue(vertexIdField.isTextual());
        assertNotNull(parallelismField);
        assertTrue(parallelismField.isNumber());
        assertNotNull(contentsFields);
        assertTrue(contentsFields.isTextual());
        assertNotNull(operatorField);
        assertTrue(operatorField.isTextual());

        if (contentsFields.asText().startsWith("Sync")) {
            assertEquals(1, parallelismField.asInt());
        } else {
            assertEquals(expectedParallelism, parallelismField.asInt());
        }

        idToNode.put(vertexIdField.asText(), vertex);
    }

    assertEquals(numNodes, idToNode.size());

    // check that all inputs are contained
    for (JsonNode node : idToNode.values()) {
        JsonNode inputsField = node.get("inputs");
        if (inputsField != null) {
            Iterator<JsonNode> inputsIter = inputsField.elements();
            while (inputsIter.hasNext()) {
                JsonNode inputNode = inputsIter.next();
                JsonNode inputIdField = inputNode.get("id");

                assertNotNull(inputIdField);
                assertTrue(inputIdField.isTextual());
                String inputIdString = inputIdField.asText();
                assertTrue(idToNode.containsKey(inputIdString));
            }
        }
    }
}
Example 20
Source File: JsonServiceLoader.java From stateful-functions with Apache License 2.0
/**
 * Read a {@code StatefulFunction} module definition.
 *
 * <p>A valid resource module definition has to contain the following sections:
 *
 * <ul>
 *   <li>meta - contains the metadata associated with this module, such as its type.
 *   <li>spec - a specification of the module, i.e. the defined functions, routers, etc.
 * </ul>
 *
 * <p>If any of these sections are missing, this is considered an invalid module definition;
 * in addition, a type is a mandatory field of a module spec.
 */
private static JsonNode readAndValidateModuleTree(ObjectMapper mapper, URL moduleYamlFile)
        throws IOException {
    JsonNode root = mapper.readTree(moduleYamlFile);
    validateMeta(moduleYamlFile, root);
    validateSpec(moduleYamlFile, root);
    return root;
}