Java Code Examples for org.nd4j.shade.jackson.databind.ObjectMapper#readTree()
The following examples show how to use
org.nd4j.shade.jackson.databind.ObjectMapper#readTree().
You can vote up the examples you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example. You may also check out the related API usage in the sidebar.
Example 1
Source File: Hdf5Archive.java From deeplearning4j with Apache License 2.0 | 5 votes |
/** * Read JSON-formatted string attribute. * * @param attribute HDF5 attribute to read as JSON formatted string. * @return JSON formatted string from HDF5 attribute * @throws UnsupportedKerasConfigurationException Unsupported Keras config */ private String readAttributeAsJson(Attribute attribute) throws UnsupportedKerasConfigurationException { synchronized (Hdf5Archive.LOCK_OBJECT) { VarLenType vl = attribute.getVarLenType(); int currBufferLength = 2048; String s; /* TODO: find a less hacky way to do this. * Reading variable length strings (from attributes) is a giant * pain. There does not appear to be any way to determine the * length of the string in advance, so we use a hack: choose a * buffer size and read the config. If Jackson fails to parse * it, then we must not have read the entire config. Increase * buffer and repeat. */ while (true) { byte[] attrBuffer = new byte[currBufferLength]; BytePointer attrPointer = new BytePointer(currBufferLength); attribute.read(vl, attrPointer); attrPointer.get(attrBuffer); s = new String(attrBuffer); ObjectMapper mapper = new ObjectMapper(); mapper.enable(DeserializationFeature.FAIL_ON_READING_DUP_TREE_KEY); try { mapper.readTree(s); break; } catch (IOException e) { //OK - we don't know how long the buffer needs to be, so we'll try again with larger buffer } if(currBufferLength == MAX_BUFFER_SIZE_BYTES){ throw new UnsupportedKerasConfigurationException("Could not read abnormally long HDF5 attribute: size exceeds " + currBufferLength + " bytes"); } else { currBufferLength = (int)Math.min(MAX_BUFFER_SIZE_BYTES, currBufferLength * 4L); } } vl.deallocate(); return s; } }
Example 2
Source File: DataAnalysis.java From DataVec with Apache License 2.0 | 4 votes |
private static DataAnalysis fromMapper(ObjectMapper om, String json) { List<ColumnMetaData> meta = new ArrayList<>(); List<ColumnAnalysis> analysis = new ArrayList<>(); try { JsonNode node = om.readTree(json); Iterator<String> fieldNames = node.fieldNames(); boolean hasDataAnalysis = false; while (fieldNames.hasNext()) { if ("DataAnalysis".equals(fieldNames.next())) { hasDataAnalysis = true; break; } } if (!hasDataAnalysis) { throw new RuntimeException(); } ArrayNode arrayNode = (ArrayNode) node.get("DataAnalysis"); for (int i = 0; i < arrayNode.size(); i++) { JsonNode analysisNode = arrayNode.get(i); String name = analysisNode.get(COL_NAME).asText(); int idx = analysisNode.get(COL_IDX).asInt(); ColumnType type = ColumnType.valueOf(analysisNode.get(COL_TYPE).asText()); JsonNode daNode = analysisNode.get(ANALYSIS); ColumnAnalysis dataAnalysis = om.treeToValue(daNode, ColumnAnalysis.class); if (type == ColumnType.Categorical) { ArrayNode an = (ArrayNode) analysisNode.get(CATEGORICAL_STATE_NAMES); List<String> stateNames = new ArrayList<>(an.size()); Iterator<JsonNode> iter = an.elements(); while (iter.hasNext()) { stateNames.add(iter.next().asText()); } meta.add(new CategoricalMetaData(name, stateNames)); } else { meta.add(type.newColumnMetaData(name)); } analysis.add(dataAnalysis); } } catch (Exception e) { throw new RuntimeException(e); } Schema schema = new Schema(meta); return new DataAnalysis(schema, analysis); }
Example 3
Source File: DataAnalysis.java From deeplearning4j with Apache License 2.0 | 4 votes |
private static DataAnalysis fromMapper(ObjectMapper om, String json) { List<ColumnMetaData> meta = new ArrayList<>(); List<ColumnAnalysis> analysis = new ArrayList<>(); try { JsonNode node = om.readTree(json); Iterator<String> fieldNames = node.fieldNames(); boolean hasDataAnalysis = false; while (fieldNames.hasNext()) { if ("DataAnalysis".equals(fieldNames.next())) { hasDataAnalysis = true; break; } } if (!hasDataAnalysis) { throw new RuntimeException(); } ArrayNode arrayNode = (ArrayNode) node.get("DataAnalysis"); for (int i = 0; i < arrayNode.size(); i++) { JsonNode analysisNode = arrayNode.get(i); String name = analysisNode.get(COL_NAME).asText(); int idx = analysisNode.get(COL_IDX).asInt(); ColumnType type = ColumnType.valueOf(analysisNode.get(COL_TYPE).asText()); JsonNode daNode = analysisNode.get(ANALYSIS); ColumnAnalysis dataAnalysis = om.treeToValue(daNode, ColumnAnalysis.class); if (type == ColumnType.Categorical) { ArrayNode an = (ArrayNode) analysisNode.get(CATEGORICAL_STATE_NAMES); List<String> stateNames = new ArrayList<>(an.size()); Iterator<JsonNode> iter = an.elements(); while (iter.hasNext()) { stateNames.add(iter.next().asText()); } meta.add(new CategoricalMetaData(name, stateNames)); } else { meta.add(type.newColumnMetaData(name)); } analysis.add(dataAnalysis); } } catch (Exception e) { throw new RuntimeException(e); } Schema schema = new Schema(meta); return new DataAnalysis(schema, analysis); }
Example 4
Source File: MultiLayerConfiguration.java From deeplearning4j with Apache License 2.0 | 4 votes |
/** * Handle {@link WeightInit} and {@link Distribution} from legacy configs in Json format. Copied from handling of {@link Activation} * above. * @return True if all is well and layer iteration shall continue. False else-wise. */ private static boolean handleLegacyWeightInitFromJson(String json, Layer l, ObjectMapper mapper, JsonNode confs, int layerCount) { if ((l instanceof BaseLayer) && ((BaseLayer) l).getWeightInitFn() == null) { try { JsonNode jsonNode = mapper.readTree(json); if (confs == null) { confs = jsonNode.get("confs"); } if (confs instanceof ArrayNode) { ArrayNode layerConfs = (ArrayNode) confs; JsonNode outputLayerNNCNode = layerConfs.get(layerCount); if (outputLayerNNCNode == null) return false; //Should never happen... JsonNode layerWrapperNode = outputLayerNNCNode.get("layer"); if (layerWrapperNode == null || layerWrapperNode.size() != 1) { return true; } JsonNode layerNode = layerWrapperNode.elements().next(); JsonNode weightInit = layerNode.get("weightInit"); //Should only have 1 element: "dense", "output", etc JsonNode distribution = layerNode.get("dist"); Distribution dist = null; if(distribution != null) { dist = mapper.treeToValue(distribution, Distribution.class); } if (weightInit != null) { final IWeightInit wi = WeightInit.valueOf(weightInit.asText()).getWeightInitFunction(dist); ((BaseLayer) l).setWeightInitFn(wi); } } } catch (IOException e) { log.warn("Layer with null WeightInit detected: " + l.getLayerName() + ", could not parse JSON", e); } } return true; }
Example 5
Source File: ComputationGraphConfiguration.java From deeplearning4j with Apache License 2.0 | 4 votes |
/** * Handle {@link WeightInit} and {@link Distribution} from legacy configs in Json format. Copied from handling of {@link Activation} * above. * @return True if all is well and layer iteration shall continue. False else-wise. */ private static void handleLegacyWeightInitFromJson(String json, Layer layer, ObjectMapper mapper, JsonNode vertices) { if (layer instanceof BaseLayer && ((BaseLayer) layer).getWeightInitFn() == null) { String layerName = layer.getLayerName(); try { if (vertices == null) { JsonNode jsonNode = mapper.readTree(json); vertices = jsonNode.get("vertices"); } JsonNode vertexNode = vertices.get(layerName); JsonNode layerVertexNode = vertexNode.get("LayerVertex"); if (layerVertexNode == null || !layerVertexNode.has("layerConf") || !layerVertexNode.get("layerConf").has("layer")) { return; } JsonNode layerWrapperNode = layerVertexNode.get("layerConf").get("layer"); if (layerWrapperNode == null || layerWrapperNode.size() != 1) { return; } JsonNode layerNode = layerWrapperNode.elements().next(); JsonNode weightInit = layerNode.get("weightInit"); //Should only have 1 element: "dense", "output", etc JsonNode distribution = layerNode.get("dist"); Distribution dist = null; if(distribution != null) { dist = mapper.treeToValue(distribution, Distribution.class); } if (weightInit != null) { final IWeightInit wi = WeightInit.valueOf(weightInit.asText()).getWeightInitFunction(dist); ((BaseLayer) layer).setWeightInitFn(wi); } } catch (IOException e) { log.warn("Layer with null ActivationFn field or pre-0.7.2 activation function detected: could not parse JSON", e); } } }