Java Code Examples for org.datavec.api.transform.ColumnType#valueOf()
The following examples show how to use
org.datavec.api.transform.ColumnType#valueOf() .
You can vote up the examples you find useful or vote down those you don't, and follow the link above each example to visit the original project or source file. You may also check out the related API usage on the sidebar.
Example 1
Source File: SchemaTypeUtils.java From konduit-serving with Apache License 2.0 | 4 votes |
/** * Create a {@link Schema} from a {@link JsonObject} * schema descriptor. The schema descriptor contains a json object of keys * of type {@link ColumnType} values in the form of: * name : {@link ColumnType} value * * There are 2 exceptions to this rule. * {@link ColumnType#NDArray} and {@link ColumnType#Categorical} * both are json objects. * {@link ColumnType#NDArray} has the form: * {name : shape: [], serialization type: "json" | "b64"} * {@link ColumnType#Categorical} has the form: * {categories: []} * {@link ColumnType#Time} has the form: * {timeZoneId: timeZoneId} * * * @param schemaDescriptor a {@link JsonObject} with the form * described above * @return the equivalent {@link Schema} derived from the given descriptor */ public static Schema schemaFromDynamicSchemaDefinition(JsonObject schemaDescriptor) { Schema.Builder schemaBuilder = new Builder(); for(String key : schemaDescriptor.fieldNames()) { JsonObject fieldInfo = schemaDescriptor.getJsonObject(key); JsonObject fieldInfoObject = fieldInfo.getJsonObject("fieldInfo"); if(fieldInfoObject == null) { throw new IllegalArgumentException("Unable to find object fieldInfo!"); } if(!fieldInfoObject.containsKey("type")) { throw new IllegalArgumentException("Illegal field info. 
Missing key type for identifying type of field"); } //convert image to bytes and let user pre process accordingly String type = fieldInfoObject.getString("type"); if(type.equals("Image")) { type = "Bytes"; } switch(ColumnType.valueOf(type)) { case Boolean: schemaBuilder.addColumnBoolean(key); break; case Double: schemaBuilder.addColumnDouble(key); break; case Float: schemaBuilder.addColumnFloat(key); break; case Long: schemaBuilder.addColumnLong(key); break; case String: schemaBuilder.addColumnString(key); break; case Integer: schemaBuilder.addColumnInteger(key); break; case NDArray: JsonArray shapeArr = fieldInfoObject.getJsonArray("shape"); long[] shape = new long[shapeArr.size()]; for(int i = 0; i < shape.length; i++) { shape[i] = shapeArr.getLong(i); } schemaBuilder.addColumnNDArray(key,shape); break; case Categorical: JsonArray jsonArray = fieldInfoObject.getJsonArray("categories"); String[] categories = new String[jsonArray.size()]; for(int i = 0; i < categories.length; i++) { categories[i] = jsonArray.getString(i); } schemaBuilder.addColumnCategorical(key,categories); break; case Bytes: ColumnMetaData columnMetaData = new BinaryMetaData(key); schemaBuilder.addColumn(columnMetaData); break; case Time: TimeZone zoneById = TimeZone.getTimeZone(fieldInfoObject.getString("timeZoneId")); schemaBuilder.addColumnTime(key,zoneById); break; } } return schemaBuilder.build(); }
Example 2
Source File: DataAnalysis.java From DataVec with Apache License 2.0 | 4 votes |
private static DataAnalysis fromMapper(ObjectMapper om, String json) { List<ColumnMetaData> meta = new ArrayList<>(); List<ColumnAnalysis> analysis = new ArrayList<>(); try { JsonNode node = om.readTree(json); Iterator<String> fieldNames = node.fieldNames(); boolean hasDataAnalysis = false; while (fieldNames.hasNext()) { if ("DataAnalysis".equals(fieldNames.next())) { hasDataAnalysis = true; break; } } if (!hasDataAnalysis) { throw new RuntimeException(); } ArrayNode arrayNode = (ArrayNode) node.get("DataAnalysis"); for (int i = 0; i < arrayNode.size(); i++) { JsonNode analysisNode = arrayNode.get(i); String name = analysisNode.get(COL_NAME).asText(); int idx = analysisNode.get(COL_IDX).asInt(); ColumnType type = ColumnType.valueOf(analysisNode.get(COL_TYPE).asText()); JsonNode daNode = analysisNode.get(ANALYSIS); ColumnAnalysis dataAnalysis = om.treeToValue(daNode, ColumnAnalysis.class); if (type == ColumnType.Categorical) { ArrayNode an = (ArrayNode) analysisNode.get(CATEGORICAL_STATE_NAMES); List<String> stateNames = new ArrayList<>(an.size()); Iterator<JsonNode> iter = an.elements(); while (iter.hasNext()) { stateNames.add(iter.next().asText()); } meta.add(new CategoricalMetaData(name, stateNames)); } else { meta.add(type.newColumnMetaData(name)); } analysis.add(dataAnalysis); } } catch (Exception e) { throw new RuntimeException(e); } Schema schema = new Schema(meta); return new DataAnalysis(schema, analysis); }
Example 3
Source File: DataAnalysis.java From deeplearning4j with Apache License 2.0 | 4 votes |
private static DataAnalysis fromMapper(ObjectMapper om, String json) { List<ColumnMetaData> meta = new ArrayList<>(); List<ColumnAnalysis> analysis = new ArrayList<>(); try { JsonNode node = om.readTree(json); Iterator<String> fieldNames = node.fieldNames(); boolean hasDataAnalysis = false; while (fieldNames.hasNext()) { if ("DataAnalysis".equals(fieldNames.next())) { hasDataAnalysis = true; break; } } if (!hasDataAnalysis) { throw new RuntimeException(); } ArrayNode arrayNode = (ArrayNode) node.get("DataAnalysis"); for (int i = 0; i < arrayNode.size(); i++) { JsonNode analysisNode = arrayNode.get(i); String name = analysisNode.get(COL_NAME).asText(); int idx = analysisNode.get(COL_IDX).asInt(); ColumnType type = ColumnType.valueOf(analysisNode.get(COL_TYPE).asText()); JsonNode daNode = analysisNode.get(ANALYSIS); ColumnAnalysis dataAnalysis = om.treeToValue(daNode, ColumnAnalysis.class); if (type == ColumnType.Categorical) { ArrayNode an = (ArrayNode) analysisNode.get(CATEGORICAL_STATE_NAMES); List<String> stateNames = new ArrayList<>(an.size()); Iterator<JsonNode> iter = an.elements(); while (iter.hasNext()) { stateNames.add(iter.next().asText()); } meta.add(new CategoricalMetaData(name, stateNames)); } else { meta.add(type.newColumnMetaData(name)); } analysis.add(dataAnalysis); } } catch (Exception e) { throw new RuntimeException(e); } Schema schema = new Schema(meta); return new DataAnalysis(schema, analysis); }