Java Code Examples for org.codehaus.jackson.JsonGenerator#writeObject()
The following examples show how to use org.codehaus.jackson.JsonGenerator#writeObject().
You can go to the original project or source file by following the links above each example.
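Before the project examples, here is a minimal, self-contained sketch of the call in isolation. It writes a Map through a generator obtained from an ObjectMapper's JsonFactory, mirroring the setup used in the examples below; the class name WriteObjectDemo and the output path out.json are illustrative and not taken from any of the projects.

import java.io.File;
import java.util.HashMap;
import java.util.Map;

import org.codehaus.jackson.JsonEncoding;
import org.codehaus.jackson.JsonGenerator;
import org.codehaus.jackson.map.ObjectMapper;

public class WriteObjectDemo {
    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        // Obtaining the generator from the mapper's factory gives it an object codec,
        // which is what makes writeObject() work. The output file is hypothetical.
        JsonGenerator gen = mapper.getJsonFactory()
                .createJsonGenerator(new File("out.json"), JsonEncoding.UTF8);
        gen.useDefaultPrettyPrinter();

        Map<String, Object> payload = new HashMap<String, Object>();
        payload.put("name", "example");
        payload.put("count", 42);

        // writeObject() delegates to the codec (the ObjectMapper), so the whole map
        // is serialized as a complete JSON object in one call.
        gen.writeObject(payload);
        gen.close();
    }
}

Because writeObject() relies on the codec, the same pattern appears throughout the examples below, whether the target is a ZipOutputStream, an FSDataOutputStream, or a custom serializer's generator.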
Example 1
Source File: GroupDefinitionsCacheGenerator.java From FoxBPM with Apache License 2.0
public void generate(ZipOutputStream out) {
    log.debug("Start processing GroupDefinitions.data...");
    try {
        List<GroupDefinition> groupDefinitions = FoxBpmUtil.getProcessEngine().getIdentityService().getAllGroupDefinitions();
        Map<String, Object> resultMap = new HashMap<String, Object>();
        resultMap.put("data", groupDefinitions);
        ObjectMapper objectMapper = new ObjectMapper();
        JsonGenerator jsonGenerator = objectMapper.getJsonFactory().createJsonGenerator(out, JsonEncoding.UTF8);
        String tmpEntryName = "cache/allGroupDefinitions.data";
        ZipEntry zipEntry = new ZipEntry(tmpEntryName);
        zipEntry.setMethod(ZipEntry.DEFLATED); // set the entry's compression method
        out.putNextEntry(zipEntry);
        jsonGenerator.writeObject(resultMap);
        out.closeEntry();
        log.debug("Finished processing GroupDefinitions.data");
    } catch (Exception ex) {
        log.error("Failed to parse GroupDefinitions.data! Failed to generate the zip file!");
        throw new FoxBPMException("Failed to parse GroupDefinitions.data", ex);
    }
}
Example 2
Source File: GenericEntitySerializer.java From secure-data-service with Apache License 2.0
@Override
public void serialize(GenericEntity entity, JsonGenerator jgen, SerializerProvider provider) throws IOException {
    jgen.writeStartObject();
    // The SLI API only supports entity body elements for PUT and POST requests. If the
    // entity data has a 'body' element, use that explicitly.
    if (entity.getData().containsKey(ENTITY_BODY_KEY)) {
        jgen.writeObject(serializeObject(entity.getData().get(ENTITY_BODY_KEY)));
    } else {
        for (Map.Entry<String, Object> entry : entity.getData().entrySet()) {
            if (entry.getKey().equals(ENTITY_LINKS_KEY) || entry.getKey().equals(ENTITY_METADATA_KEY)) {
                // ignore these read-only fields.
                continue;
            }
            jgen.writeFieldName(entry.getKey());
            jgen.writeObject(serializeObject(entry.getValue()));
        }
    }
    jgen.writeEndObject();
}
Example 3
Source File: ContainerPlacementMessageObjectMapper.java From samza with Apache License 2.0
@Override
public void serialize(ContainerPlacementMessage value, JsonGenerator jsonGenerator, SerializerProvider provider) throws IOException {
    Map<String, Object> containerPlacementMessageMap = new HashMap<String, Object>();
    if (value instanceof ContainerPlacementRequestMessage) {
        containerPlacementMessageMap.put("subType", ContainerPlacementRequestMessage.class.getSimpleName());
    } else if (value instanceof ContainerPlacementResponseMessage) {
        containerPlacementMessageMap.put("subType", ContainerPlacementResponseMessage.class.getSimpleName());
        containerPlacementMessageMap.put("responseMessage", ((ContainerPlacementResponseMessage) value).getResponseMessage());
    }
    if (value.getRequestExpiry() != null) {
        containerPlacementMessageMap.put("requestExpiry", value.getRequestExpiry().toMillis());
    }
    containerPlacementMessageMap.put("uuid", value.getUuid().toString());
    containerPlacementMessageMap.put("deploymentId", value.getDeploymentId());
    containerPlacementMessageMap.put("processorId", value.getProcessorId());
    containerPlacementMessageMap.put("destinationHost", value.getDestinationHost());
    containerPlacementMessageMap.put("statusCode", value.getStatusCode().name());
    containerPlacementMessageMap.put("timestamp", value.getTimestamp());
    jsonGenerator.writeObject(containerPlacementMessageMap);
}
Example 4
Source File: TaskCommandDefinitionsGenerator.java From FoxBPM with Apache License 2.0
public void generate(ZipOutputStream out) {
    try {
        log.debug("Start processing the taskCommandDefinition.data file...");
        ProcessEngineConfigurationImpl processEngineConfigurationImpl = FoxBpmUtil.getProcessEngine().getProcessEngineConfiguration();
        List<TaskCommandDefinition> list = processEngineConfigurationImpl.getTaskCommandDefinitions();
        ObjectMapper objectMapper = new ObjectMapper();
        JsonGenerator jsonGenerator = objectMapper.getJsonFactory().createJsonGenerator(out, JsonEncoding.UTF8);
        String tmpEntryName = "cache/taskCommandDefinition.data";
        ZipEntry zipEntry = new ZipEntry(tmpEntryName);
        zipEntry.setMethod(ZipEntry.DEFLATED); // set the entry's compression method
        out.putNextEntry(zipEntry);
        jsonGenerator.writeObject(list);
        out.closeEntry();
        log.debug("Finished processing the taskCommandDefinition.data file");
    } catch (Exception ex) {
        log.error("Failed to parse the taskCommandDefinition.data file! Failed to generate the zip file!");
        throw new FoxBPMException("Failed to parse the taskCommandDefinition.data file", ex);
    }
}
Example 5
Source File: StatePool.java From big-c with Apache License 2.0
private void write(DataOutput out) throws IOException {
    // This is just a JSON experiment
    System.out.println("Dumping the StatePool's in JSON format.");
    ObjectMapper outMapper = new ObjectMapper();
    outMapper.configure(SerializationConfig.Feature.CAN_OVERRIDE_ACCESS_MODIFIERS, true);
    // define a module
    SimpleModule module = new SimpleModule("State Serializer", new Version(0, 1, 1, "FINAL"));
    // add the state serializer
    //module.addSerializer(State.class, new StateSerializer());
    // register the module with the object-mapper
    outMapper.registerModule(module);
    JsonFactory outFactory = outMapper.getJsonFactory();
    JsonGenerator jGen = outFactory.createJsonGenerator((DataOutputStream) out, JsonEncoding.UTF8);
    jGen.useDefaultPrettyPrinter();
    jGen.writeObject(this);
    jGen.close();
}
Example 6
Source File: Anonymizer.java From big-c with Apache License 2.0
private void anonymizeTopology() throws Exception {
    if (anonymizeTopology) {
        System.out.println("Anonymizing topology file: " + inputTopologyPath);
        ClusterTopologyReader reader = null;
        JsonGenerator outGen = null;
        Configuration conf = getConf();
        try {
            // create a generator
            outGen = createJsonGenerator(conf, outputTopologyPath);
            // define the input cluster topology reader
            reader = new ClusterTopologyReader(inputTopologyPath, conf);
            // read the plain unanonymized logged job
            LoggedNetworkTopology job = reader.get();
            // write it via an anonymizing channel
            outGen.writeObject(job);
            System.out.println("Anonymized topology file: " + outputTopologyPath);
        } finally {
            if (outGen != null) {
                outGen.close();
            }
        }
    }
}
Example 7
Source File: TestHistograms.java From big-c with Apache License 2.0
public static void main(String[] args) throws IOException {
    final Configuration conf = new Configuration();
    final FileSystem lfs = FileSystem.getLocal(conf);
    for (String arg : args) {
        Path filePath = new Path(arg).makeQualified(lfs);
        String fileName = filePath.getName();
        if (fileName.startsWith("input")) {
            LoggedDiscreteCDF newResult = histogramFileToCDF(filePath, lfs);
            String testName = fileName.substring("input".length());
            Path goldFilePath = new Path(filePath.getParent(), "gold" + testName);
            ObjectMapper mapper = new ObjectMapper();
            JsonFactory factory = mapper.getJsonFactory();
            FSDataOutputStream ostream = lfs.create(goldFilePath, true);
            JsonGenerator gen = factory.createJsonGenerator(ostream, JsonEncoding.UTF8);
            gen.useDefaultPrettyPrinter();
            gen.writeObject(newResult);
            gen.close();
        } else {
            System.err.println("Input file not started with \"input\". File " + fileName + " skipped.");
        }
    }
}
Example 8
Source File: DefaultRumenSerializer.java From big-c with Apache License 2.0
public void serialize(DataType object, JsonGenerator jGen, SerializerProvider sProvider) throws IOException, JsonProcessingException {
    Object data = object.getValue();
    if (data instanceof String) {
        jGen.writeString(data.toString());
    } else {
        jGen.writeObject(data);
    }
}
Example 9
Source File: ObjectMapperProvider.java From hraven with Apache License 2.0
/**
 * Checks whether the member is to be filtered out or not; if the filter itself is
 * null, writes out that member.
 *
 * @param member
 * @param includeFilter
 * @param taskObject
 * @param jsonGenerator
 * @throws JsonGenerationException
 * @throws IOException
 */
public static void filteredWrite(String member, Predicate<String> includeFilter, Object taskObject, JsonGenerator jsonGenerator) throws JsonGenerationException, IOException {
    if (includeFilter != null) {
        if (includeFilter.apply(member)) {
            jsonGenerator.writeFieldName(member);
            jsonGenerator.writeObject(taskObject);
        }
    } else {
        jsonGenerator.writeFieldName(member);
        jsonGenerator.writeObject(taskObject);
    }
}
Example 10
Source File: Anonymizer.java From big-c with Apache License 2.0
private void anonymizeTrace() throws Exception {
    if (anonymizeTrace) {
        System.out.println("Anonymizing trace file: " + inputTracePath);
        JobTraceReader reader = null;
        JsonGenerator outGen = null;
        Configuration conf = getConf();
        try {
            // create a generator
            outGen = createJsonGenerator(conf, outputTracePath);
            // define the input trace reader
            reader = new JobTraceReader(inputTracePath, conf);
            // read the plain unanonymized logged job
            LoggedJob job = reader.getNext();
            while (job != null) {
                // write it via an anonymizing channel
                outGen.writeObject(job);
                // read the next job
                job = reader.getNext();
            }
            System.out.println("Anonymized trace file: " + outputTracePath);
        } finally {
            if (outGen != null) {
                outGen.close();
            }
            if (reader != null) {
                reader.close();
            }
        }
    }
}
Example 11
Source File: TestHistograms.java From hadoop with Apache License 2.0
public static void main(String[] args) throws IOException {
    final Configuration conf = new Configuration();
    final FileSystem lfs = FileSystem.getLocal(conf);
    for (String arg : args) {
        Path filePath = new Path(arg).makeQualified(lfs);
        String fileName = filePath.getName();
        if (fileName.startsWith("input")) {
            LoggedDiscreteCDF newResult = histogramFileToCDF(filePath, lfs);
            String testName = fileName.substring("input".length());
            Path goldFilePath = new Path(filePath.getParent(), "gold" + testName);
            ObjectMapper mapper = new ObjectMapper();
            JsonFactory factory = mapper.getJsonFactory();
            FSDataOutputStream ostream = lfs.create(goldFilePath, true);
            JsonGenerator gen = factory.createJsonGenerator(ostream, JsonEncoding.UTF8);
            gen.useDefaultPrettyPrinter();
            gen.writeObject(newResult);
            gen.close();
        } else {
            System.err.println("Input file not started with \"input\". File " + fileName + " skipped.");
        }
    }
}
Example 12
Source File: SamzaObjectMapper.java From samza with Apache License 2.0
@Override
public void serialize(SystemStreamPartition systemStreamPartition, JsonGenerator jsonGenerator, SerializerProvider provider) throws IOException, JsonProcessingException {
    Map<String, Object> systemStreamPartitionMap = new HashMap<String, Object>();
    systemStreamPartitionMap.put("system", systemStreamPartition.getSystem());
    systemStreamPartitionMap.put("stream", systemStreamPartition.getStream());
    systemStreamPartitionMap.put("partition", systemStreamPartition.getPartition());
    jsonGenerator.writeObject(systemStreamPartitionMap);
}
Example 13
Source File: DefaultAnonymizingRumenSerializer.java From hadoop with Apache License 2.0
public void serialize(AnonymizableDataType object, JsonGenerator jGen, SerializerProvider sProvider) throws IOException, JsonProcessingException {
    Object val = object.getAnonymizedValue(statePool, conf);
    // output the data if it's a string
    if (val instanceof String) {
        jGen.writeString(val.toString());
    } else {
        // let the mapper (JSON generator) handle this anonymized object.
        jGen.writeObject(val);
    }
}
Example 14
Source File: Anonymizer.java From hadoop with Apache License 2.0
private void anonymizeTopology() throws Exception {
    if (anonymizeTopology) {
        System.out.println("Anonymizing topology file: " + inputTopologyPath);
        ClusterTopologyReader reader = null;
        JsonGenerator outGen = null;
        Configuration conf = getConf();
        try {
            // create a generator
            outGen = createJsonGenerator(conf, outputTopologyPath);
            // define the input cluster topology reader
            reader = new ClusterTopologyReader(inputTopologyPath, conf);
            // read the plain unanonymized logged job
            LoggedNetworkTopology job = reader.get();
            // write it via an anonymizing channel
            outGen.writeObject(job);
            System.out.println("Anonymized topology file: " + outputTopologyPath);
        } finally {
            if (outGen != null) {
                outGen.close();
            }
        }
    }
}
Example 15
Source File: TestHistograms.java From RDFS with Apache License 2.0
public static void main(String[] args) throws IOException {
    final Configuration conf = new Configuration();
    final FileSystem lfs = FileSystem.getLocal(conf);
    for (String arg : args) {
        Path filePath = new Path(arg).makeQualified(lfs);
        String fileName = filePath.getName();
        if (fileName.startsWith("input")) {
            LoggedDiscreteCDF newResult = histogramFileToCDF(filePath, lfs);
            String testName = fileName.substring("input".length());
            Path goldFilePath = new Path(filePath.getParent(), "gold" + testName);
            ObjectMapper mapper = new ObjectMapper();
            JsonFactory factory = mapper.getJsonFactory();
            FSDataOutputStream ostream = lfs.create(goldFilePath, true);
            JsonGenerator gen = factory.createJsonGenerator(ostream, JsonEncoding.UTF8);
            gen.useDefaultPrettyPrinter();
            gen.writeObject(newResult);
            gen.close();
        } else {
            System.err.println("Input file not started with \"input\". File " + fileName + " skipped.");
        }
    }
}
Example 16
Source File: LogicalPlanSerializer.java From Bats with Apache License 2.0
@Override
public void serialize(LogicalPlan dag, JsonGenerator jg, SerializerProvider sp) throws IOException {
    jg.writeObject(convertToMap(dag, false));
}
Example 17
Source File: PathToStringSerializer.java From jwala with Apache License 2.0
@Override
public void serialize(final Path path, final JsonGenerator jsonGenerator, final SerializerProvider provider) throws IOException {
    jsonGenerator.writeObject(path.toString());
}
Example 18
Source File: StartpointObjectMapper.java From samza with Apache License 2.0
@Override
public void serialize(Instant value, JsonGenerator jsonGenerator, SerializerProvider provider) throws IOException, JsonProcessingException {
    jsonGenerator.writeObject(Long.valueOf(value.toEpochMilli()));
}
Example 19
Source File: ObjectMapperProvider.java From hraven with Apache License 2.0
/**
 * Writes out the flow object
 *
 * @param jsonGenerator
 * @param aFlow
 * @param selectedSerialization
 * @param includeFilter
 * @throws JsonGenerationException
 * @throws IOException
 */
@SuppressWarnings("deprecation")
public static void writeFlowDetails(JsonGenerator jsonGenerator, Flow aFlow, DetailLevel selectedSerialization, Predicate<String> includeFilter) throws JsonGenerationException, IOException {
    jsonGenerator.writeStartObject();
    // serialize the FlowKey object
    filteredWrite("flowKey", includeFilter, aFlow.getFlowKey(), jsonGenerator);
    // serialize individual members of this class
    filteredWrite("flowName", includeFilter, aFlow.getFlowName(), jsonGenerator);
    filteredWrite("userName", includeFilter, aFlow.getUserName(), jsonGenerator);
    filteredWrite("jobCount", includeFilter, aFlow.getJobCount(), jsonGenerator);
    filteredWrite("totalMaps", includeFilter, aFlow.getTotalMaps(), jsonGenerator);
    filteredWrite("totalReduces", includeFilter, aFlow.getTotalReduces(), jsonGenerator);
    filteredWrite("mapFileBytesRead", includeFilter, aFlow.getMapFileBytesRead(), jsonGenerator);
    filteredWrite("mapFileBytesWritten", includeFilter, aFlow.getMapFileBytesWritten(), jsonGenerator);
    filteredWrite("reduceFileBytesRead", includeFilter, aFlow.getReduceFileBytesRead(), jsonGenerator);
    filteredWrite("hdfsBytesRead", includeFilter, aFlow.getHdfsBytesRead(), jsonGenerator);
    filteredWrite("hdfsBytesWritten", includeFilter, aFlow.getHdfsBytesWritten(), jsonGenerator);
    filteredWrite("mapSlotMillis", includeFilter, aFlow.getMapSlotMillis(), jsonGenerator);
    filteredWrite("reduceSlotMillis", includeFilter, aFlow.getReduceSlotMillis(), jsonGenerator);
    filteredWrite("megabyteMillis", includeFilter, aFlow.getMegabyteMillis(), jsonGenerator);
    filteredWrite("cost", includeFilter, aFlow.getCost(), jsonGenerator);
    filteredWrite("reduceShuffleBytes", includeFilter, aFlow.getReduceShuffleBytes(), jsonGenerator);
    filteredWrite("duration", includeFilter, aFlow.getDuration(), jsonGenerator);
    filteredWrite("wallClockTime", includeFilter, aFlow.getWallClockTime(), jsonGenerator);
    filteredWrite("cluster", includeFilter, aFlow.getCluster(), jsonGenerator);
    filteredWrite("appId", includeFilter, aFlow.getAppId(), jsonGenerator);
    filteredWrite("runId", includeFilter, aFlow.getRunId(), jsonGenerator);
    filteredWrite("version", includeFilter, aFlow.getVersion(), jsonGenerator);
    filteredWrite("hadoopVersion", includeFilter, aFlow.getHadoopVersion(), jsonGenerator);
    if (selectedSerialization == SerializationContext.DetailLevel.EVERYTHING) {
        filteredWrite("submitTime", includeFilter, aFlow.getSubmitTime(), jsonGenerator);
        filteredWrite("launchTime", includeFilter, aFlow.getLaunchTime(), jsonGenerator);
        filteredWrite("finishTime", includeFilter, aFlow.getFinishTime(), jsonGenerator);
    }
    filteredWrite(Constants.HRAVEN_QUEUE, includeFilter, aFlow.getQueue(), jsonGenerator);
    filteredWrite("counters", includeFilter, aFlow.getCounters(), jsonGenerator);
    filteredWrite("mapCounters", includeFilter, aFlow.getMapCounters(), jsonGenerator);
    filteredWrite("reduceCounters", includeFilter, aFlow.getReduceCounters(), jsonGenerator);
    // if flag, include job details
    if ((selectedSerialization == SerializationContext.DetailLevel.FLOW_SUMMARY_STATS_WITH_JOB_STATS)
            || (selectedSerialization == SerializationContext.DetailLevel.EVERYTHING)) {
        jsonGenerator.writeFieldName("jobs");
        jsonGenerator.writeObject(aFlow.getJobs());
    }
    jsonGenerator.writeEndObject();
}
Example 20
Source File: SamzaObjectMapper.java From samza with Apache License 2.0
@Override
public void serialize(TaskMode taskMode, JsonGenerator jsonGenerator, SerializerProvider provider) throws IOException, JsonProcessingException {
    jsonGenerator.writeObject(taskMode.toString());
}