Java Code Examples for org.codehaus.jackson.JsonGenerator#writeFieldName()
The following examples show how to use org.codehaus.jackson.JsonGenerator#writeFieldName().
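For orientation, here is a minimal, self-contained sketch of the call pattern the examples below share (class and variable names such as WriteFieldNameSketch and out are illustrative, not taken from any of the projects). writeFieldName() emits only the key of a JSON object entry, so it must always be followed by a value-writing call such as writeString() or writeNumber().

import java.io.IOException;
import java.io.StringWriter;

import org.codehaus.jackson.JsonFactory;
import org.codehaus.jackson.JsonGenerator;

public class WriteFieldNameSketch {
  public static void main(String[] args) throws IOException {
    StringWriter out = new StringWriter();
    JsonGenerator generator = new JsonFactory().createJsonGenerator(out);
    generator.writeStartObject();
    // writeFieldName() writes the key; a matching value call must follow.
    generator.writeFieldName("status");
    generator.writeString("ok");
    generator.writeEndObject();
    generator.close();          // flushes and completes the JSON output
    System.out.println(out);    // prints {"status":"ok"}
  }
}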
Example 1
Source File: ObjectMapperProvider.java From hraven with Apache License 2.0 | 6 votes |
@Override
public void serialize(CounterMap counterMap, JsonGenerator jsonGenerator,
    SerializerProvider serializerProvider) throws IOException {
  jsonGenerator.writeStartObject();
  for (String group : counterMap.getGroups()) {
    jsonGenerator.writeFieldName(group);
    jsonGenerator.writeStartObject();
    Map<String, Counter> groupMap = counterMap.getGroup(group);
    for (String counterName : groupMap.keySet()) {
      Counter counter = groupMap.get(counterName);
      jsonGenerator.writeFieldName(counter.getKey());
      jsonGenerator.writeNumber(counter.getValue());
    }
    jsonGenerator.writeEndObject();
  }
  jsonGenerator.writeEndObject();
}
Example 2
Source File: GenericEntitySerializer.java From secure-data-service with Apache License 2.0 | 6 votes |
@Override
public void serialize(GenericEntity entity, JsonGenerator jgen, SerializerProvider provider)
    throws IOException {
  jgen.writeStartObject();
  // The SLI API only supports entity body elements for PUT and POST requests. If the
  // entity data has a 'body' element, use that explicitly.
  if (entity.getData().containsKey(ENTITY_BODY_KEY)) {
    jgen.writeObject(serializeObject(entity.getData().get(ENTITY_BODY_KEY)));
  } else {
    for (Map.Entry<String, Object> entry : entity.getData().entrySet()) {
      if (entry.getKey().equals(ENTITY_LINKS_KEY) || entry.getKey().equals(ENTITY_METADATA_KEY)) {
        // ignore these read-only fields.
        continue;
      }
      jgen.writeFieldName(entry.getKey());
      jgen.writeObject(serializeObject(entry.getValue()));
    }
  }
  jgen.writeEndObject();
}
Example 3
Source File: ObjectMapperProvider.java From hraven with Apache License 2.0 | 6 votes |
@Override
public void serialize(Configuration conf, JsonGenerator jsonGenerator,
    SerializerProvider serializerProvider) throws IOException {
  SerializationContext context = RestResource.serializationContext.get();
  Predicate<String> configFilter = context.getConfigurationFilter();
  Iterator<Map.Entry<String, String>> keyValueIterator = conf.iterator();
  jsonGenerator.writeStartObject();
  // here's where we can filter out keys if we want
  while (keyValueIterator.hasNext()) {
    Map.Entry<String, String> kvp = keyValueIterator.next();
    if (configFilter == null || configFilter.apply(kvp.getKey())) {
      jsonGenerator.writeFieldName(kvp.getKey());
      jsonGenerator.writeString(kvp.getValue());
    }
  }
  jsonGenerator.writeEndObject();
}
Example 4
Source File: Configuration.java From RDFS with Apache License 2.0 | 6 votes |
/**
 * Writes out all the parameters and their properties (final and resource) to
 * the given {@link Writer}
 * The format of the output would be
 * { "properties" : [ {key1,value1,key1.isFinal,key1.resource}, {key2,value2,
 * key2.isFinal,key2.resource}... ] }
 * It does not output the parameters of the configuration object which is
 * loaded from an input stream.
 * @param out the Writer to write to
 * @throws IOException
 */
public static void dumpConfiguration(Configuration config, Writer out)
    throws IOException {
  JsonFactory dumpFactory = new JsonFactory();
  JsonGenerator dumpGenerator = dumpFactory.createJsonGenerator(out);
  dumpGenerator.writeStartObject();
  dumpGenerator.writeFieldName("properties");
  dumpGenerator.writeStartArray();
  dumpGenerator.flush();
  synchronized (config) {
    for (Map.Entry<Object, Object> item : config.getProps().entrySet()) {
      dumpGenerator.writeStartObject();
      dumpGenerator.writeStringField("key", (String) item.getKey());
      dumpGenerator.writeStringField("value", config.get((String) item.getKey()));
      dumpGenerator.writeBooleanField("isFinal", config.finalParameters.contains(item.getKey()));
      dumpGenerator.writeStringField("resource", config.updatingResource.get(item.getKey()));
      dumpGenerator.writeEndObject();
    }
  }
  dumpGenerator.writeEndArray();
  dumpGenerator.writeEndObject();
  dumpGenerator.flush();
}
Example 5
Source File: Configuration.java From flink with Apache License 2.0 | 5 votes |
/**
 * Writes out all the parameters and their properties (final and resource) to
 * the given {@link Writer}
 * The format of the output would be
 * { "properties" : [ {key1,value1,key1.isFinal,key1.resource}, {key2,value2,
 * key2.isFinal,key2.resource}... ] }
 * It does not output the parameters of the configuration object which is
 * loaded from an input stream.
 * @param out the Writer to write to
 * @throws IOException
 */
public static void dumpConfiguration(Configuration config, Writer out)
    throws IOException {
  JsonFactory dumpFactory = new JsonFactory();
  JsonGenerator dumpGenerator = dumpFactory.createJsonGenerator(out);
  dumpGenerator.writeStartObject();
  dumpGenerator.writeFieldName("properties");
  dumpGenerator.writeStartArray();
  dumpGenerator.flush();
  synchronized (config) {
    for (Entry<Object, Object> item : config.getProps().entrySet()) {
      dumpGenerator.writeStartObject();
      dumpGenerator.writeStringField("key", (String) item.getKey());
      dumpGenerator.writeStringField("value", config.get((String) item.getKey()));
      dumpGenerator.writeBooleanField("isFinal", config.finalParameters.contains(item.getKey()));
      String[] resources = config.updatingResource.get(item.getKey());
      String resource = UNKNOWN_RESOURCE;
      if (resources != null && resources.length > 0) {
        resource = resources[0];
      }
      dumpGenerator.writeStringField("resource", resource);
      dumpGenerator.writeEndObject();
    }
  }
  dumpGenerator.writeEndArray();
  dumpGenerator.writeEndObject();
  dumpGenerator.flush();
}
Example 6
Source File: RecordWithMetadataToEnvelopedRecordWithMetadata.java From incubator-gobblin with Apache License 2.0 | 5 votes |
private void writeRecord(RecordWithMetadata<?> inputRecord, JsonGenerator generator)
    throws IOException {
  if (shouldInterpretRecordAsUtf8ByteArray(inputRecord)) {
    generator.writeFieldName("r");
    byte[] bytes = (byte[]) inputRecord.getRecord();
    generator.writeUTF8String(bytes, 0, bytes.length);
  } else {
    generator.writeObjectField("r", inputRecord.getRecord());
  }
}
Example 7
Source File: Configuration.java From flink with Apache License 2.0 | 5 votes |
/**
 * Writes out all the parameters and their properties (final and resource) to
 * the given {@link Writer}
 * The format of the output would be
 * { "properties" : [ {key1,value1,key1.isFinal,key1.resource}, {key2,value2,
 * key2.isFinal,key2.resource}... ] }
 * It does not output the parameters of the configuration object which is
 * loaded from an input stream.
 * @param out the Writer to write to
 * @throws IOException
 */
public static void dumpConfiguration(Configuration config, Writer out)
    throws IOException {
  JsonFactory dumpFactory = new JsonFactory();
  JsonGenerator dumpGenerator = dumpFactory.createJsonGenerator(out);
  dumpGenerator.writeStartObject();
  dumpGenerator.writeFieldName("properties");
  dumpGenerator.writeStartArray();
  dumpGenerator.flush();
  synchronized (config) {
    for (Entry<Object, Object> item : config.getProps().entrySet()) {
      dumpGenerator.writeStartObject();
      dumpGenerator.writeStringField("key", (String) item.getKey());
      dumpGenerator.writeStringField("value", config.get((String) item.getKey()));
      dumpGenerator.writeBooleanField("isFinal", config.finalParameters.contains(item.getKey()));
      String[] resources = config.updatingResource.get(item.getKey());
      String resource = UNKNOWN_RESOURCE;
      if (resources != null && resources.length > 0) {
        resource = resources[0];
      }
      dumpGenerator.writeStringField("resource", resource);
      dumpGenerator.writeEndObject();
    }
  }
  dumpGenerator.writeEndArray();
  dumpGenerator.writeEndObject();
  dumpGenerator.flush();
}
Example 8
Source File: Configuration.java From big-c with Apache License 2.0 | 5 votes |
/**
 * Writes out all the parameters and their properties (final and resource) to
 * the given {@link Writer}
 * The format of the output would be
 * { "properties" : [ {key1,value1,key1.isFinal,key1.resource}, {key2,value2,
 * key2.isFinal,key2.resource}... ] }
 * It does not output the parameters of the configuration object which is
 * loaded from an input stream.
 * @param out the Writer to write to
 * @throws IOException
 */
public static void dumpConfiguration(Configuration config, Writer out)
    throws IOException {
  JsonFactory dumpFactory = new JsonFactory();
  JsonGenerator dumpGenerator = dumpFactory.createJsonGenerator(out);
  dumpGenerator.writeStartObject();
  dumpGenerator.writeFieldName("properties");
  dumpGenerator.writeStartArray();
  dumpGenerator.flush();
  synchronized (config) {
    for (Map.Entry<Object, Object> item : config.getProps().entrySet()) {
      dumpGenerator.writeStartObject();
      dumpGenerator.writeStringField("key", (String) item.getKey());
      dumpGenerator.writeStringField("value", config.get((String) item.getKey()));
      dumpGenerator.writeBooleanField("isFinal", config.finalParameters.contains(item.getKey()));
      String[] resources = config.updatingResource.get(item.getKey());
      String resource = UNKNOWN_RESOURCE;
      if (resources != null && resources.length > 0) {
        resource = resources[0];
      }
      dumpGenerator.writeStringField("resource", resource);
      dumpGenerator.writeEndObject();
    }
  }
  dumpGenerator.writeEndArray();
  dumpGenerator.writeEndObject();
  dumpGenerator.flush();
}
Example 9
Source File: QueueManager.java From big-c with Apache License 2.0 | 5 votes |
/***
 * Dumps the configuration of hierarchy of queues with
 * the xml file path given. It is to be used directly ONLY FOR TESTING.
 * @param out the writer object to which dump is written to.
 * @param configFile the filename of xml file
 * @throws IOException
 */
static void dumpConfiguration(Writer out, String configFile, Configuration conf)
    throws IOException {
  if (conf != null
      && conf.get(DeprecatedQueueConfigurationParser.MAPRED_QUEUE_NAMES_KEY) != null) {
    return;
  }
  JsonFactory dumpFactory = new JsonFactory();
  JsonGenerator dumpGenerator = dumpFactory.createJsonGenerator(out);
  QueueConfigurationParser parser;
  boolean aclsEnabled = false;
  if (conf != null) {
    aclsEnabled = conf.getBoolean(MRConfig.MR_ACLS_ENABLED, false);
  }
  if (configFile != null && !"".equals(configFile)) {
    parser = new QueueConfigurationParser(configFile, aclsEnabled);
  } else {
    parser = getQueueConfigurationParser(null, false, aclsEnabled);
  }
  dumpGenerator.writeStartObject();
  dumpGenerator.writeFieldName("queues");
  dumpGenerator.writeStartArray();
  dumpConfiguration(dumpGenerator, parser.getRoot().getChildren());
  dumpGenerator.writeEndArray();
  dumpGenerator.writeEndObject();
  dumpGenerator.flush();
}
Example 10
Source File: MediaTypeSerializer.java From jwala with Apache License 2.0 | 5 votes |
@Override
public void serialize(final MediaType mediaType, final JsonGenerator generator,
    final SerializerProvider provider) throws IOException {
  generator.writeStartObject();
  generator.writeFieldName("name");
  generator.writeString(mediaType.name());
  generator.writeFieldName("displayName");
  generator.writeString(mediaType.getDisplayName());
  generator.writeEndObject();
}
Example 11
Source File: Configuration.java From hadoop with Apache License 2.0 | 5 votes |
/**
 * Writes out all the parameters and their properties (final and resource) to
 * the given {@link Writer}
 * The format of the output would be
 * { "properties" : [ {key1,value1,key1.isFinal,key1.resource}, {key2,value2,
 * key2.isFinal,key2.resource}... ] }
 * It does not output the parameters of the configuration object which is
 * loaded from an input stream.
 * @param out the Writer to write to
 * @throws IOException
 */
public static void dumpConfiguration(Configuration config, Writer out)
    throws IOException {
  JsonFactory dumpFactory = new JsonFactory();
  JsonGenerator dumpGenerator = dumpFactory.createJsonGenerator(out);
  dumpGenerator.writeStartObject();
  dumpGenerator.writeFieldName("properties");
  dumpGenerator.writeStartArray();
  dumpGenerator.flush();
  synchronized (config) {
    for (Map.Entry<Object, Object> item : config.getProps().entrySet()) {
      dumpGenerator.writeStartObject();
      dumpGenerator.writeStringField("key", (String) item.getKey());
      dumpGenerator.writeStringField("value", config.get((String) item.getKey()));
      dumpGenerator.writeBooleanField("isFinal", config.finalParameters.contains(item.getKey()));
      String[] resources = config.updatingResource.get(item.getKey());
      String resource = UNKNOWN_RESOURCE;
      if (resources != null && resources.length > 0) {
        resource = resources[0];
      }
      dumpGenerator.writeStringField("resource", resource);
      dumpGenerator.writeEndObject();
    }
  }
  dumpGenerator.writeEndArray();
  dumpGenerator.writeEndObject();
  dumpGenerator.flush();
}
Example 12
Source File: QueueManager.java From hadoop with Apache License 2.0 | 5 votes |
/***
 * Dumps the configuration of hierarchy of queues with
 * the xml file path given. It is to be used directly ONLY FOR TESTING.
 * @param out the writer object to which dump is written to.
 * @param configFile the filename of xml file
 * @throws IOException
 */
static void dumpConfiguration(Writer out, String configFile, Configuration conf)
    throws IOException {
  if (conf != null
      && conf.get(DeprecatedQueueConfigurationParser.MAPRED_QUEUE_NAMES_KEY) != null) {
    return;
  }
  JsonFactory dumpFactory = new JsonFactory();
  JsonGenerator dumpGenerator = dumpFactory.createJsonGenerator(out);
  QueueConfigurationParser parser;
  boolean aclsEnabled = false;
  if (conf != null) {
    aclsEnabled = conf.getBoolean(MRConfig.MR_ACLS_ENABLED, false);
  }
  if (configFile != null && !"".equals(configFile)) {
    parser = new QueueConfigurationParser(configFile, aclsEnabled);
  } else {
    parser = getQueueConfigurationParser(null, false, aclsEnabled);
  }
  dumpGenerator.writeStartObject();
  dumpGenerator.writeFieldName("queues");
  dumpGenerator.writeStartArray();
  dumpConfiguration(dumpGenerator, parser.getRoot().getChildren());
  dumpGenerator.writeEndArray();
  dumpGenerator.writeEndObject();
  dumpGenerator.flush();
}
Example 13
Source File: ObjectMapperProvider.java From hraven with Apache License 2.0 | 5 votes |
/**
 * checks if the member is to be filtered out or no if filter itself is
 * null, writes out that member
 *
 * @param member
 * @param includeFilter
 * @param taskObject
 * @param jsonGenerator
 * @throws JsonGenerationException
 * @throws IOException
 */
public static void filteredWrite(String member, Predicate<String> includeFilter,
    Object taskObject, JsonGenerator jsonGenerator)
    throws JsonGenerationException, IOException {
  if (includeFilter != null) {
    if (includeFilter.apply(member)) {
      jsonGenerator.writeFieldName(member);
      jsonGenerator.writeObject(taskObject);
    }
  } else {
    jsonGenerator.writeFieldName(member);
    jsonGenerator.writeObject(taskObject);
  }
}
Example 14
Source File: QueueManager.java From big-c with Apache License 2.0 | 4 votes |
/**
 * method to perform depth-first search and write the parameters of every
 * queue in JSON format.
 * @param dumpGenerator JsonGenerator object which takes the dump and flushes
 *                      to a writer object
 * @param rootQueues the top-level queues
 * @throws JsonGenerationException
 * @throws IOException
 */
private static void dumpConfiguration(JsonGenerator dumpGenerator, Set<Queue> rootQueues)
    throws JsonGenerationException, IOException {
  for (Queue queue : rootQueues) {
    dumpGenerator.writeStartObject();
    dumpGenerator.writeStringField("name", queue.getName());
    dumpGenerator.writeStringField("state", queue.getState().toString());
    AccessControlList submitJobList = null;
    AccessControlList administerJobsList = null;
    if (queue.getAcls() != null) {
      submitJobList = queue.getAcls().get(
          toFullPropertyName(queue.getName(), QueueACL.SUBMIT_JOB.getAclName()));
      administerJobsList = queue.getAcls().get(
          toFullPropertyName(queue.getName(), QueueACL.ADMINISTER_JOBS.getAclName()));
    }
    String aclsSubmitJobValue = " ";
    if (submitJobList != null) {
      aclsSubmitJobValue = submitJobList.getAclString();
    }
    dumpGenerator.writeStringField("acl_submit_job", aclsSubmitJobValue);
    String aclsAdministerValue = " ";
    if (administerJobsList != null) {
      aclsAdministerValue = administerJobsList.getAclString();
    }
    dumpGenerator.writeStringField("acl_administer_jobs", aclsAdministerValue);
    dumpGenerator.writeFieldName("properties");
    dumpGenerator.writeStartArray();
    if (queue.getProperties() != null) {
      for (Map.Entry<Object, Object> property : queue.getProperties().entrySet()) {
        dumpGenerator.writeStartObject();
        dumpGenerator.writeStringField("key", (String) property.getKey());
        dumpGenerator.writeStringField("value", (String) property.getValue());
        dumpGenerator.writeEndObject();
      }
    }
    dumpGenerator.writeEndArray();
    Set<Queue> childQueues = queue.getChildren();
    dumpGenerator.writeFieldName("children");
    dumpGenerator.writeStartArray();
    if (childQueues != null && childQueues.size() > 0) {
      dumpConfiguration(dumpGenerator, childQueues);
    }
    dumpGenerator.writeEndArray();
    dumpGenerator.writeEndObject();
  }
}
Example 15
Source File: JMXJsonServlet.java From big-c with Apache License 2.0 | 4 votes |
private void writeAttribute(JsonGenerator jg, String attName, Object value) throws IOException {
  jg.writeFieldName(attName);
  writeObject(jg, value);
}
Example 16
Source File: SamzaObjectMapper.java From samza with Apache License 2.0 | 4 votes |
@Override
public void serialize(SystemStreamPartition ssp, JsonGenerator jgen, SerializerProvider provider)
    throws IOException {
  String sspString = ssp.getSystem() + "." + ssp.getStream() + "."
      + String.valueOf(ssp.getPartition().getPartitionId());
  jgen.writeFieldName(sspString);
}
Example 17
Source File: ObjectMapperProvider.java From hraven with Apache License 2.0 | 4 votes |
/**
 * checks if the member is to be filtered out or no if filter itself is
 * null, writes out that member
 *
 * @param member
 * @param includeFilter
 * @param jsonGenerator
 * @throws JsonGenerationException
 * @throws IOException
 */
public static void filteredCounterWrite(String member, Predicate<String> includeFilter,
    Predicate<String> includeCounterFilter, CounterMap counterMap,
    JsonGenerator jsonGenerator) throws IOException {
  if (includeFilter != null && includeCounterFilter == null) {
    if (includeFilter.apply(member)) {
      jsonGenerator.writeFieldName(member);
      jsonGenerator.writeObject(counterMap);
    }
  } else {
    if (includeCounterFilter != null) {
      // get group name, counter name,
      // check if it is wanted
      // if yes print it.
      boolean startObjectGroupMap = false;
      jsonGenerator.writeFieldName(member);
      String fullCounterName;
      jsonGenerator.writeStartObject();
      for (String group : counterMap.getGroups()) {
        Map<String, Counter> groupMap = counterMap.getGroup(group);
        for (Map.Entry<String, Counter> nameCounterEntry : groupMap.entrySet()) {
          Counter counter = nameCounterEntry.getValue();
          fullCounterName = group + "." + counter.getKey();
          if (includeCounterFilter.apply(fullCounterName)) {
            if (startObjectGroupMap == false) {
              jsonGenerator.writeFieldName(group);
              jsonGenerator.writeStartObject();
              startObjectGroupMap = true;
            }
            jsonGenerator.writeFieldName(counter.getKey());
            jsonGenerator.writeNumber(counter.getValue());
          }
        }
        if (startObjectGroupMap) {
          jsonGenerator.writeEndObject();
          startObjectGroupMap = false;
        }
      }
      jsonGenerator.writeEndObject();
    }
  }
}
Example 18
Source File: ParquetAsJsonInputFormat.java From iow-hadoop-streaming with Apache License 2.0 | 4 votes |
private void groupToJson(JsonGenerator currentGenerator, SimpleGroup grp) throws IOException {
  GroupType gt = grp.getType();
  currentGenerator.writeStartObject();
  for (int i = 0; i < gt.getFieldCount(); i++) {
    String field = gt.getFieldName(i);
    try {
      Type t = gt.getType(i);
      int repetition = 1;
      boolean repeated = false;
      if (t.getRepetition() == Type.Repetition.REPEATED) {
        repeated = true;
        repetition = grp.getFieldRepetitionCount(i);
        currentGenerator.writeArrayFieldStart(field);
      } else {
        currentGenerator.writeFieldName(field);
      }
      for (int j = 0; j < repetition; j++) {
        if (t.isPrimitive()) {
          switch (t.asPrimitiveType().getPrimitiveTypeName()) {
            case BINARY:
              currentGenerator.writeString(grp.getString(i, j));
              break;
            case INT32:
              currentGenerator.writeNumber(grp.getInteger(i, j));
              break;
            case INT96:
            case INT64:
              // clumsy way - TODO - Subclass SimpleGroup or something like that
              currentGenerator.writeNumber(Long.parseLong(grp.getValueToString(i, j)));
              break;
            case DOUBLE:
            case FLOAT:
              currentGenerator.writeNumber(Double.parseDouble(grp.getValueToString(i, j)));
              break;
            case BOOLEAN:
              currentGenerator.writeBoolean(grp.getBoolean(i, j));
              break;
            default:
              throw new RuntimeException("Can't handle type " + gt.getType(i));
          }
        } else {
          groupToJson(currentGenerator, (SimpleGroup) grp.getGroup(i, j));
        }
      }
      if (repeated) {
        currentGenerator.writeEndArray();
      }
    } catch (Exception e) {
      if (e.getMessage().startsWith("not found")
          && gt.getType(i).getRepetition() == Type.Repetition.OPTIONAL) {
        currentGenerator.writeNull();
      } else {
        throw new RuntimeException(e);
      }
    }
  }
  currentGenerator.writeEndObject();
}
Example 19
Source File: QueueManager.java From hadoop with Apache License 2.0 | 4 votes |
/**
 * method to perform depth-first search and write the parameters of every
 * queue in JSON format.
 * @param dumpGenerator JsonGenerator object which takes the dump and flushes
 *                      to a writer object
 * @param rootQueues the top-level queues
 * @throws JsonGenerationException
 * @throws IOException
 */
private static void dumpConfiguration(JsonGenerator dumpGenerator, Set<Queue> rootQueues)
    throws JsonGenerationException, IOException {
  for (Queue queue : rootQueues) {
    dumpGenerator.writeStartObject();
    dumpGenerator.writeStringField("name", queue.getName());
    dumpGenerator.writeStringField("state", queue.getState().toString());
    AccessControlList submitJobList = null;
    AccessControlList administerJobsList = null;
    if (queue.getAcls() != null) {
      submitJobList = queue.getAcls().get(
          toFullPropertyName(queue.getName(), QueueACL.SUBMIT_JOB.getAclName()));
      administerJobsList = queue.getAcls().get(
          toFullPropertyName(queue.getName(), QueueACL.ADMINISTER_JOBS.getAclName()));
    }
    String aclsSubmitJobValue = " ";
    if (submitJobList != null) {
      aclsSubmitJobValue = submitJobList.getAclString();
    }
    dumpGenerator.writeStringField("acl_submit_job", aclsSubmitJobValue);
    String aclsAdministerValue = " ";
    if (administerJobsList != null) {
      aclsAdministerValue = administerJobsList.getAclString();
    }
    dumpGenerator.writeStringField("acl_administer_jobs", aclsAdministerValue);
    dumpGenerator.writeFieldName("properties");
    dumpGenerator.writeStartArray();
    if (queue.getProperties() != null) {
      for (Map.Entry<Object, Object> property : queue.getProperties().entrySet()) {
        dumpGenerator.writeStartObject();
        dumpGenerator.writeStringField("key", (String) property.getKey());
        dumpGenerator.writeStringField("value", (String) property.getValue());
        dumpGenerator.writeEndObject();
      }
    }
    dumpGenerator.writeEndArray();
    Set<Queue> childQueues = queue.getChildren();
    dumpGenerator.writeFieldName("children");
    dumpGenerator.writeStartArray();
    if (childQueues != null && childQueues.size() > 0) {
      dumpConfiguration(dumpGenerator, childQueues);
    }
    dumpGenerator.writeEndArray();
    dumpGenerator.writeEndObject();
  }
}
Example 20
Source File: ObjectMapperProvider.java From hraven with Apache License 2.0 | 4 votes |
/**
 * Writes out the flow object
 *
 * @param jsonGenerator
 * @param aFlow
 * @param selectedSerialization
 * @param includeFilter
 * @throws JsonGenerationException
 * @throws IOException
 */
@SuppressWarnings("deprecation")
public static void writeFlowDetails(JsonGenerator jsonGenerator, Flow aFlow,
    DetailLevel selectedSerialization, Predicate<String> includeFilter)
    throws JsonGenerationException, IOException {
  jsonGenerator.writeStartObject();
  // serialize the FlowKey object
  filteredWrite("flowKey", includeFilter, aFlow.getFlowKey(), jsonGenerator);
  // serialize individual members of this class
  filteredWrite("flowName", includeFilter, aFlow.getFlowName(), jsonGenerator);
  filteredWrite("userName", includeFilter, aFlow.getUserName(), jsonGenerator);
  filteredWrite("jobCount", includeFilter, aFlow.getJobCount(), jsonGenerator);
  filteredWrite("totalMaps", includeFilter, aFlow.getTotalMaps(), jsonGenerator);
  filteredWrite("totalReduces", includeFilter, aFlow.getTotalReduces(), jsonGenerator);
  filteredWrite("mapFileBytesRead", includeFilter, aFlow.getMapFileBytesRead(), jsonGenerator);
  filteredWrite("mapFileBytesWritten", includeFilter, aFlow.getMapFileBytesWritten(), jsonGenerator);
  filteredWrite("reduceFileBytesRead", includeFilter, aFlow.getReduceFileBytesRead(), jsonGenerator);
  filteredWrite("hdfsBytesRead", includeFilter, aFlow.getHdfsBytesRead(), jsonGenerator);
  filteredWrite("hdfsBytesWritten", includeFilter, aFlow.getHdfsBytesWritten(), jsonGenerator);
  filteredWrite("mapSlotMillis", includeFilter, aFlow.getMapSlotMillis(), jsonGenerator);
  filteredWrite("reduceSlotMillis", includeFilter, aFlow.getReduceSlotMillis(), jsonGenerator);
  filteredWrite("megabyteMillis", includeFilter, aFlow.getMegabyteMillis(), jsonGenerator);
  filteredWrite("cost", includeFilter, aFlow.getCost(), jsonGenerator);
  filteredWrite("reduceShuffleBytes", includeFilter, aFlow.getReduceShuffleBytes(), jsonGenerator);
  filteredWrite("duration", includeFilter, aFlow.getDuration(), jsonGenerator);
  filteredWrite("wallClockTime", includeFilter, aFlow.getWallClockTime(), jsonGenerator);
  filteredWrite("cluster", includeFilter, aFlow.getCluster(), jsonGenerator);
  filteredWrite("appId", includeFilter, aFlow.getAppId(), jsonGenerator);
  filteredWrite("runId", includeFilter, aFlow.getRunId(), jsonGenerator);
  filteredWrite("version", includeFilter, aFlow.getVersion(), jsonGenerator);
  filteredWrite("hadoopVersion", includeFilter, aFlow.getHadoopVersion(), jsonGenerator);
  if (selectedSerialization == SerializationContext.DetailLevel.EVERYTHING) {
    filteredWrite("submitTime", includeFilter, aFlow.getSubmitTime(), jsonGenerator);
    filteredWrite("launchTime", includeFilter, aFlow.getLaunchTime(), jsonGenerator);
    filteredWrite("finishTime", includeFilter, aFlow.getFinishTime(), jsonGenerator);
  }
  filteredWrite(Constants.HRAVEN_QUEUE, includeFilter, aFlow.getQueue(), jsonGenerator);
  filteredWrite("counters", includeFilter, aFlow.getCounters(), jsonGenerator);
  filteredWrite("mapCounters", includeFilter, aFlow.getMapCounters(), jsonGenerator);
  filteredWrite("reduceCounters", includeFilter, aFlow.getReduceCounters(), jsonGenerator);
  // if flag, include job details
  if ((selectedSerialization == SerializationContext.DetailLevel.FLOW_SUMMARY_STATS_WITH_JOB_STATS)
      || (selectedSerialization == SerializationContext.DetailLevel.EVERYTHING)) {
    jsonGenerator.writeFieldName("jobs");
    jsonGenerator.writeObject(aFlow.getJobs());
  }
  jsonGenerator.writeEndObject();
}