org.apache.hadoop.metrics.spi.OutputRecord Java Examples
The following examples show how to use
org.apache.hadoop.metrics.spi.OutputRecord.
You can vote up the examples you like or vote down the ones you don't like,
and you can go to the original project or source file by following the links above each example. You may also check out the related API usage on the sidebar.
Example #1
Source File: TestMetricsServlet.java From RDFS with Apache License 2.0 | 6 votes |
/**
 * Initializes, for testing, two NoEmitMetricsContext's, and adds one
 * record (two tags, two metrics) to the first of them, capturing the
 * resulting {@link OutputRecord} in the {@code outputRecord} field.
 *
 * @throws IOException if context initialization fails
 */
@Override  // consistency fix: the sibling versions of this test declare @Override on setUp
public void setUp() throws IOException {
  nc1 = new NoEmitMetricsContext();
  nc1.init("test1", ContextFactory.getFactory());
  nc2 = new NoEmitMetricsContext();
  nc2.init("test2", ContextFactory.getFactory());

  contexts = new ArrayList<MetricsContext>();
  contexts.add(nc1);
  contexts.add(nc2);

  // Populate a single record in the first context only.
  MetricsRecord rec = nc1.createRecord("testRecord");
  rec.setTag("testTag1", "testTagValue1");
  rec.setTag("testTag2", "testTagValue2");
  rec.setMetric("testMetric1", 1);
  rec.setMetric("testMetric2", 33);
  rec.update();

  // Exactly one record name, with exactly one output record under it.
  Map<String, Collection<OutputRecord>> all = nc1.getAllRecords();
  assertEquals(1, all.size());
  assertEquals(1, all.values().size());
  Collection<OutputRecord> records = all.values().iterator().next();
  assertEquals(1, records.size());
  outputRecord = records.iterator().next();
}
Example #2
Source File: GangliaContext.java From hadoop-gpu with Apache License 2.0 | 6 votes |
public void emitRecord(String contextName, String recordName, OutputRecord outRec) throws IOException { // Setup so that the records have the proper leader names so they are // unambiguous at the ganglia level, and this prevents a lot of rework StringBuilder sb = new StringBuilder(); sb.append(contextName); sb.append('.'); sb.append(recordName); sb.append('.'); int sbBaseLen = sb.length(); // emit each metric in turn for (String metricName : outRec.getMetricNames()) { Object metric = outRec.getMetric(metricName); String type = typeTable.get(metric.getClass()); if (type != null) { sb.append(metricName); emitMetric(sb.toString(), type, metric.toString()); sb.setLength(sbBaseLen); } else { LOG.warn("Unknown metrics type: " + metric.getClass()); } } }
Example #3
Source File: FileContext.java From hadoop-gpu with Apache License 2.0 | 6 votes |
/**
 * Emits a metrics record to a file as one line of the form
 * "context.record: tag1=v1, tag2=v2, metric1=m1, ...".
 *
 * @param contextName name of the metrics context
 * @param recordName  name of the record within the context
 * @param outRec      the record whose tags and metrics are written
 */
public void emitRecord(String contextName, String recordName,
    OutputRecord outRec) {
  writer.print(contextName);
  writer.print(".");
  writer.print(recordName);
  // The first item is preceded by ": ", all later ones by ", ".
  String separator = ": ";
  for (String tagName : outRec.getTagNames()) {
    writer.print(separator);
    writer.print(tagName);
    writer.print("=");
    writer.print(outRec.getTag(tagName));
    separator = ", ";
  }
  for (String metricName : outRec.getMetricNames()) {
    writer.print(separator);
    writer.print(metricName);
    writer.print("=");
    writer.print(outRec.getMetric(metricName));
    separator = ", ";
  }
  writer.println();
}
Example #4
Source File: MetricsServlet.java From RDFS with Apache License 2.0 | 6 votes |
/**
 * Collects all metric data, and returns a map:
 * contextName -&gt; recordName -&gt; [ (tag-&gt;tagValue), (metric-&gt;metricValue) ].
 * The values are either String or Number; the leaf value is a list of
 * TagsMetricsPair.
 *
 * @param contexts the metrics contexts to read records from
 * @return sorted map of context name to per-record tag/metric pairs
 * @throws IOException if reading records from a context fails
 */
Map<String, Map<String, List<TagsMetricsPair>>> makeMap(
    Collection<MetricsContext> contexts) throws IOException {
  Map<String, Map<String, List<TagsMetricsPair>>> result =
      new TreeMap<String, Map<String, List<TagsMetricsPair>>>();
  for (MetricsContext context : contexts) {
    // TreeMap keeps record names sorted for stable display.
    Map<String, List<TagsMetricsPair>> perRecord =
        new TreeMap<String, List<TagsMetricsPair>>();
    result.put(context.getContextName(), perRecord);
    for (Map.Entry<String, Collection<OutputRecord>> entry :
        context.getAllRecords().entrySet()) {
      List<TagsMetricsPair> pairs = new ArrayList<TagsMetricsPair>();
      for (OutputRecord rec : entry.getValue()) {
        pairs.add(new TagsMetricsPair(rec.getTagsCopy(), rec.getMetricsCopy()));
      }
      perRecord.put(entry.getKey(), pairs);
    }
  }
  return result;
}
Example #5
Source File: GangliaContext.java From RDFS with Apache License 2.0 | 6 votes |
public void emitRecord(String contextName, String recordName, OutputRecord outRec) throws IOException { // Setup so that the records have the proper leader names so they are // unambiguous at the ganglia level, and this prevents a lot of rework StringBuilder sb = new StringBuilder(); sb.append(contextName); sb.append('.'); sb.append(recordName); sb.append('.'); int sbBaseLen = sb.length(); // emit each metric in turn for (String metricName : outRec.getMetricNames()) { Object metric = outRec.getMetric(metricName); String type = typeTable.get(metric.getClass()); if (type != null) { sb.append(metricName); emitMetric(sb.toString(), type, metric.toString()); sb.setLength(sbBaseLen); } else { LOG.warn("Unknown metrics type: " + metric.getClass()); } } }
Example #6
Source File: TestMetricsServlet.java From big-c with Apache License 2.0 | 6 votes |
/**
 * Initializes, for testing, two NoEmitMetricsContext's, and adds one
 * record (two tags, two metrics) to the first of them, keeping the
 * resulting {@link OutputRecord} for the test methods.
 *
 * @throws IOException if context initialization fails
 */
@Override
public void setUp() throws IOException {
  nc1 = new NoEmitMetricsContext();
  nc1.init("test1", ContextFactory.getFactory());
  nc2 = new NoEmitMetricsContext();
  nc2.init("test2", ContextFactory.getFactory());

  contexts = new ArrayList<MetricsContext>();
  contexts.add(nc1);
  contexts.add(nc2);

  // Only the first context receives a record.
  MetricsRecord rec = nc1.createRecord("testRecord");
  rec.setTag("testTag1", "testTagValue1");
  rec.setTag("testTag2", "testTagValue2");
  rec.setMetric("testMetric1", 1);
  rec.setMetric("testMetric2", 33);
  rec.update();

  // Verify there is exactly one record name with one output record.
  Map<String, Collection<OutputRecord>> all = nc1.getAllRecords();
  assertEquals(1, all.size());
  assertEquals(1, all.values().size());
  Collection<OutputRecord> records = all.values().iterator().next();
  assertEquals(1, records.size());
  outputRecord = records.iterator().next();
}
Example #7
Source File: MetricsServlet.java From big-c with Apache License 2.0 | 6 votes |
/**
 * Collects all metric data, and returns a map:
 * contextName -&gt; recordName -&gt; [ (tag-&gt;tagValue), (metric-&gt;metricValue) ].
 * The values are either String or Number; the final value is implemented
 * as a list of TagsMetricsPair.
 *
 * @param contexts the metrics contexts to read records from
 * @return sorted map of context name to per-record tag/metric pairs
 * @throws IOException if reading records from a context fails
 */
Map<String, Map<String, List<TagsMetricsPair>>> makeMap(
    Collection<MetricsContext> contexts) throws IOException {
  Map<String, Map<String, List<TagsMetricsPair>>> byContext =
      new TreeMap<String, Map<String, List<TagsMetricsPair>>>();
  for (MetricsContext ctx : contexts) {
    // Sorted per-record map gives deterministic output ordering.
    Map<String, List<TagsMetricsPair>> byRecord =
        new TreeMap<String, List<TagsMetricsPair>>();
    byContext.put(ctx.getContextName(), byRecord);
    for (Map.Entry<String, Collection<OutputRecord>> entry :
        ctx.getAllRecords().entrySet()) {
      List<TagsMetricsPair> pairs = new ArrayList<TagsMetricsPair>();
      for (OutputRecord rec : entry.getValue()) {
        pairs.add(new TagsMetricsPair(rec.getTagsCopy(), rec.getMetricsCopy()));
      }
      byRecord.put(entry.getKey(), pairs);
    }
  }
  return byContext;
}
Example #8
Source File: TestMetricsServlet.java From hadoop with Apache License 2.0 | 6 votes |
/**
 * Initializes, for testing, two NoEmitMetricsContext's, and adds one
 * record (two tags, two metrics) to the first of them; the single
 * resulting {@link OutputRecord} is stored for later assertions.
 *
 * @throws IOException if context initialization fails
 */
@Override
public void setUp() throws IOException {
  nc1 = new NoEmitMetricsContext();
  nc1.init("test1", ContextFactory.getFactory());
  nc2 = new NoEmitMetricsContext();
  nc2.init("test2", ContextFactory.getFactory());

  contexts = new ArrayList<MetricsContext>();
  contexts.add(nc1);
  contexts.add(nc2);

  // Record goes into nc1 only; nc2 stays empty.
  MetricsRecord rec = nc1.createRecord("testRecord");
  rec.setTag("testTag1", "testTagValue1");
  rec.setTag("testTag2", "testTagValue2");
  rec.setMetric("testMetric1", 1);
  rec.setMetric("testMetric2", 33);
  rec.update();

  // Expect a single record name mapping to a single output record.
  Map<String, Collection<OutputRecord>> all = nc1.getAllRecords();
  assertEquals(1, all.size());
  assertEquals(1, all.values().size());
  Collection<OutputRecord> records = all.values().iterator().next();
  assertEquals(1, records.size());
  outputRecord = records.iterator().next();
}
Example #9
Source File: MetricsServlet.java From hadoop with Apache License 2.0 | 6 votes |
/**
 * Collects all metric data, and returns a map:
 * contextName -&gt; recordName -&gt; [ (tag-&gt;tagValue), (metric-&gt;metricValue) ].
 * The values are either String or Number; each leaf is a list of
 * TagsMetricsPair.
 *
 * @param contexts the metrics contexts to read records from
 * @return sorted map of context name to per-record tag/metric pairs
 * @throws IOException if reading records from a context fails
 */
Map<String, Map<String, List<TagsMetricsPair>>> makeMap(
    Collection<MetricsContext> contexts) throws IOException {
  Map<String, Map<String, List<TagsMetricsPair>>> out =
      new TreeMap<String, Map<String, List<TagsMetricsPair>>>();
  for (MetricsContext context : contexts) {
    Map<String, List<TagsMetricsPair>> recordsMap =
        new TreeMap<String, List<TagsMetricsPair>>();
    out.put(context.getContextName(), recordsMap);
    for (Map.Entry<String, Collection<OutputRecord>> r :
        context.getAllRecords().entrySet()) {
      // Snapshot tags and metrics of every output record for this name.
      List<TagsMetricsPair> pairList = new ArrayList<TagsMetricsPair>();
      for (OutputRecord rec : r.getValue()) {
        TagMap tags = rec.getTagsCopy();
        MetricMap values = rec.getMetricsCopy();
        pairList.add(new TagsMetricsPair(tags, values));
      }
      recordsMap.put(r.getKey(), pairList);
    }
  }
  return out;
}
Example #10
Source File: GangliaContext.java From big-c with Apache License 2.0 | 5 votes |
@Override @InterfaceAudience.Private public void emitRecord(String contextName, String recordName, OutputRecord outRec) throws IOException { // Setup so that the records have the proper leader names so they are // unambiguous at the ganglia level, and this prevents a lot of rework StringBuilder sb = new StringBuilder(); sb.append(contextName); sb.append('.'); if (contextName.equals("jvm") && outRec.getTag("processName") != null) { sb.append(outRec.getTag("processName")); sb.append('.'); } sb.append(recordName); sb.append('.'); int sbBaseLen = sb.length(); // emit each metric in turn for (String metricName : outRec.getMetricNames()) { Object metric = outRec.getMetric(metricName); String type = typeTable.get(metric.getClass()); if (type != null) { sb.append(metricName); emitMetric(sb.toString(), type, metric.toString()); sb.setLength(sbBaseLen); } else { LOG.warn("Unknown metrics type: " + metric.getClass()); } } }
Example #11
Source File: ClusterManagerMetricsVerifier.java From RDFS with Apache License 2.0 | 5 votes |
/**
 * Asserts that the named metric in the cluster-manager metrics context
 * currently equals the expected value.
 *
 * @param name        metric name to check
 * @param expectValue expected integer value of the metric
 * @throws Exception if updating or reading the metrics context fails
 */
private void verifyMetrics(String name, int expectValue) throws Exception {
  MetricsContext ctx =
      MetricsUtil.getContext(ClusterManagerMetrics.CONTEXT_NAME);
  // Push the latest values into the context before reading them back.
  cm.metrics.doUpdates(ctx);
  OutputRecord rec = ctx.getAllRecords()
      .get(ClusterManagerMetrics.CONTEXT_NAME).iterator().next();
  Assert.assertEquals(expectValue, rec.getMetric(name).intValue());
}
Example #12
Source File: JMXContext.java From RDFS with Apache License 2.0 | 5 votes |
/**
 * Routes an output record to the JMX MBean registered for its record
 * name, if any; records without a matching MBean are ignored.
 *
 * @param contextName name of the metrics context (unused here)
 * @param recordName  name used to look up the target MBean
 * @param outRec      the record to forward
 * @throws IOException declared by the superclass contract
 */
@Override
protected void emitRecord(String contextName, String recordName,
    OutputRecord outRec) throws IOException {
  JMXContextMBean bean = JMXBeans.get(recordName);
  if (bean == null) {
    return;  // no MBean registered for this record name
  }
  bean.processMetricsRecord(outRec);
}
Example #13
Source File: JMXContextMBean.java From RDFS with Apache License 2.0 | 5 votes |
/**
 * Copies every metric value from the given output record into this
 * bean's metrics map, replacing any previous value for the same name.
 *
 * @param outRec the record whose metrics are absorbed
 */
public void processMetricsRecord(OutputRecord outRec) {
  for (String name : outRec.getMetricNames()) {
    metrics.put(name, outRec.getMetric(name));
  }
}
Example #14
Source File: GangliaContext.java From hadoop with Apache License 2.0 | 5 votes |
@Override @InterfaceAudience.Private public void emitRecord(String contextName, String recordName, OutputRecord outRec) throws IOException { // Setup so that the records have the proper leader names so they are // unambiguous at the ganglia level, and this prevents a lot of rework StringBuilder sb = new StringBuilder(); sb.append(contextName); sb.append('.'); if (contextName.equals("jvm") && outRec.getTag("processName") != null) { sb.append(outRec.getTag("processName")); sb.append('.'); } sb.append(recordName); sb.append('.'); int sbBaseLen = sb.length(); // emit each metric in turn for (String metricName : outRec.getMetricNames()) { Object metric = outRec.getMetric(metricName); String type = typeTable.get(metric.getClass()); if (type != null) { sb.append(metricName); emitMetric(sb.toString(), type, metric.toString()); sb.setLength(sbBaseLen); } else { LOG.warn("Unknown metrics type: " + metric.getClass()); } } }
Example #15
Source File: MetricsContext.java From hadoop with Apache License 2.0 | 2 votes |
/**
 * Retrieves all the records managed by this MetricsContext, keyed by
 * record name.  Useful for monitoring systems that are polling-based.
 *
 * @return a non-null map from each record name to the collection of
 *         {@link OutputRecord}s managed under that name
 */
Map<String, Collection<OutputRecord>> getAllRecords();
Example #16
Source File: MetricsContext.java From RDFS with Apache License 2.0 | 2 votes |
/**
 * Retrieves all the records managed by this MetricsContext, keyed by
 * record name.  Useful for monitoring systems that are polling-based.
 *
 * @return a non-null map from each record name to the collection of
 *         {@link OutputRecord}s managed under that name
 */
public abstract Map<String, Collection<OutputRecord>> getAllRecords();
Example #17
Source File: MetricsContext.java From big-c with Apache License 2.0 | 2 votes |
/**
 * Retrieves all the records managed by this MetricsContext, keyed by
 * record name.  Useful for monitoring systems that are polling-based.
 *
 * @return a non-null map from each record name to the collection of
 *         {@link OutputRecord}s managed under that name
 */
Map<String, Collection<OutputRecord>> getAllRecords();