com.amazonaws.services.cloudwatch.model.StatisticSet Java Examples

The following examples show how to use com.amazonaws.services.cloudwatch.model.StatisticSet. Each example is taken from an open-source project; the source file, project, and license are noted above it.
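Before the project examples, here is a minimal, self-contained sketch of the basic pattern: populate a StatisticSet (the CloudWatch API requires all four members, Minimum, Maximum, SampleCount, and Sum, to be set together) and publish it with a PutMetricDataRequest. The namespace, metric name, and sample values below are placeholders for illustration, not taken from any of the projects.

import com.amazonaws.services.cloudwatch.AmazonCloudWatch;
import com.amazonaws.services.cloudwatch.AmazonCloudWatchClientBuilder;
import com.amazonaws.services.cloudwatch.model.MetricDatum;
import com.amazonaws.services.cloudwatch.model.PutMetricDataRequest;
import com.amazonaws.services.cloudwatch.model.StandardUnit;
import com.amazonaws.services.cloudwatch.model.StatisticSet;

public class StatisticSetQuickStart {
    public static void main(String[] args) {
        // Aggregate three latency samples (80ms, 100ms, 120ms) into one StatisticSet.
        StatisticSet stats = new StatisticSet()
                .withMinimum(80.0)
                .withMaximum(120.0)
                .withSampleCount(3.0)
                .withSum(300.0);

        MetricDatum datum = new MetricDatum()
                .withMetricName("RequestLatency")   // placeholder metric name
                .withUnit(StandardUnit.Milliseconds)
                .withStatisticValues(stats);

        AmazonCloudWatch cloudWatch = AmazonCloudWatchClientBuilder.defaultClient();
        cloudWatch.putMetricData(new PutMetricDataRequest()
                .withNamespace("MyApp")             // placeholder namespace
                .withMetricData(datum));
    }
}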
Example #1
Source File: MetricDataAggregator.java    From swage with Apache License 2.0
/**
 * Add a metric event to be aggregated.
 * Events with the same name, unit, and attributes will have their values
 * aggregated into {@link StatisticSet}s, with the aggregated data
 * available via {@link #flush}.
 *
 * @param context Metric context to use for dimension information
 * @param name Metric name
 * @param value Recorded value for the metric event
 * @param unit Unit for interpreting the value
 */
public void add(
        final MetricRecorder.RecorderContext context,
        final Metric name,
        final double value,
        final StandardUnit unit)
{
    //TODO: avoid recomputing dimensions on every call for the same context (cache them?)
    List<Dimension> dimensions = dimensionMapper.getDimensions(name, context);

    DatumKey key = new DatumKey(name.toString(), unit, dimensions);
    statisticsMap.merge(
            key,
            new StatisticSet()
                    .withMaximum(value)
                    .withMinimum(value)
                    .withSampleCount(1D)
                    .withSum(value),
            MetricDataAggregator::sum);
}
 
Example #2
Source File: MetricDataAggregator.java    From swage with Apache License 2.0
/**
 * Flush all the current aggregated MetricDatum and return as a list.
 * This is safe to call concurrently with {@link #add}.
 * All data added prior to a flush call will be included in the returned aggregate.
 * Any data added after the flush call returns will be included in a subsequent flush.
 * Data added while a flush call is processing may be included in the current flush
 * or a subsequent flush, but will not be included twice.
 *
 * The timestamp on the aggregated data will be the time it was flushed,
 * not the time of any of the original metric events.
 *
 * @return list of all data aggregated since the last flush
 */
public List<MetricDatum> flush() {
    if (statisticsMap.size() == 0) {
        return Collections.emptyList();
    }

    // Capture all the current metrics, as represented by the set of keys
    // at this time in the statisticsMap.
    // Note that this iterates over the key set of the underlying map, and
    // removes keys from the map at the same time. It is possible keys may
    // be added during this iteration, or data for keys modified between
    // a key being chosen for iteration and being removed from the map.
    // This is ok.  Any new keys will be picked up on subsequent flushes.
    //TODO: use two maps and swap between, to ensure 'perfect' segmentation?
    List<MetricDatum> metricData = new ArrayList<>();
    for (DatumKey key : statisticsMap.keySet()) {
        StatisticSet value = statisticsMap.remove(key);
        //TODO: better to have no timestamp at all?
        MetricDatum metricDatum = key.getDatum().withTimestamp(Date.from(Instant.now()))
                                                .withStatisticValues(value);
        metricData.add(metricDatum);
    }

    return metricData;
}
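A typical consumer of flush() is a periodic publisher. The following fragment is a sketch under assumptions: aggregator is the MetricDataAggregator above, cloudWatch is an AmazonCloudWatch client, and the namespace and interval are placeholders; none of this is part of the swage source. It uses java.util.concurrent.Executors, ScheduledExecutorService, and TimeUnit.

ScheduledExecutorService scheduler = Executors.newSingleThreadScheduledExecutor();
scheduler.scheduleAtFixedRate(() -> {
    List<MetricDatum> batch = aggregator.flush();
    if (!batch.isEmpty()) {
        // PutMetricDataRequest.withMetricData also accepts a collection of MetricDatum
        cloudWatch.putMetricData(new PutMetricDataRequest()
                .withNamespace("MyApp")   // placeholder namespace
                .withMetricData(batch));
    }
}, 60, 60, TimeUnit.SECONDS);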
 
Example #3
Source File: CloudWatchRecorderTest.java    From swage with Apache License 2.0
private MetricDatum makeDatum(
        final String id,
        final String name,
        final double sum,
        final double min,
        final double max,
        final int count,
        final StandardUnit unit)
{
    MetricDatum md = new MetricDatum().withMetricName(name).withUnit(unit);

    final StatisticSet statSet = new StatisticSet()
            .withSampleCount(Double.valueOf(count))
            .withSum(sum)
            .withMinimum(min)
            .withMaximum(max);
    md.setStatisticValues(statSet);

    List<Dimension> dimensions = new ArrayList<>(1);
    Dimension trace = new Dimension().withName(ContextData.ID.name).withValue(id);

    dimensions.add(trace);
    md.setDimensions(dimensions);

    return md;
}
 
Example #4
Source File: CloudWatchReporter.java    From metrics-cloudwatch with Apache License 2.0
/**
 * @param rescale multiplier applied to the submitted sum, minimum, and maximum; 1.0 is the identity (no rescaling).
 */
void reportSampling(Map.Entry<String, ? extends Sampling> entry, String typeDimValue, double rescale, List<MetricDatum> data) {
    Sampling metric = entry.getValue();
    Snapshot snapshot = metric.getSnapshot();
    double scaledSum = sum(snapshot.getValues()) * rescale;
    final StatisticSet statisticSet = new StatisticSet()
            .withSum(scaledSum)
            .withSampleCount((double) snapshot.size())
            .withMinimum((double) snapshot.getMin() * rescale)
            .withMaximum((double) snapshot.getMax() * rescale);

    DemuxedKey key = new DemuxedKey(appendGlobalDimensions(entry.getKey()));
    Iterables.addAll(data, key.newDatums(typeDimName, typeDimValue, new Function<MetricDatum, MetricDatum>() {
        @Override
        public MetricDatum apply(MetricDatum datum) {
            return datum.withStatisticValues(statisticSet);
        }
    }));
}
 
Example #5
Source File: MetricDataAggregator.java    From swage with Apache License 2.0
private static StatisticSet sum( StatisticSet v1, StatisticSet v2 ) {
    //TODO: reuse one of the passed sets, and pollute a MetricDatum?
    StatisticSet stats = new StatisticSet();

    stats.setMaximum(Math.max(v1.getMaximum(), v2.getMaximum()));
    stats.setMinimum(Math.min(v1.getMinimum(), v2.getMinimum()));
    stats.setSampleCount(v1.getSampleCount() + v2.getSampleCount());
    stats.setSum(v1.getSum() + v2.getSum());

    return stats;
}
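As a concrete check of the combine step, merging two single-sample sets reproduces what aggregating the raw values directly would give. The snippet below is illustrative only and would have to run inside MetricDataAggregator, since sum is private:

StatisticSet a = new StatisticSet().withMinimum(1.0).withMaximum(1.0).withSampleCount(1.0).withSum(1.0);
StatisticSet b = new StatisticSet().withMinimum(3.0).withMaximum(3.0).withSampleCount(1.0).withSum(3.0);
StatisticSet merged = sum(a, b);
// merged: minimum=1.0, maximum=3.0, sampleCount=2.0, sum=4.0

Note that the statisticsMap.merge call in Example #1 only invokes this function when a value already exists for the key; a first-seen key simply stores its single-sample StatisticSet.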
 
Example #6
Source File: MetricDataAggregatorTest.java    From swage with Apache License 2.0
@Test
public void single() throws Exception {
    final DimensionMapper mapper = new DimensionMapper.Builder()
            .addGlobalDimension(ContextData.ID)
            .build();

    final Metric name = Metric.define("SomeMetric");
    final double value = 3.14;
    final StandardUnit unit = StandardUnit.Terabits;

    final MetricRecorder.RecorderContext context = new MetricRecorder.RecorderContext(ContextData.withId(UUID.randomUUID().toString()).build());

    MetricDataAggregator aggregator = new MetricDataAggregator(mapper);
    aggregator.add(context, name, value, unit);

    List<MetricDatum> ags = aggregator.flush();

    assertEquals("One metric datum should aggregate to one entry", 1, ags.size());
    assertEquals("Metric datum has wrong name", name.toString(), ags.get(0).getMetricName());
    assertEquals("Metric datum has wrong unit", unit.toString(), ags.get(0).getUnit());

    StatisticSet stats = ags.get(0).getStatisticValues();
    assertEquals("Metric datum has wrong stats value", Double.valueOf(value), stats.getSum());
    assertEquals("Metric datum has wrong stats value", Double.valueOf(value), stats.getMinimum());
    assertEquals("Metric datum has wrong stats value", Double.valueOf(value), stats.getMaximum());
    assertEquals("Metric datum has wrong stats count", Double.valueOf(1), stats.getSampleCount());

    assertTrue("Flush with no attributes was non-empty", aggregator.flush().isEmpty());
}
 
Example #7
Source File: PutCloudWatchMetric.java    From nifi with Apache License 2.0
@Override
public void onTrigger(ProcessContext context, ProcessSession session) throws ProcessException {
    FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }
    MetricDatum datum = new MetricDatum();

    try {
        datum.setMetricName(context.getProperty(METRIC_NAME).evaluateAttributeExpressions(flowFile).getValue());
        final String valueString = context.getProperty(VALUE).evaluateAttributeExpressions(flowFile).getValue();
        if (valueString != null) {
            datum.setValue(Double.parseDouble(valueString));
        } else {
            StatisticSet statisticSet = new StatisticSet();
            statisticSet.setMaximum(Double.parseDouble(context.getProperty(MAXIMUM).evaluateAttributeExpressions(flowFile).getValue()));
            statisticSet.setMinimum(Double.parseDouble(context.getProperty(MINIMUM).evaluateAttributeExpressions(flowFile).getValue()));
            statisticSet.setSampleCount(Double.parseDouble(context.getProperty(SAMPLECOUNT).evaluateAttributeExpressions(flowFile).getValue()));
            statisticSet.setSum(Double.parseDouble(context.getProperty(SUM).evaluateAttributeExpressions(flowFile).getValue()));

            datum.setStatisticValues(statisticSet);
        }

        final String timestamp = context.getProperty(TIMESTAMP).evaluateAttributeExpressions(flowFile).getValue();
        if (timestamp != null) {
            datum.setTimestamp(new Date(Long.parseLong(timestamp)));
        }

        final String unit = context.getProperty(UNIT).evaluateAttributeExpressions(flowFile).getValue();
        if (unit != null) {
            datum.setUnit(unit);
        }

        // add dynamic properties as dimensions
        if (!dynamicPropertyNames.isEmpty()) {
            final List<Dimension> dimensions = new ArrayList<>(dynamicPropertyNames.size());
            for (String propertyName : dynamicPropertyNames) {
                final String propertyValue = context.getProperty(propertyName).evaluateAttributeExpressions(flowFile).getValue();
                if (StringUtils.isNotBlank(propertyValue)) {
                    dimensions.add(new Dimension().withName(propertyName).withValue(propertyValue));
                }
            }
            datum.withDimensions(dimensions);
        }

        final PutMetricDataRequest metricDataRequest = new PutMetricDataRequest()
                .withNamespace(context.getProperty(NAMESPACE).evaluateAttributeExpressions(flowFile).getValue())
                .withMetricData(datum);

        putMetricData(metricDataRequest);
        session.transfer(flowFile, REL_SUCCESS);
        getLogger().info("Successfully published cloudwatch metric for {}", new Object[]{flowFile});
    } catch (final Exception e) {
        getLogger().error("Failed to publish cloudwatch metric for {} due to {}", new Object[]{flowFile, e});
        flowFile = session.penalize(flowFile);
        session.transfer(flowFile, REL_FAILURE);
    }

}