Java Code Examples for com.tdunning.math.stats.TDigest#createMergingDigest()
The following examples show how to use com.tdunning.math.stats.TDigest#createMergingDigest().
Each example is taken from an open-source project; the source file and license are noted above each snippet.
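Before the project-specific examples, here is a minimal, self-contained sketch of the calls these snippets rely on: create a merging digest with a compression factor, add values, query quantiles, and serialize the digest. The class name, the compression value of 100, and the sample data are illustrative only and do not come from any of the projects below.

import java.nio.ByteBuffer;

import com.tdunning.math.stats.TDigest;

public class TDigestQuickStart {
  public static void main(String[] args) {
    // Create a merging digest; the compression factor trades size for accuracy
    // (larger values keep more centroids and give tighter quantile estimates).
    TDigest digest = TDigest.createMergingDigest(100);

    // Add observations one at a time.
    for (int i = 1; i <= 1000; i++) {
      digest.add(i);
    }

    // Query approximate quantiles.
    System.out.println("p50 ~ " + digest.quantile(0.50));
    System.out.println("p99 ~ " + digest.quantile(0.99));

    // Serialize to a byte buffer, as several of the Pinot examples below do.
    ByteBuffer buffer = ByteBuffer.allocate(digest.byteSize());
    digest.asBytes(buffer);
  }
}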
Example 1
Source File: ObjectSerDeUtilsTest.java From incubator-pinot with Apache License 2.0
@Test
public void testTDigest() {
  for (int i = 0; i < NUM_ITERATIONS; i++) {
    TDigest expected = TDigest.createMergingDigest(PercentileTDigestAggregationFunction.DEFAULT_TDIGEST_COMPRESSION);
    int size = RANDOM.nextInt(100) + 1;
    for (int j = 0; j < size; j++) {
      expected.add(RANDOM.nextDouble());
    }

    byte[] bytes = ObjectSerDeUtils.serialize(expected);
    TDigest actual = ObjectSerDeUtils.deserialize(bytes, ObjectSerDeUtils.ObjectType.TDigest);

    for (int j = 0; j <= 100; j++) {
      assertEquals(actual.quantile(j / 100.0), expected.quantile(j / 100.0), 1e-5);
    }
  }
}
Example 2
Source File: PercentileTDigestAggregationFunction.java From incubator-pinot with Apache License 2.0
@Override
public TDigest extractAggregationResult(AggregationResultHolder aggregationResultHolder) {
  TDigest tDigest = aggregationResultHolder.getResult();
  if (tDigest == null) {
    return TDigest.createMergingDigest(DEFAULT_TDIGEST_COMPRESSION);
  } else {
    return tDigest;
  }
}
Example 3
Source File: PercentileTDigestAggregationFunction.java From incubator-pinot with Apache License 2.0
@Override
public TDigest extractGroupByResult(GroupByResultHolder groupByResultHolder, int groupKey) {
  TDigest tDigest = groupByResultHolder.getResult(groupKey);
  if (tDigest == null) {
    return TDigest.createMergingDigest(DEFAULT_TDIGEST_COMPRESSION);
  } else {
    return tDigest;
  }
}
Example 4
Source File: PercentileTDigestAggregationFunction.java From incubator-pinot with Apache License 2.0
/**
 * Returns the TDigest from the result holder or creates a new one with default compression if it does not exist.
 *
 * @param aggregationResultHolder Result holder
 * @return TDigest from the result holder
 */
protected static TDigest getDefaultTDigest(AggregationResultHolder aggregationResultHolder) {
  TDigest tDigest = aggregationResultHolder.getResult();
  if (tDigest == null) {
    tDigest = TDigest.createMergingDigest(DEFAULT_TDIGEST_COMPRESSION);
    aggregationResultHolder.setValue(tDigest);
  }
  return tDigest;
}
Example 5
Source File: PercentileTDigestAggregationFunction.java From incubator-pinot with Apache License 2.0
/**
 * Returns the TDigest for the given group key if exists, or creates a new one with default compression.
 *
 * @param groupByResultHolder Result holder
 * @param groupKey Group key for which to return the TDigest
 * @return TDigest for the group key
 */
protected static TDigest getDefaultTDigest(GroupByResultHolder groupByResultHolder, int groupKey) {
  TDigest tDigest = groupByResultHolder.getResult(groupKey);
  if (tDigest == null) {
    tDigest = TDigest.createMergingDigest(DEFAULT_TDIGEST_COMPRESSION);
    groupByResultHolder.setValueForKey(groupKey, tDigest);
  }
  return tDigest;
}
Example 6
Source File: PercentileTDigestValueAggregator.java From incubator-pinot with Apache License 2.0
@Override
public TDigest getInitialAggregatedValue(Object rawValue) {
  TDigest initialValue;
  if (rawValue instanceof byte[]) {
    byte[] bytes = (byte[]) rawValue;
    initialValue = deserializeAggregatedValue(bytes);
    _maxByteSize = Math.max(_maxByteSize, bytes.length);
  } else {
    initialValue = TDigest.createMergingDigest(PercentileTDigestAggregationFunction.DEFAULT_TDIGEST_COMPRESSION);
    initialValue.add(((Number) rawValue).doubleValue());
    _maxByteSize = Math.max(_maxByteSize, initialValue.byteSize());
  }
  return initialValue;
}
Example 7
Source File: PreAggregatedPercentileTDigestStarTreeV2Test.java From incubator-pinot with Apache License 2.0
@Override
Object getRandomRawValue(Random random) {
  TDigest tDigest = TDigest.createMergingDigest(COMPRESSION);
  tDigest.add(random.nextLong());
  tDigest.add(random.nextLong());
  return ObjectSerDeUtils.TDIGEST_SER_DE.serialize(tDigest);
}
Example 8
Source File: SegmentGenerationWithBytesTypeTest.java From incubator-pinot with Apache License 2.0
/**
 * Build Avro file containing serialized TDigest bytes.
 *
 * @param schema Schema of data (one fixed and one variable column)
 * @param _fixedExpected Serialized bytes of fixed length column are populated here
 * @param _varExpected Serialized bytes of variable length column are populated here
 * @throws IOException
 */
private void buildAvro(Schema schema, List<byte[]> _fixedExpected, List<byte[]> _varExpected)
    throws IOException {
  org.apache.avro.Schema avroSchema = AvroUtils.getAvroSchemaFromPinotSchema(schema);
  try (DataFileWriter<GenericData.Record> recordWriter = new DataFileWriter<>(new GenericDatumWriter<>(avroSchema))) {
    if (!new File(AVRO_DIR_NAME).mkdir()) {
      throw new RuntimeException("Unable to create test directory: " + AVRO_DIR_NAME);
    }
    recordWriter.create(avroSchema, new File(AVRO_DIR_NAME, AVRO_NAME));

    for (int i = 0; i < NUM_ROWS; i++) {
      GenericData.Record record = new GenericData.Record(avroSchema);
      TDigest tDigest = TDigest.createMergingDigest(PercentileTDigestAggregationFunction.DEFAULT_TDIGEST_COMPRESSION);
      tDigest.add(_random.nextDouble());

      ByteBuffer buffer = ByteBuffer.allocate(tDigest.byteSize());
      tDigest.asBytes(buffer);
      _fixedExpected.add(buffer.array());
      buffer.flip();
      record.put(FIXED_BYTES_UNSORTED_COLUMN, buffer);

      if (i % 2 == 0) {
        tDigest.add(_random.nextDouble());
      }

      buffer = ByteBuffer.allocate(tDigest.byteSize());
      tDigest.asBytes(buffer);
      _varExpected.add(buffer.array());
      buffer.flip();
      record.put(VARIABLE_BYTES_COLUMN, buffer);

      recordWriter.append(record);
    }
  }
}
Example 9
Source File: PercentileTDigestMVQueriesTest.java From incubator-pinot with Apache License 2.0
@Override
protected void buildSegment()
    throws Exception {
  List<GenericRow> rows = new ArrayList<>(NUM_ROWS);
  for (int i = 0; i < NUM_ROWS; i++) {
    HashMap<String, Object> valueMap = new HashMap<>();

    int numMultiValues = RANDOM.nextInt(MAX_NUM_MULTI_VALUES) + 1;
    Double[] values = new Double[numMultiValues];
    TDigest tDigest = TDigest.createMergingDigest(PercentileTDigestAggregationFunction.DEFAULT_TDIGEST_COMPRESSION);
    for (int j = 0; j < numMultiValues; j++) {
      double value = RANDOM.nextDouble() * VALUE_RANGE;
      values[j] = value;
      tDigest.add(value);
    }
    valueMap.put(DOUBLE_COLUMN, values);

    ByteBuffer byteBuffer = ByteBuffer.allocate(tDigest.byteSize());
    tDigest.asBytes(byteBuffer);
    valueMap.put(TDIGEST_COLUMN, byteBuffer.array());

    String group = GROUPS[RANDOM.nextInt(GROUPS.length)];
    valueMap.put(GROUP_BY_COLUMN, group);

    GenericRow genericRow = new GenericRow();
    genericRow.init(valueMap);
    rows.add(genericRow);
  }

  Schema schema = new Schema();
  schema.addField(new DimensionFieldSpec(DOUBLE_COLUMN, FieldSpec.DataType.DOUBLE, false));
  schema.addField(new MetricFieldSpec(TDIGEST_COLUMN, FieldSpec.DataType.BYTES));
  schema.addField(new DimensionFieldSpec(GROUP_BY_COLUMN, FieldSpec.DataType.STRING, true));
  TableConfig tableConfig = new TableConfigBuilder(TableType.OFFLINE).setTableName(TABLE_NAME).build();

  SegmentGeneratorConfig config = new SegmentGeneratorConfig(tableConfig, schema);
  config.setOutDir(INDEX_DIR.getPath());
  config.setTableName(TABLE_NAME);
  config.setSegmentName(SEGMENT_NAME);
  config.setRawIndexCreationColumns(Collections.singletonList(TDIGEST_COLUMN));

  SegmentIndexCreationDriverImpl driver = new SegmentIndexCreationDriverImpl();
  try (RecordReader recordReader = new GenericRowRecordReader(rows)) {
    driver.init(config, recordReader);
    driver.build();
  }
}
Example 10
Source File: PercentileTDigestQueriesTest.java From incubator-pinot with Apache License 2.0
protected void buildSegment()
    throws Exception {
  List<GenericRow> rows = new ArrayList<>(NUM_ROWS);
  for (int i = 0; i < NUM_ROWS; i++) {
    HashMap<String, Object> valueMap = new HashMap<>();

    double value = RANDOM.nextDouble() * VALUE_RANGE;
    valueMap.put(DOUBLE_COLUMN, value);

    TDigest tDigest = TDigest.createMergingDigest(PercentileTDigestAggregationFunction.DEFAULT_TDIGEST_COMPRESSION);
    tDigest.add(value);
    ByteBuffer byteBuffer = ByteBuffer.allocate(tDigest.byteSize());
    tDigest.asBytes(byteBuffer);
    valueMap.put(TDIGEST_COLUMN, byteBuffer.array());

    String group = GROUPS[RANDOM.nextInt(GROUPS.length)];
    valueMap.put(GROUP_BY_COLUMN, group);

    GenericRow genericRow = new GenericRow();
    genericRow.init(valueMap);
    rows.add(genericRow);
  }

  Schema schema = new Schema();
  schema.addField(new MetricFieldSpec(DOUBLE_COLUMN, FieldSpec.DataType.DOUBLE));
  schema.addField(new MetricFieldSpec(TDIGEST_COLUMN, FieldSpec.DataType.BYTES));
  schema.addField(new DimensionFieldSpec(GROUP_BY_COLUMN, FieldSpec.DataType.STRING, true));
  TableConfig tableConfig = new TableConfigBuilder(TableType.OFFLINE).setTableName(TABLE_NAME).build();

  SegmentGeneratorConfig config = new SegmentGeneratorConfig(tableConfig, schema);
  config.setOutDir(INDEX_DIR.getPath());
  config.setTableName(TABLE_NAME);
  config.setSegmentName(SEGMENT_NAME);
  config.setRawIndexCreationColumns(Collections.singletonList(TDIGEST_COLUMN));

  SegmentIndexCreationDriverImpl driver = new SegmentIndexCreationDriverImpl();
  try (RecordReader recordReader = new GenericRowRecordReader(rows)) {
    driver.init(config, recordReader);
    driver.build();
  }
}
Example 11
Source File: TDigestNumericHistogram.java From geowave with Apache License 2.0
public TDigestNumericHistogram() {
  super();
  tdigest = TDigest.createMergingDigest(DEFAULT_COMPRESSION);
}