@grafana/data#DataFrameView TypeScript Examples
The following examples show how to use @grafana/data#DataFrameView. Each snippet is taken from the grafana-chinese project (Apache License 2.0); the source file and community vote count are noted above each example.
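Before the project examples, here is a minimal usage sketch. It assumes a small hand-built frame; the field names and values are illustrative and not taken from any of the projects below.
import { toDataFrame, DataFrameView } from '@grafana/data';

interface TempRow {
  time: number;
  temperature: number;
}

// Build a small frame; toDataFrame infers field types from the values.
const frame = toDataFrame({
  fields: [
    { name: 'time', values: [100, 200, 300] },
    { name: 'temperature', values: [21.3, 22.1, 20.9] },
  ],
});

// DataFrameView presents the column-oriented frame as row objects.
const view = new DataFrameView<TempRow>(frame);
console.log(view.length); // 3
console.log(view.get(1).temperature); // 22.1
const temps = view.map(row => row.temperature); // [21.3, 22.1, 20.9]
One detail worth knowing: get() reuses a single internal row object, which is why Example #8 below copies the row with a spread ({ ...view.get(...) }) before asserting on it.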
Example #1
Source File: utils.test.ts, from grafana-chinese (Apache License 2.0), 7 votes
describe('news', () => {
test('convert RssFeed to DataFrame', () => {
const frame = feedToDataFrame(grafana20191216);
expect(frame.length).toBe(5);
// Iterate the links
const view = new DataFrameView<NewsItem>(frame);
const links = view.map((item: NewsItem) => {
return item.link;
});
expect(links).toEqual([
'https://grafana.com/blog/2019/12/13/meet-the-grafana-labs-team-aengus-rooney/',
'https://grafana.com/blog/2019/12/12/register-now-grafanacon-2020-is-coming-to-amsterdam-may-13-14/',
'https://grafana.com/blog/2019/12/10/pro-tips-dashboard-navigation-using-links/',
'https://grafana.com/blog/2019/12/09/how-to-do-automatic-annotations-with-grafana-and-loki/',
'https://grafana.com/blog/2019/12/06/meet-the-grafana-labs-team-ward-bekker/',
]);
});
});
Example #2
Source File: datasource.ts, from grafana-chinese (Apache License 2.0), 6 votes
async annotationQuery(options: AnnotationQueryRequest<LokiQuery>): Promise<AnnotationEvent[]> {
if (!options.annotation.expr) {
return [];
}
const interpolatedExpr = this.templateSrv.replace(options.annotation.expr, {}, this.interpolateQueryExpr);
const query = { refId: `annotation-${options.annotation.name}`, expr: interpolatedExpr };
const { data } = await this.runRangeQueryWithFallback(query, options).toPromise();
const annotations: AnnotationEvent[] = [];
for (const frame of data) {
const tags: string[] = [];
for (const field of frame.fields) {
if (field.labels) {
tags.push.apply(tags, Object.values(field.labels));
}
}
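// View each log frame as rows of { ts, line } so the timestamp and text can be read per row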
const view = new DataFrameView<{ ts: string; line: string }>(frame);
view.forEachRow(row => {
annotations.push({
time: new Date(row.ts).valueOf(),
text: row.line,
tags,
});
});
}
return annotations;
}
Example #3
Source File: NewsPanel.tsx, from grafana-chinese (Apache License 2.0), 6 votes
async loadFeed() {
const { options } = this.props;
try {
const url = options.feedUrl ?? DEFAULT_FEED_URL;
const res = await loadRSSFeed(url);
const frame = feedToDataFrame(res);
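// Store the feed as a DataFrameView so the panel can read rows as typed NewsItem objects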
this.setState({
news: new DataFrameView<NewsItem>(frame),
isError: false,
});
} catch (err) {
console.error('Error Loading News', err);
this.setState({
news: undefined,
isError: true,
});
}
}
Example #4
Source File: result_transformer.ts, from grafana-chinese (Apache License 2.0), 5 votes
enhanceDataFrame = (dataFrame: DataFrame, config: LokiOptions | null): void => {
if (!config) {
return;
}
const derivedFields = config.derivedFields ?? [];
if (!derivedFields.length) {
return;
}
const fields = derivedFields.reduce((acc, field) => {
const config: FieldConfig = {};
if (field.url) {
config.links = [
{
url: field.url,
title: '',
},
];
}
const dataFrameField = {
name: field.name,
type: FieldType.string,
config,
values: new ArrayVector<string>([]),
};
acc[field.name] = dataFrameField;
return acc;
}, {} as Record<string, any>);
const view = new DataFrameView(dataFrame);
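// Walk every row, run each derived-field regex against the log line, and collect the first capture group (or null)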
view.forEachRow((row: { line: string }) => {
for (const field of derivedFields) {
const logMatch = row.line.match(field.matcherRegex);
fields[field.name].values.add(logMatch && logMatch[1]);
}
});
dataFrame.fields = [...dataFrame.fields, ...Object.values(fields)];
}
Example #5
Source File: graph.ts, from grafana-chinese (Apache License 2.0), 5 votes
onPlotClick(event: JQueryEventObject, pos: any, item: any) {
const scrollContextElement = this.elem.closest('.view') ? this.elem.closest('.view').get()[0] : null;
const contextMenuSourceItem = item;
if (this.panel.xaxis.mode !== 'time') {
// Skip if panel in histogram or series mode
return;
}
if ((pos.ctrlKey || pos.metaKey) && (this.dashboard.meta.canEdit || this.dashboard.meta.canMakeEditable)) {
// Skip if range selected (added in "plotselected" event handler)
if (pos.x !== pos.x1) {
return;
}
setTimeout(() => {
this.eventManager.updateTime({ from: pos.x, to: null });
}, 100);
return;
} else {
this.tooltip.clear(this.plot);
let linksSupplier: LinkModelSupplier<FieldDisplay>;
if (item) {
// pick up the y-axis index to know which field's config to apply
const yAxisConfig = this.panel.yaxes[item.series.yaxis.n === 2 ? 1 : 0];
const dataFrame = this.ctrl.dataList[item.series.dataFrameIndex];
const field = dataFrame.fields[item.series.fieldIndex];
const dataIndex = this.getDataIndexWithNullValuesCorrection(item, dataFrame);
let links = this.panel.options.dataLinks || [];
if (field.config.links && field.config.links.length) {
// Append the configured links to the panel datalinks
links = [...links, ...field.config.links];
}
const fieldConfig = {
decimals: yAxisConfig.decimals,
links,
};
const fieldDisplay = getDisplayProcessor({
field: { config: fieldConfig, type: FieldType.number },
theme: getCurrentTheme(),
})(field.values.get(dataIndex));
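// Only build a link supplier when links exist; the DataFrameView gives it row-level access to the clicked frame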
linksSupplier = links.length
? getFieldLinksSupplier({
display: fieldDisplay,
name: field.name,
view: new DataFrameView(dataFrame),
rowIndex: dataIndex,
colIndex: item.series.fieldIndex,
field: fieldConfig,
})
: undefined;
}
this.scope.$apply(() => {
// Setting nearest CustomScrollbar element as a scroll context for graph context menu
this.contextMenu.setScrollContextElement(scrollContextElement);
this.contextMenu.setSource(contextMenuSourceItem);
this.contextMenu.setMenuItemsSupplier(this.getContextMenuItemsSupplier(pos, linksSupplier) as any);
this.contextMenu.toggleMenu(pos);
});
}
}
Example #6
Source File: linkSuppliers.test.ts, from grafana-chinese (Apache License 2.0), 4 votes
describe('getLinksFromLogsField', () => {
let originalLinkSrv: LinkService;
beforeAll(() => {
// We do not need more than this here, and TimeSrv is hard to set up fully.
const timeSrvMock: TimeSrv = {
timeRangeForUrl() {
const from = dateTime().subtract(1, 'h');
const to = dateTime();
return { from, to, raw: { from, to } };
},
} as any;
const linkService = new LinkSrv(new TemplateSrv(), timeSrvMock);
originalLinkSrv = getLinkSrv();
setLinkSrv(linkService);
});
afterAll(() => {
setLinkSrv(originalLinkSrv);
});
it('interpolates link from field', () => {
const field: Field = {
name: 'test field',
type: FieldType.number,
config: {
links: [
{
title: 'title1',
url: 'http://domain.com/${__value.raw}',
},
{
title: 'title2',
url: 'http://anotherdomain.sk/${__value.raw}',
},
],
},
values: new ArrayVector([1, 2, 3]),
};
const links = getLinksFromLogsField(field, 2);
expect(links.length).toBe(2);
expect(links[0].href).toBe('http://domain.com/3');
expect(links[1].href).toBe('http://anotherdomain.sk/3');
});
it('handles zero links', () => {
const field: Field = {
name: 'test field',
type: FieldType.number,
config: {},
values: new ArrayVector([1, 2, 3]),
};
const links = getLinksFromLogsField(field, 2);
expect(links.length).toBe(0);
});
it('links to items on the row', () => {
const data = applyFieldOverrides({
data: [
toDataFrame({
name: 'Hello Templates',
refId: 'ZZZ',
fields: [
{ name: 'Time', values: [1, 2, 3] },
{
name: 'Power',
values: [100.2000001, 200, 300],
config: {
unit: 'kW',
decimals: 3,
title: 'TheTitle',
},
},
{
name: 'Last',
values: ['a', 'b', 'c'],
config: {
links: [
{
title: 'By Name',
url: 'http://go/${__data.fields.Power}',
},
{
title: 'By Index',
url: 'http://go/${__data.fields[1]}',
},
{
title: 'By Title',
url: 'http://go/${__data.fields[TheTitle]}',
},
{
title: 'Numeric Value',
url: 'http://go/${__data.fields.Power.numeric}',
},
{
title: 'Text (no suffix)',
url: 'http://go/${__data.fields.Power.text}',
},
{
title: 'Unknown Field',
url: 'http://go/${__data.fields.XYZ}',
},
{
title: 'Data Frame name',
url: 'http://go/${__data.name}',
},
{
title: 'Data Frame refId',
url: 'http://go/${__data.refId}',
},
],
},
},
],
}),
],
fieldOptions: {
defaults: {},
overrides: [],
},
replaceVariables: (val: string) => val,
timeZone: 'utc',
theme: {} as GrafanaTheme,
autoMinMax: true,
})[0];
const rowIndex = 0;
const colIndex = data.fields.length - 1;
const field = data.fields[colIndex];
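// FieldDisplay carries the DataFrameView plus row/column indices so ${__data.*} variables resolve against that row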
const fieldDisp: FieldDisplay = {
name: 'hello',
field: field.config,
view: new DataFrameView(data),
rowIndex,
colIndex,
display: field.display!(field.values.get(rowIndex)),
};
const supplier = getFieldLinksSupplier(fieldDisp);
const links = supplier.getLinks({}).map(m => {
return {
title: m.title,
href: m.href,
};
});
expect(links).toMatchInlineSnapshot(`
Array [
Object {
"href": "http://go/100.200 kW",
"title": "By Name",
},
Object {
"href": "http://go/100.200 kW",
"title": "By Index",
},
Object {
"href": "http://go/100.200 kW",
"title": "By Title",
},
Object {
"href": "http://go/100.2000001",
"title": "Numeric Value",
},
Object {
"href": "http://go/100.200",
"title": "Text (no suffix)",
},
Object {
"href": "http://go/\${__data.fields.XYZ}",
"title": "Unknown Field",
},
Object {
"href": "http://go/Hello Templates",
"title": "Data Frame name",
},
Object {
"href": "http://go/ZZZ",
"title": "Data Frame refId",
},
]
`);
});
});
Example #7
Source File: elastic_response.test.ts, from grafana-chinese (Apache License 2.0), 4 votes
describe('ElasticResponse', () => {
let targets;
let response: any;
let result: any;
describe('simple query and count', () => {
beforeEach(() => {
targets = [
{
refId: 'A',
metrics: [{ type: 'count', id: '1' }],
bucketAggs: [{ type: 'date_histogram', field: '@timestamp', id: '2' }],
},
];
response = {
responses: [
{
aggregations: {
'2': {
buckets: [
{
doc_count: 10,
key: 1000,
},
{
doc_count: 15,
key: 2000,
},
],
},
},
},
],
};
result = new ElasticResponse(targets, response).getTimeSeries();
});
it('should return 1 series', () => {
expect(result.data.length).toBe(1);
expect(result.data[0].target).toBe('Count');
expect(result.data[0].datapoints.length).toBe(2);
expect(result.data[0].datapoints[0][0]).toBe(10);
expect(result.data[0].datapoints[0][1]).toBe(1000);
});
});
describe('simple query count & avg aggregation', () => {
let result: any;
beforeEach(() => {
targets = [
{
refId: 'A',
metrics: [
{ type: 'count', id: '1' },
{ type: 'avg', field: 'value', id: '2' },
],
bucketAggs: [{ type: 'date_histogram', field: '@timestamp', id: '3' }],
},
];
response = {
responses: [
{
aggregations: {
'3': {
buckets: [
{
'2': { value: 88 },
doc_count: 10,
key: 1000,
},
{
'2': { value: 99 },
doc_count: 15,
key: 2000,
},
],
},
},
},
],
};
result = new ElasticResponse(targets, response).getTimeSeries();
});
it('should return 2 series', () => {
expect(result.data.length).toBe(2);
expect(result.data[0].datapoints.length).toBe(2);
expect(result.data[0].datapoints[0][0]).toBe(10);
expect(result.data[0].datapoints[0][1]).toBe(1000);
expect(result.data[1].target).toBe('Average value');
expect(result.data[1].datapoints[0][0]).toBe(88);
expect(result.data[1].datapoints[1][0]).toBe(99);
});
});
describe('single group by query one metric', () => {
let result: any;
beforeEach(() => {
targets = [
{
refId: 'A',
metrics: [{ type: 'count', id: '1' }],
bucketAggs: [
{ type: 'terms', field: 'host', id: '2' },
{ type: 'date_histogram', field: '@timestamp', id: '3' },
],
},
];
response = {
responses: [
{
aggregations: {
'2': {
buckets: [
{
'3': {
buckets: [
{ doc_count: 1, key: 1000 },
{ doc_count: 3, key: 2000 },
],
},
doc_count: 4,
key: 'server1',
},
{
'3': {
buckets: [
{ doc_count: 2, key: 1000 },
{ doc_count: 8, key: 2000 },
],
},
doc_count: 10,
key: 'server2',
},
],
},
},
},
],
};
result = new ElasticResponse(targets, response).getTimeSeries();
});
it('should return 2 series', () => {
expect(result.data.length).toBe(2);
expect(result.data[0].datapoints.length).toBe(2);
expect(result.data[0].target).toBe('server1');
expect(result.data[1].target).toBe('server2');
});
});
describe('single group by query two metrics', () => {
let result: any;
beforeEach(() => {
targets = [
{
refId: 'A',
metrics: [
{ type: 'count', id: '1' },
{ type: 'avg', field: '@value', id: '4' },
],
bucketAggs: [
{ type: 'terms', field: 'host', id: '2' },
{ type: 'date_histogram', field: '@timestamp', id: '3' },
],
},
];
response = {
responses: [
{
aggregations: {
'2': {
buckets: [
{
'3': {
buckets: [
{ '4': { value: 10 }, doc_count: 1, key: 1000 },
{ '4': { value: 12 }, doc_count: 3, key: 2000 },
],
},
doc_count: 4,
key: 'server1',
},
{
'3': {
buckets: [
{ '4': { value: 20 }, doc_count: 1, key: 1000 },
{ '4': { value: 32 }, doc_count: 3, key: 2000 },
],
},
doc_count: 10,
key: 'server2',
},
],
},
},
},
],
};
result = new ElasticResponse(targets, response).getTimeSeries();
});
it('should return 4 series', () => {
expect(result.data.length).toBe(4);
expect(result.data[0].datapoints.length).toBe(2);
expect(result.data[0].target).toBe('server1 Count');
expect(result.data[1].target).toBe('server1 Average @value');
expect(result.data[2].target).toBe('server2 Count');
expect(result.data[3].target).toBe('server2 Average @value');
});
});
describe('with percentiles ', () => {
let result: any;
beforeEach(() => {
targets = [
{
refId: 'A',
metrics: [{ type: 'percentiles', settings: { percents: [75, 90] }, id: '1' }],
bucketAggs: [{ type: 'date_histogram', field: '@timestamp', id: '3' }],
},
];
response = {
responses: [
{
aggregations: {
'3': {
buckets: [
{
'1': { values: { '75': 3.3, '90': 5.5 } },
doc_count: 10,
key: 1000,
},
{
'1': { values: { '75': 2.3, '90': 4.5 } },
doc_count: 15,
key: 2000,
},
],
},
},
},
],
};
result = new ElasticResponse(targets, response).getTimeSeries();
});
it('should return 2 series', () => {
expect(result.data.length).toBe(2);
expect(result.data[0].datapoints.length).toBe(2);
expect(result.data[0].target).toBe('p75');
expect(result.data[1].target).toBe('p90');
expect(result.data[0].datapoints[0][0]).toBe(3.3);
expect(result.data[0].datapoints[0][1]).toBe(1000);
expect(result.data[1].datapoints[1][0]).toBe(4.5);
});
});
describe('with extended_stats', () => {
let result: any;
beforeEach(() => {
targets = [
{
refId: 'A',
metrics: [
{
type: 'extended_stats',
meta: { max: true, std_deviation_bounds_upper: true },
id: '1',
},
],
bucketAggs: [
{ type: 'terms', field: 'host', id: '3' },
{ type: 'date_histogram', id: '4' },
],
},
];
response = {
responses: [
{
aggregations: {
'3': {
buckets: [
{
key: 'server1',
'4': {
buckets: [
{
'1': {
max: 10.2,
min: 5.5,
std_deviation_bounds: { upper: 3, lower: -2 },
},
doc_count: 10,
key: 1000,
},
],
},
},
{
key: 'server2',
'4': {
buckets: [
{
'1': {
max: 10.2,
min: 5.5,
std_deviation_bounds: { upper: 3, lower: -2 },
},
doc_count: 10,
key: 1000,
},
],
},
},
],
},
},
},
],
};
result = new ElasticResponse(targets, response).getTimeSeries();
});
it('should return 4 series', () => {
expect(result.data.length).toBe(4);
expect(result.data[0].datapoints.length).toBe(1);
expect(result.data[0].target).toBe('server1 Max');
expect(result.data[1].target).toBe('server1 Std Dev Upper');
expect(result.data[0].datapoints[0][0]).toBe(10.2);
expect(result.data[1].datapoints[0][0]).toBe(3);
});
});
describe('single group by with alias pattern', () => {
let result: any;
beforeEach(() => {
targets = [
{
refId: 'A',
metrics: [{ type: 'count', id: '1' }],
alias: '{{term @host}} {{metric}} and {{not_exist}} {{@host}}',
bucketAggs: [
{ type: 'terms', field: '@host', id: '2' },
{ type: 'date_histogram', field: '@timestamp', id: '3' },
],
},
];
response = {
responses: [
{
aggregations: {
'2': {
buckets: [
{
'3': {
buckets: [
{ doc_count: 1, key: 1000 },
{ doc_count: 3, key: 2000 },
],
},
doc_count: 4,
key: 'server1',
},
{
'3': {
buckets: [
{ doc_count: 2, key: 1000 },
{ doc_count: 8, key: 2000 },
],
},
doc_count: 10,
key: 'server2',
},
{
'3': {
buckets: [
{ doc_count: 2, key: 1000 },
{ doc_count: 8, key: 2000 },
],
},
doc_count: 10,
key: 0,
},
],
},
},
},
],
};
result = new ElasticResponse(targets, response).getTimeSeries();
});
it('should return 3 series', () => {
expect(result.data.length).toBe(3);
expect(result.data[0].datapoints.length).toBe(2);
expect(result.data[0].target).toBe('server1 Count and {{not_exist}} server1');
expect(result.data[1].target).toBe('server2 Count and {{not_exist}} server2');
expect(result.data[2].target).toBe('0 Count and {{not_exist}} 0');
});
});
describe('histogram response', () => {
let result: any;
beforeEach(() => {
targets = [
{
refId: 'A',
metrics: [{ type: 'count', id: '1' }],
bucketAggs: [{ type: 'histogram', field: 'bytes', id: '3' }],
},
];
response = {
responses: [
{
aggregations: {
'3': {
buckets: [
{ doc_count: 1, key: 1000 },
{ doc_count: 3, key: 2000 },
{ doc_count: 2, key: 1000 },
],
},
},
},
],
};
result = new ElasticResponse(targets, response).getTimeSeries();
});
it('should return table with byte and count', () => {
expect(result.data[0].rows.length).toBe(3);
expect(result.data[0].columns).toEqual([{ text: 'bytes', filterable: true }, { text: 'Count' }]);
});
});
describe('with two filters agg', () => {
let result: any;
beforeEach(() => {
targets = [
{
refId: 'A',
metrics: [{ type: 'count', id: '1' }],
bucketAggs: [
{
id: '2',
type: 'filters',
settings: {
filters: [{ query: '@metric:cpu' }, { query: '@metric:logins.count' }],
},
},
{ type: 'date_histogram', field: '@timestamp', id: '3' },
],
},
];
response = {
responses: [
{
aggregations: {
'2': {
buckets: {
'@metric:cpu': {
'3': {
buckets: [
{ doc_count: 1, key: 1000 },
{ doc_count: 3, key: 2000 },
],
},
},
'@metric:logins.count': {
'3': {
buckets: [
{ doc_count: 2, key: 1000 },
{ doc_count: 8, key: 2000 },
],
},
},
},
},
},
},
],
};
result = new ElasticResponse(targets, response).getTimeSeries();
});
it('should return 2 series', () => {
expect(result.data.length).toBe(2);
expect(result.data[0].datapoints.length).toBe(2);
expect(result.data[0].target).toBe('@metric:cpu');
expect(result.data[1].target).toBe('@metric:logins.count');
});
});
describe('with dropfirst and last aggregation', () => {
beforeEach(() => {
targets = [
{
refId: 'A',
metrics: [{ type: 'avg', id: '1' }, { type: 'count' }],
bucketAggs: [
{
id: '2',
type: 'date_histogram',
field: 'host',
settings: { trimEdges: 1 },
},
],
},
];
response = {
responses: [
{
aggregations: {
'2': {
buckets: [
{
'1': { value: 1000 },
key: 1,
doc_count: 369,
},
{
'1': { value: 2000 },
key: 2,
doc_count: 200,
},
{
'1': { value: 2000 },
key: 3,
doc_count: 200,
},
],
},
},
},
],
};
result = new ElasticResponse(targets, response).getTimeSeries();
});
it('should remove first and last value', () => {
expect(result.data.length).toBe(2);
expect(result.data[0].datapoints.length).toBe(1);
});
});
describe('No group by time', () => {
beforeEach(() => {
targets = [
{
refId: 'A',
metrics: [{ type: 'avg', id: '1' }, { type: 'count' }],
bucketAggs: [{ id: '2', type: 'terms', field: 'host' }],
},
];
response = {
responses: [
{
aggregations: {
'2': {
buckets: [
{
'1': { value: 1000 },
key: 'server-1',
doc_count: 369,
},
{
'1': { value: 2000 },
key: 'server-2',
doc_count: 200,
},
],
},
},
},
],
};
result = new ElasticResponse(targets, response).getTimeSeries();
});
it('should return table', () => {
expect(result.data.length).toBe(1);
expect(result.data[0].type).toBe('table');
expect(result.data[0].rows.length).toBe(2);
expect(result.data[0].rows[0][0]).toBe('server-1');
expect(result.data[0].rows[0][1]).toBe(1000);
expect(result.data[0].rows[0][2]).toBe(369);
expect(result.data[0].rows[1][0]).toBe('server-2');
expect(result.data[0].rows[1][1]).toBe(2000);
});
});
describe('No group by time with percentiles ', () => {
let result: any;
beforeEach(() => {
targets = [
{
refId: 'A',
metrics: [{ type: 'percentiles', field: 'value', settings: { percents: [75, 90] }, id: '1' }],
bucketAggs: [{ type: 'term', field: 'id', id: '3' }],
},
];
response = {
responses: [
{
aggregations: {
'3': {
buckets: [
{
'1': { values: { '75': 3.3, '90': 5.5 } },
doc_count: 10,
key: 'id1',
},
{
'1': { values: { '75': 2.3, '90': 4.5 } },
doc_count: 15,
key: 'id2',
},
],
},
},
},
],
};
result = new ElasticResponse(targets, response).getTimeSeries();
});
it('should return table', () => {
expect(result.data.length).toBe(1);
expect(result.data[0].type).toBe('table');
expect(result.data[0].columns[0].text).toBe('id');
expect(result.data[0].columns[1].text).toBe('p75 value');
expect(result.data[0].columns[2].text).toBe('p90 value');
expect(result.data[0].rows.length).toBe(2);
expect(result.data[0].rows[0][0]).toBe('id1');
expect(result.data[0].rows[0][1]).toBe(3.3);
expect(result.data[0].rows[0][2]).toBe(5.5);
expect(result.data[0].rows[1][0]).toBe('id2');
expect(result.data[0].rows[1][1]).toBe(2.3);
expect(result.data[0].rows[1][2]).toBe(4.5);
});
});
describe('Multiple metrics of same type', () => {
beforeEach(() => {
targets = [
{
refId: 'A',
metrics: [
{ type: 'avg', id: '1', field: 'test' },
{ type: 'avg', id: '2', field: 'test2' },
],
bucketAggs: [{ id: '2', type: 'terms', field: 'host' }],
},
];
response = {
responses: [
{
aggregations: {
'2': {
buckets: [
{
'1': { value: 1000 },
'2': { value: 3000 },
key: 'server-1',
doc_count: 369,
},
],
},
},
},
],
};
result = new ElasticResponse(targets, response).getTimeSeries();
});
it('should include field in metric name', () => {
expect(result.data[0].type).toBe('table');
expect(result.data[0].rows[0][1]).toBe(1000);
expect(result.data[0].rows[0][2]).toBe(3000);
});
});
describe('Raw documents query', () => {
beforeEach(() => {
targets = [
{
refId: 'A',
metrics: [{ type: 'raw_document', id: '1' }],
bucketAggs: [],
},
];
response = {
responses: [
{
hits: {
total: 100,
hits: [
{
_id: '1',
_type: 'type',
_index: 'index',
_source: { sourceProp: 'asd' },
fields: { fieldProp: 'field' },
},
{
_source: { sourceProp: 'asd2' },
fields: { fieldProp: 'field2' },
},
],
},
},
],
};
result = new ElasticResponse(targets, response).getTimeSeries();
});
it('should return docs', () => {
expect(result.data.length).toBe(1);
expect(result.data[0].type).toBe('docs');
expect(result.data[0].total).toBe(100);
expect(result.data[0].datapoints.length).toBe(2);
expect(result.data[0].datapoints[0].sourceProp).toBe('asd');
expect(result.data[0].datapoints[0].fieldProp).toBe('field');
});
});
describe('with bucket_script ', () => {
let result: any;
beforeEach(() => {
targets = [
{
refId: 'A',
metrics: [
{ id: '1', type: 'sum', field: '@value' },
{ id: '3', type: 'max', field: '@value' },
{
id: '4',
field: 'select field',
pipelineVariables: [
{ name: 'var1', pipelineAgg: '1' },
{ name: 'var2', pipelineAgg: '3' },
],
settings: { script: 'params.var1 * params.var2' },
type: 'bucket_script',
},
],
bucketAggs: [{ type: 'date_histogram', field: '@timestamp', id: '2' }],
},
];
response = {
responses: [
{
aggregations: {
'2': {
buckets: [
{
1: { value: 2 },
3: { value: 3 },
4: { value: 6 },
doc_count: 60,
key: 1000,
},
{
1: { value: 3 },
3: { value: 4 },
4: { value: 12 },
doc_count: 60,
key: 2000,
},
],
},
},
},
],
};
result = new ElasticResponse(targets, response).getTimeSeries();
});
it('should return 3 series', () => {
expect(result.data.length).toBe(3);
expect(result.data[0].datapoints.length).toBe(2);
expect(result.data[0].target).toBe('Sum @value');
expect(result.data[1].target).toBe('Max @value');
expect(result.data[2].target).toBe('Sum @value * Max @value');
expect(result.data[0].datapoints[0][0]).toBe(2);
expect(result.data[1].datapoints[0][0]).toBe(3);
expect(result.data[2].datapoints[0][0]).toBe(6);
expect(result.data[0].datapoints[1][0]).toBe(3);
expect(result.data[1].datapoints[1][0]).toBe(4);
expect(result.data[2].datapoints[1][0]).toBe(12);
});
});
describe('simple logs query and count', () => {
const targets: any = [
{
refId: 'A',
metrics: [{ type: 'count', id: '1' }],
bucketAggs: [{ type: 'date_histogram', settings: { interval: 'auto' }, id: '2' }],
context: 'explore',
interval: '10s',
isLogsQuery: true,
key: 'Q-1561369883389-0.7611823271062786-0',
liveStreaming: false,
maxDataPoints: 1620,
query: '',
timeField: '@timestamp',
},
];
const response = {
responses: [
{
aggregations: {
'2': {
buckets: [
{
doc_count: 10,
key: 1000,
},
{
doc_count: 15,
key: 2000,
},
],
},
},
hits: {
hits: [
{
_id: 'fdsfs',
_type: '_doc',
_index: 'mock-index',
_source: {
'@timestamp': '2019-06-24T09:51:19.765Z',
host: 'djisaodjsoad',
message: 'hello, i am a message',
level: 'debug',
fields: {
lvl: 'debug',
},
},
},
{
_id: 'kdospaidopa',
_type: '_doc',
_index: 'mock-index',
_source: {
'@timestamp': '2019-06-24T09:52:19.765Z',
host: 'dsalkdakdop',
message: 'hello, i am also message',
level: 'error',
fields: {
lvl: 'info',
},
},
},
],
},
},
],
};
it('should return histogram aggregation and documents', () => {
const result = new ElasticResponse(targets, response).getLogs();
expect(result.data.length).toBe(2);
const logResults = result.data[0] as MutableDataFrame;
const fields = logResults.fields.map(f => {
return {
name: f.name,
type: f.type,
};
});
expect(fields).toContainEqual({ name: '@timestamp', type: 'time' });
expect(fields).toContainEqual({ name: 'host', type: 'string' });
expect(fields).toContainEqual({ name: 'message', type: 'string' });
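// Read the log frame row by row and compare each row with the raw Elasticsearch hit it came from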
let rows = new DataFrameView(logResults);
for (let i = 0; i < rows.length; i++) {
const r = rows.get(i);
expect(r._id).toEqual(response.responses[0].hits.hits[i]._id);
expect(r._type).toEqual(response.responses[0].hits.hits[i]._type);
expect(r._index).toEqual(response.responses[0].hits.hits[i]._index);
expect(r._source).toEqual(flatten(response.responses[0].hits.hits[i]._source, null));
}
// Make a map from the histogram results
const hist: KeyValue<number> = {};
const histogramResults = new MutableDataFrame(result.data[1]);
rows = new DataFrameView(histogramResults);
for (let i = 0; i < rows.length; i++) {
const row = rows.get(i);
hist[row.Time] = row.Count;
}
response.responses[0].aggregations['2'].buckets.forEach((bucket: any) => {
expect(hist[bucket.key]).toEqual(bucket.doc_count);
});
});
it('should map levels field', () => {
const result = new ElasticResponse(targets, response).getLogs(undefined, 'level');
const fieldCache = new FieldCache(result.data[0]);
const field = fieldCache.getFieldByName('level');
expect(field.values.toArray()).toEqual(['debug', 'error']);
});
it('should re map levels field to new field', () => {
const result = new ElasticResponse(targets, response).getLogs(undefined, 'fields.lvl');
const fieldCache = new FieldCache(result.data[0]);
const field = fieldCache.getFieldByName('level');
expect(field.values.toArray()).toEqual(['debug', 'info']);
});
});
});
Example #8
Source File: live_streams.test.ts, from grafana-chinese (Apache License 2.0), 4 votes
describe('Live Stream Tests', () => {
afterAll(() => {
jest.restoreAllMocks();
});
const msg0: any = {
streams: [
{
labels: '{filename="/var/log/sntpc.log", job="varlogs"}',
entries: [
{
ts: '2019-08-28T20:50:40.118944705Z',
line: 'Kittens',
},
],
},
],
dropped_entries: null,
};
it('reads the values into the buffer', done => {
fakeSocket = new Subject<any>();
const labels: Labels = { job: 'varlogs' };
const target = makeTarget('fake', labels);
const stream = new LiveStreams().getLegacyStream(target);
expect.assertions(4);
const tests = [
(val: DataFrame[]) => {
expect(val[0].length).toEqual(7);
expect(val[0].fields[1].labels).toEqual(labels);
},
(val: DataFrame[]) => {
expect(val[0].length).toEqual(8);
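// Use a DataFrameView to read the most recently appended row from the streaming frame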
const view = new DataFrameView(val[0]);
const last = { ...view.get(view.length - 1) };
expect(last).toEqual({
ts: '2019-08-28T20:50:40.118944705Z',
id: '81d963f31c276ad2ea1af38b38436237',
line: 'Kittens',
labels: { filename: '/var/log/sntpc.log' },
});
},
];
stream.subscribe({
next: val => {
const test = tests.shift();
test(val);
},
complete: () => done(),
});
// Send it the initial list of things
fakeSocket.next(initialRawResponse);
// Send it a single update
fakeSocket.next(msg0);
fakeSocket.complete();
});
it('returns the same subscription if the url matches existing one', () => {
fakeSocket = new Subject<any>();
const liveStreams = new LiveStreams();
const stream1 = liveStreams.getLegacyStream(makeTarget('url_to_match'));
const stream2 = liveStreams.getLegacyStream(makeTarget('url_to_match'));
expect(stream1).toBe(stream2);
});
it('returns new subscription when the previous unsubscribed', () => {
fakeSocket = new Subject<any>();
const liveStreams = new LiveStreams();
const stream1 = liveStreams.getLegacyStream(makeTarget('url_to_match'));
const subscription = stream1.subscribe({
next: noop,
});
subscription.unsubscribe();
const stream2 = liveStreams.getLegacyStream(makeTarget('url_to_match'));
expect(stream1).not.toBe(stream2);
});
it('returns new subscription when the previous is unsubscribed and correctly unsubscribes from source', () => {
let unsubscribed = false;
fakeSocket = new Observable(() => {
return () => (unsubscribed = true);
}) as any;
const spy = spyOn(rxJsWebSocket, 'webSocket');
spy.and.returnValue(fakeSocket);
const liveStreams = new LiveStreams();
const stream1 = liveStreams.getLegacyStream(makeTarget('url_to_match'));
const subscription = stream1.subscribe({
next: noop,
});
subscription.unsubscribe();
expect(unsubscribed).toBe(true);
});
});