
Commit 8bd8ac3

feat(dashboards): Allow arbitrary incomplete buckets in charts (#92316)
This PR has a set of closely related changes that come together to enable arbitrary incomplete buckets in charts. Currently, you can tell a `Plottable` that it has a delay of 90s, and it'll mark the last few buckets as incomplete. With _this_ PR you can mark individual points in a `TimeSeries` as `"incomplete": true`, and every plottable will correctly mark them as incomplete.

## Changes

The most important change is the `delayed` attribute in `TimeSeriesItem`. This is a synthetic property that doesn't come back from the server. In a surprise twist, we decided that the server _will_ provide this information, but the property will be called `incomplete`, since that's more accurate. The PR updates the type and all references to that property.

The second most important change is arbitrary segmentation. We used to split a `TimeSeries` into exactly two series, one complete and one incomplete, based on a delay. Now that _any_ point in a `TimeSeries` can be incomplete, the concept of a "delay" doesn't make sense anymore, and there could be any number of complete and incomplete segments. There's a new segmentation function to support this.

The final change, a consequence of the first two, is that since `Plottable` no longer accepts a `"delay"` property, all the call sites have to mark the incomplete data on each `TimeSeries` themselves. I've updated all the call sites to do this.

**N.B.** Once we start moving UIs to the `/events-timeseries/` endpoint, they won't have to do this anymore, since the server will provide this information.
1 parent 0f1e251 commit 8bd8ac3

19 files changed: +572 −449 lines
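Before the diffs, here is a minimal sketch of what a `TimeSeries` with point-level incompleteness looks like under this change. The field names (`yAxis`, `values`, `meta`, and the per-point `incomplete` flag) are taken from the tests in this commit; the concrete values are illustrative only, not part of the diff.

```tsx
import {DurationUnit} from 'sentry/utils/discover/fields';
import type {TimeSeries} from 'sentry/views/dashboards/widgets/common/types';

// The last point falls inside the ingestion delay window, so it carries the
// new point-level `incomplete` flag instead of relying on a series-wide delay.
const timeSeries: TimeSeries = {
  yAxis: 'p99(span.duration)',
  values: [
    {value: 100, timestamp: 1729785300000},
    {value: 110, timestamp: 1729785360000},
    {value: 120, timestamp: 1729785420000, incomplete: true},
  ],
  meta: {
    valueType: 'duration',
    valueUnit: DurationUnit.MILLISECOND,
    interval: 60 * 1000,
  },
};
```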
Lines changed: 38 additions & 0 deletions

import {segmentSequentialBy} from 'sentry/utils/array/segmentSequentialBy';

describe('segmentSequentialBy', function () {
  it('handles empty array', function () {
    expect(segmentSequentialBy([], () => true)).toEqual([]);
  });

  it('handles single item', function () {
    const data = [42];

    const result = segmentSequentialBy(data, isEven);

    expect(result).toEqual([{predicateValue: true, data: [42]}]);
  });

  it('groups varying items into partitions', function () {
    const data = [1, 3, 5, 2, 4, 6, 7, 9, 11, 8, 10];

    const result = segmentSequentialBy(data, isEven);

    expect(result).toEqual([
      {predicateValue: false, data: [1, 3, 5]},
      {predicateValue: true, data: [2, 4, 6]},
      {predicateValue: false, data: [7, 9, 11]},
      {predicateValue: true, data: [8, 10]},
    ]);
  });

  it('handles all items evaluating to the same value', function () {
    const data = [2, 4, 6, 8, 10];

    const result = segmentSequentialBy(data, isEven);

    expect(result).toEqual([{predicateValue: true, data: [2, 4, 6, 8, 10]}]);
  });
});

const isEven = (n: number) => n % 2 === 0;
Lines changed: 49 additions & 0 deletions

type Predicate<T> = (datum: T) => boolean;

interface Partition<T> {
  data: T[];
  predicateValue: boolean;
}

/**
 * Partitions an array of items into groups based on a predicate function.
 * Similar to Lodash `partition`, but creates multiple groups that respect the
 * sequence of the original data.
 *
 * @param data - The array of items to partition.
 * @param predicate - A function to evaluate against each item.
 * @returns An array of partitions. Each partition has a `data` key containing
 * a consecutive run of items for which the predicate returned the same value,
 * and a `predicateValue` key with that shared value.
 */
export function segmentSequentialBy<T>(
  data: T[],
  predicate: Predicate<T>
): Array<Partition<T>> {
  if (!data.length) return [];

  const firstDatum: T = data.at(0)!;
  let previousPredicateValue = predicate(firstDatum);

  let previousPartition: Partition<T> = {
    predicateValue: previousPredicateValue,
    data: [firstDatum],
  };

  const partitions: Array<Partition<T>> = [previousPartition];

  for (const currentDatum of data.slice(1)) {
    const currentPredicateValue = predicate(currentDatum);

    if (currentPredicateValue === previousPredicateValue) {
      previousPartition.data.push(currentDatum);
    } else {
      previousPartition = {
        predicateValue: currentPredicateValue,
        data: [currentDatum],
      };

      partitions.push(previousPartition);
    }

    previousPredicateValue = currentPredicateValue;
  }

  return partitions;
}
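As a usage sketch (not part of the diff), the same helper can group time-series points into alternating complete/incomplete runs, which is the segmentation the commit message describes. The point shape follows the tests elsewhere in this commit.

```tsx
import {segmentSequentialBy} from 'sentry/utils/array/segmentSequentialBy';

const values = [
  {value: 100, timestamp: 1729785300000},
  {value: 110, timestamp: 1729785360000},
  {value: 120, timestamp: 1729785420000, incomplete: true},
  {value: 130, timestamp: 1729785480000, incomplete: true},
];

// Two partitions: a complete run followed by an incomplete run.
const runs = segmentSequentialBy(values, value => Boolean(value.incomplete));
// [
//   {predicateValue: false, data: [/* the two complete points */]},
//   {predicateValue: true, data: [/* the two incomplete points */]},
// ]
```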

static/app/utils/timeSeries/markDelayedData.tsx

Lines changed: 5 additions & 1 deletion

@@ -22,9 +22,13 @@ export function markDelayedData(timeSeries: TimeSeries, delay: number): TimeSeries
       const bucketEndTimestamp = new Date(datum.timestamp).getTime() + bucketSize;
       const delayed = bucketEndTimestamp >= ingestionDelayTimestamp;
 
+      if (!delayed) {
+        return datum;
+      }
+
       return {
         ...datum,
-        delayed,
+        incomplete: true,
       };
     }),
   };
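For readers without the full file open, here is a condensed, self-contained sketch of the updated function around the hunk above. It is not the exact file contents: the definitions of `ingestionDelayTimestamp` and `bucketSize` are not shown in the diff, and the assumptions that `delay` is in seconds and that the bucket size equals `meta.interval` (milliseconds) are inferred from the tests below.

```tsx
import type {TimeSeries} from 'sentry/views/dashboards/widgets/common/types';

export function markDelayedData(timeSeries: TimeSeries, delay: number): TimeSeries {
  // Assumption: any bucket whose end falls within `delay` seconds of "now" may
  // still receive data, so its point is flagged as incomplete.
  const ingestionDelayTimestamp = Date.now() - delay * 1000;
  const bucketSize = timeSeries.meta.interval; // assumed bucket width in ms

  return {
    ...timeSeries,
    values: timeSeries.values.map(datum => {
      const bucketEndTimestamp = new Date(datum.timestamp).getTime() + bucketSize;
      const delayed = bucketEndTimestamp >= ingestionDelayTimestamp;

      if (!delayed) {
        return datum;
      }

      return {
        ...datum,
        incomplete: true,
      };
    }),
  };
}
```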
Lines changed: 265 additions & 0 deletions

import {resetMockDate, setMockDate} from 'sentry-test/utils';

import {DurationUnit} from 'sentry/utils/discover/fields';
import type {TimeSeries} from 'sentry/views/dashboards/widgets/common/types';

import {markDelayedData} from './markDelayedData';
import {segmentTimeSeriesByIncompleteData} from './segmentTimeSeriesByIncompleteData';

describe('segmentTimeSeriesByIncompleteData', () => {
  beforeEach(() => {
    setMockDate(new Date('2024-10-24T15:59:00.000Z')); // Unix: 1729785540000
  });

  afterEach(() => {
    resetMockDate();
  });

  it('Does not split a series with all complete data', () => {
    const serie: TimeSeries = markDelayedData(
      {
        yAxis: 'p99(span.duration)',
        values: [
          {
            value: 90,
            timestamp: 1729785240000, // '2024-10-24T15:54:00.000Z'
          },
          {
            value: 100,
            timestamp: 1729785300000, // '2024-10-24T15:55:00.000Z'
          },
          {
            value: 110,
            timestamp: 1729785360000, // '2024-10-24T15:56:00.000Z'
          },
        ],
        meta: {
          valueType: 'duration',
          valueUnit: DurationUnit.MILLISECOND,
          interval: 1 * 1000,
        },
      },
      90
    );

    const segments = segmentTimeSeriesByIncompleteData(serie);
    const completeSerie = segments?.[0]?.[0];
    const incompleteSerie = segments?.[1]?.[0];

    expect(completeSerie!.values).toEqual([
      {
        value: 90,
        timestamp: 1729785240000, // '2024-10-24T15:54:00.000Z'
      },
      {
        value: 100,
        timestamp: 1729785300000, // '2024-10-24T15:55:00.000Z'
      },
      {
        value: 110,
        timestamp: 1729785360000, // '2024-10-24T15:56:00.000Z'
      },
    ]);

    expect(incompleteSerie).toBeUndefined();
  });

  it('Does not split a series with all incomplete data', () => {
    const serie: TimeSeries = markDelayedData(
      {
        yAxis: 'p99(span.duration)',
        values: [
          {
            value: 90,
            timestamp: 1729785485000, // '2024-10-24T15:58:05.000Z'
          },
          {
            value: 100,
            timestamp: 1729785490000, // '2024-10-24T15:58:10.000Z'
          },
          {
            value: 110,
            timestamp: 1729785495000, // '2024-10-24T15:58:15.000Z'
          },
          {
            value: 120,
            timestamp: 1729785500000, // '2024-10-24T15:58:20.000Z'
          },
        ],
        meta: {
          valueType: 'duration',
          valueUnit: DurationUnit.MILLISECOND,
          interval: 5 * 1000,
        },
      },
      90
    );

    const segments = segmentTimeSeriesByIncompleteData(serie);
    const incompleteSerie = segments?.[0]?.[0];
    const completeSerie = segments?.[1]?.[0];

    expect(completeSerie).toBeUndefined();

    expect(incompleteSerie!.values).toEqual([
      {
        value: 90,
        timestamp: 1729785485000, // '2024-10-24T15:58:05.000Z'
        incomplete: true,
      },
      {
        value: 100,
        timestamp: 1729785490000, // '2024-10-24T15:58:10.000Z'
        incomplete: true,
      },
      {
        value: 110,
        timestamp: 1729785495000, // '2024-10-24T15:58:15.000Z'
        incomplete: true,
      },
      {
        value: 120,
        timestamp: 1729785500000, // '2024-10-24T15:58:20.000Z'
        incomplete: true,
      },
    ]);
  });

  it('Splits a series with partial incomplete data', () => {
    const serie: TimeSeries = markDelayedData(
      {
        yAxis: 'p99(span.duration)',
        values: [
          {
            value: 100,
            timestamp: 1729785300000, // '2024-10-24T15:55:00.000Z'
          },
          {
            value: 110,
            timestamp: 1729785360000, // '2024-10-24T15:56:00.000Z'
          },
          {
            value: 120,
            timestamp: 1729785420000, // '2024-10-24T15:57:00.000Z'
          },
          {
            value: 130,
            timestamp: 1729785480000, // '2024-10-24T15:58:00.000Z'
          },
          {
            value: 140,
            timestamp: 1729785540000, // '2024-10-24T15:59:00.000Z'
          },
        ],
        meta: {
          valueType: 'duration',
          valueUnit: DurationUnit.MILLISECOND,
          interval: 1 * 60 * 1000,
        },
      },
      90
    );

    const segments = segmentTimeSeriesByIncompleteData(serie);
    const completeSerie = segments?.[0]?.[0];
    const incompleteSerie = segments?.[1]?.[0];

    expect(completeSerie!.values).toEqual([
      {
        value: 100,
        timestamp: 1729785300000, // '2024-10-24T15:55:00.000Z'
      },
      {
        value: 110,
        timestamp: 1729785360000, // '2024-10-24T15:56:00.000Z'
      },
    ]);

    expect(incompleteSerie!.values).toEqual([
      {
        value: 110,
        timestamp: 1729785360000, // '2024-10-24T15:56:00.000Z'
      },
      {
        value: 120,
        timestamp: 1729785420000, // '2024-10-24T15:57:00.000Z'
        incomplete: true,
      },
      {
        value: 130,
        timestamp: 1729785480000, // '2024-10-24T15:58:00.000Z'
        incomplete: true,
      },
      {
        value: 140,
        timestamp: 1729785540000, // '2024-10-24T15:59:00.000Z'
        incomplete: true,
      },
    ]);
  });

  it('Splits a series with long buckets', () => {
    // The time buckets are an hour long. The ingestion delay is 90s. The last
    // bucket should be marked incomplete.

    const serie: TimeSeries = markDelayedData(
      {
        yAxis: 'p99(span.duration)',
        values: [
          {
            value: 110,
            timestamp: 1729771200000, // '2024-10-24T12:00:00.000Z'
          },
          {
            value: 120,
            timestamp: 1729774800000, // '2024-10-24T13:00:00.000Z'
          },
          {
            value: 130,
            timestamp: 1729778400000, // '2024-10-24T14:00:00.000Z'
          },
          {
            value: 140,
            timestamp: 1729782000000, // '2024-10-24T15:00:00.000Z'
          },
        ],
        meta: {
          valueType: 'duration',
          valueUnit: DurationUnit.MILLISECOND,
          interval: 1 * 60 * 60 * 1000,
        },
      },
      90
    );

    const segments = segmentTimeSeriesByIncompleteData(serie);
    const completeSerie = segments?.[0]?.[0];
    const incompleteSerie = segments?.[1]?.[0];

    expect(completeSerie!.values).toEqual([
      {
        value: 110,
        timestamp: 1729771200000, // '2024-10-24T12:00:00.000Z'
      },
      {
        value: 120,
        timestamp: 1729774800000, // '2024-10-24T13:00:00.000Z'
      },
      {
        value: 130,
        timestamp: 1729778400000, // '2024-10-24T14:00:00.000Z'
      },
    ]);

    expect(incompleteSerie!.values).toEqual([
      {
        value: 130,
        timestamp: 1729778400000, // '2024-10-24T14:00:00.000Z'
      },
      {
        value: 140,
        timestamp: 1729782000000, // '2024-10-24T15:00:00.000Z'
        incomplete: true,
      },
    ]);
  });
});
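The implementation of `segmentTimeSeriesByIncompleteData` itself is not included in this excerpt, but the tests above pin down its behavior. A minimal sketch consistent with them might build on `segmentSequentialBy`, returning `[series, isIncomplete]` pairs and repeating the boundary point so adjacent segments connect visually when plotted. The return shape and the boundary duplication are inferred from the tests, not confirmed by the diff.

```tsx
import {segmentSequentialBy} from 'sentry/utils/array/segmentSequentialBy';
import type {TimeSeries} from 'sentry/views/dashboards/widgets/common/types';

export function segmentTimeSeriesByIncompleteData(
  timeSeries: TimeSeries
): Array<[TimeSeries, boolean]> {
  // Group consecutive points by their `incomplete` flag.
  const runs = segmentSequentialBy(timeSeries.values, value =>
    Boolean(value.incomplete)
  );

  return runs.map((run, index) => {
    // Repeat the last point of the previous run so plotted segments meet at
    // the boundary instead of leaving a gap (as the partial-data test expects).
    const previousRun = runs[index - 1];
    const values = previousRun ? [previousRun.data.at(-1)!, ...run.data] : run.data;

    return [{...timeSeries, values}, run.predicateValue];
  });
}
```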

0 commit comments