/*
 * Copyright 2017 Facebook, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <folly/stats/TimeseriesHistogram.h>

#include <random>

#include <folly/portability/GTest.h>
#include <folly/stats/TimeseriesHistogram-defs.h>

using namespace std;
using namespace folly;
using std::chrono::seconds;

namespace {
namespace IntMTMHTS {
enum Levels {
  MINUTE,
  TEN_MINUTE,
  HOUR,
  ALLTIME,
  NUM_LEVELS,
};

const seconds kDurations[] = {
    seconds(60),
    seconds(600),
    seconds(3600),
    seconds(0),
};
} // namespace IntMTMHTS

namespace IntMHTS {
enum Levels {
  MINUTE,
  HOUR,
  ALLTIME,
  NUM_LEVELS,
};

const seconds kDurations[] = {
    seconds(60),
    seconds(3600),
    seconds(0),
};
} // namespace IntMHTS

typedef std::mt19937 RandomInt32;

using StatsClock = folly::LegacyStatsClock<std::chrono::seconds>;
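// Helper to convert a plain seconds count into a time_point on the stats
// clock used throughout these tests.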
StatsClock::time_point mkTimePoint(int value) {
  return StatsClock::time_point(StatsClock::duration(value));
}
} // namespace

TEST(TimeseriesHistogram, Percentile) {
  RandomInt32 random(5);
  // [10, 109], 12 buckets including above and below
  {
    TimeseriesHistogram<int> h(
        10,
        10,
        110,
        MultiLevelTimeSeries<int>(
            60, IntMTMHTS::NUM_LEVELS, IntMTMHTS::kDurations));

    EXPECT_EQ(0, h.getPercentileEstimate(0, IntMTMHTS::ALLTIME));

    EXPECT_EQ(12, h.getNumBuckets());
    EXPECT_EQ(10, h.getBucketSize());
    EXPECT_EQ(10, h.getMin());
    EXPECT_EQ(110, h.getMax());

    for (size_t i = 0; i < h.getNumBuckets(); ++i) {
      EXPECT_EQ(4, h.getBucket(i).numLevels());
    }

    int maxVal = 120;
    h.addValue(mkTimePoint(0), 0);
    h.addValue(mkTimePoint(0), maxVal);
    for (int i = 0; i < 98; i++) {
      h.addValue(mkTimePoint(0), random() % maxVal);
    }

    h.update(mkTimePoint(1500000000));
    // bucket 0 stores everything below min, so its minimum
    // is the lowest possible number
    EXPECT_EQ(
        std::numeric_limits<int>::min(),
        h.getPercentileBucketMin(1, IntMTMHTS::ALLTIME));
    EXPECT_EQ(110, h.getPercentileBucketMin(99, IntMTMHTS::ALLTIME));

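    // getPercentileEstimate() interpolates inside the bucket a percentile
    // falls in, so very low percentiles (which land in the below-min bucket)
    // can come back below the histogram minimum, and the 100th percentile
    // reaches the largest value added (maxVal).  The exact numbers below
    // depend on the fixed RNG seed used above.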
    EXPECT_EQ(-2, h.getPercentileEstimate(0, IntMTMHTS::ALLTIME));
    EXPECT_EQ(-1, h.getPercentileEstimate(1, IntMTMHTS::ALLTIME));
    EXPECT_EQ(119, h.getPercentileEstimate(99, IntMTMHTS::ALLTIME));
    EXPECT_EQ(120, h.getPercentileEstimate(100, IntMTMHTS::ALLTIME));
  }
}

TEST(TimeseriesHistogram, String) {
  RandomInt32 random(5);
  // [10, 109], 12 buckets including above and below
  {
    TimeseriesHistogram<int> hist(
        10,
        10,
        110,
        MultiLevelTimeSeries<int>(
            60, IntMTMHTS::NUM_LEVELS, IntMTMHTS::kDurations));

    int maxVal = 120;
    hist.addValue(mkTimePoint(0), 0);
    hist.addValue(mkTimePoint(0), maxVal);
    for (int i = 0; i < 98; i++) {
      hist.addValue(mkTimePoint(0), random() % maxVal);
    }

    hist.update(mkTimePoint(0));

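    // getString() renders each bucket as "bucketMin:count:avg", comma
    // separated; the first entry is the below-min bucket, whose minimum is
    // reported as INT_MIN (-2147483648).  All four levels saw the same data
    // at time 0, so the expected strings are identical across levels.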
    const char* const kStringValues1[IntMTMHTS::NUM_LEVELS] = {
        "-2147483648:12:4,10:8:13,20:8:24,30:6:34,40:13:46,50:8:54,60:7:64,"
        "70:7:74,80:8:84,90:10:94,100:3:103,110:10:115",
        "-2147483648:12:4,10:8:13,20:8:24,30:6:34,40:13:46,50:8:54,60:7:64,"
        "70:7:74,80:8:84,90:10:94,100:3:103,110:10:115",
        "-2147483648:12:4,10:8:13,20:8:24,30:6:34,40:13:46,50:8:54,60:7:64,"
        "70:7:74,80:8:84,90:10:94,100:3:103,110:10:115",
        "-2147483648:12:4,10:8:13,20:8:24,30:6:34,40:13:46,50:8:54,60:7:64,"
        "70:7:74,80:8:84,90:10:94,100:3:103,110:10:115",
    };

    CHECK_EQ(IntMTMHTS::NUM_LEVELS, hist.getNumLevels());

    for (size_t level = 0; level < hist.getNumLevels(); ++level) {
      EXPECT_EQ(kStringValues1[level], hist.getString(level));
    }

    const char* const kStringValues2[IntMTMHTS::NUM_LEVELS] = {
        "-2147483648:12:4,10:8:13,20:8:24,30:6:34,40:13:46,50:8:54,60:7:64,"
        "70:7:74,80:8:84,90:10:94,100:3:103,110:10:115",
        "-2147483648:12:4,10:8:13,20:8:24,30:6:34,40:13:46,50:8:54,60:7:64,"
        "70:7:74,80:8:84,90:10:94,100:3:103,110:10:115",
        "-2147483648:12:4,10:8:13,20:8:24,30:6:34,40:13:46,50:8:54,60:7:64,"
        "70:7:74,80:8:84,90:10:94,100:3:103,110:10:115",
        "-2147483648:12:4,10:8:13,20:8:24,30:6:34,40:13:46,50:8:54,60:7:64,"
        "70:7:74,80:8:84,90:10:94,100:3:103,110:10:115",
    };

    CHECK_EQ(IntMTMHTS::NUM_LEVELS, hist.getNumLevels());

    for (size_t level = 0; level < hist.getNumLevels(); ++level) {
      EXPECT_EQ(kStringValues2[level], hist.getString(level));
    }
  }
}

TEST(TimeseriesHistogram, Clear) {
  {
    TimeseriesHistogram<int> hist(
        10,
        0,
        100,
        MultiLevelTimeSeries<int>(
            60, IntMTMHTS::NUM_LEVELS, IntMTMHTS::kDurations));

    for (int now = 0; now < 3600; now++) {
      for (int i = 0; i < 100; i++) {
        hist.addValue(mkTimePoint(now), i, 2); // adds each item 2 times
      }
    }

    // check clearing
    hist.clear();

    for (size_t b = 0; b < hist.getNumBuckets(); ++b) {
      EXPECT_EQ(0, hist.getBucket(b).count(IntMTMHTS::MINUTE));
      EXPECT_EQ(0, hist.getBucket(b).count(IntMTMHTS::TEN_MINUTE));
      EXPECT_EQ(0, hist.getBucket(b).count(IntMTMHTS::HOUR));
      EXPECT_EQ(0, hist.getBucket(b).count(IntMTMHTS::ALLTIME));
    }

    for (int pct = 0; pct <= 100; pct++) {
      EXPECT_EQ(0, hist.getPercentileBucketMin(pct, IntMTMHTS::MINUTE));
      EXPECT_EQ(0, hist.getPercentileBucketMin(pct, IntMTMHTS::TEN_MINUTE));
      EXPECT_EQ(0, hist.getPercentileBucketMin(pct, IntMTMHTS::HOUR));
      EXPECT_EQ(0, hist.getPercentileBucketMin(pct, IntMTMHTS::ALLTIME));

      EXPECT_EQ(0, hist.getPercentileEstimate(pct, IntMTMHTS::MINUTE));
      EXPECT_EQ(0, hist.getPercentileEstimate(pct, IntMTMHTS::TEN_MINUTE));
      EXPECT_EQ(0, hist.getPercentileEstimate(pct, IntMTMHTS::HOUR));
      EXPECT_EQ(0, hist.getPercentileEstimate(pct, IntMTMHTS::ALLTIME));
    }
  }
}

TEST(TimeseriesHistogram, Basic) {
  {
    TimeseriesHistogram<int> hist(
        10,
        0,
        100,
        MultiLevelTimeSeries<int>(
            60, IntMTMHTS::NUM_LEVELS, IntMTMHTS::kDurations));

    for (int now = 0; now < 3600; now++) {
      for (int i = 0; i < 100; i++) {
        hist.addValue(mkTimePoint(now), i);
      }
    }

    hist.update(mkTimePoint(3599));
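    // Each second added one copy of every value 0..99, so each 10-wide
    // bucket holds exactly 10% of the samples and percentile pct falls in
    // the bucket whose minimum is ((pct - 1) / 10) * 10.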
    for (int pct = 1; pct <= 100; pct++) {
      int expected = (pct - 1) / 10 * 10;
      EXPECT_EQ(expected, hist.getPercentileBucketMin(pct, IntMTMHTS::MINUTE));
      EXPECT_EQ(
          expected, hist.getPercentileBucketMin(pct, IntMTMHTS::TEN_MINUTE));
      EXPECT_EQ(expected, hist.getPercentileBucketMin(pct, IntMTMHTS::HOUR));
      EXPECT_EQ(expected, hist.getPercentileBucketMin(pct, IntMTMHTS::ALLTIME));
    }

    for (size_t b = 1; (b + 1) < hist.getNumBuckets(); ++b) {
      EXPECT_EQ(600, hist.getBucket(b).count(IntMTMHTS::MINUTE));
      EXPECT_EQ(6000, hist.getBucket(b).count(IntMTMHTS::TEN_MINUTE));
      EXPECT_EQ(36000, hist.getBucket(b).count(IntMTMHTS::HOUR));
      EXPECT_EQ(36000, hist.getBucket(b).count(IntMTMHTS::ALLTIME));
    }
    EXPECT_EQ(0, hist.getBucket(0).count(IntMTMHTS::MINUTE));
    EXPECT_EQ(
        0, hist.getBucket(hist.getNumBuckets() - 1).count(IntMTMHTS::MINUTE));

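    // 100 samples were added per second, so each level's count is 100 times
    // its window length: 60s, 600s, and 3600s (the all-time level has only
    // seen the same 3600s of data so far).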
    EXPECT_EQ(6000, hist.count(IntMTMHTS::MINUTE));
    EXPECT_EQ(60000, hist.count(IntMTMHTS::TEN_MINUTE));
    EXPECT_EQ(360000, hist.count(IntMTMHTS::HOUR));
    EXPECT_EQ(360000, hist.count(IntMTMHTS::ALLTIME));

    // Each second we added 4950 total over 100 data points
    EXPECT_EQ(297000, hist.sum(IntMTMHTS::MINUTE));
    EXPECT_EQ(2970000, hist.sum(IntMTMHTS::TEN_MINUTE));
    EXPECT_EQ(17820000, hist.sum(IntMTMHTS::HOUR));
    EXPECT_EQ(17820000, hist.sum(IntMTMHTS::ALLTIME));

    EXPECT_EQ(49, hist.avg<int>(IntMTMHTS::MINUTE));
    EXPECT_EQ(49, hist.avg<int>(IntMTMHTS::TEN_MINUTE));
    EXPECT_EQ(49, hist.avg<int>(IntMTMHTS::HOUR));
    EXPECT_EQ(49, hist.avg<int>(IntMTMHTS::ALLTIME));
    EXPECT_EQ(49.5, hist.avg<double>(IntMTMHTS::MINUTE));
    EXPECT_EQ(49.5, hist.avg<double>(IntMTMHTS::TEN_MINUTE));
    EXPECT_EQ(49.5, hist.avg<double>(IntMTMHTS::HOUR));
    EXPECT_EQ(49.5, hist.avg<double>(IntMTMHTS::ALLTIME));

    EXPECT_EQ(4950, hist.rate<int>(IntMTMHTS::MINUTE));
    EXPECT_EQ(4950, hist.rate<int>(IntMTMHTS::TEN_MINUTE));
    EXPECT_EQ(4950, hist.rate<int>(IntMTMHTS::HOUR));
    EXPECT_EQ(4950, hist.rate<int>(IntMTMHTS::ALLTIME));
    EXPECT_EQ(4950, hist.rate<double>(IntMTMHTS::MINUTE));
    EXPECT_EQ(4950, hist.rate<double>(IntMTMHTS::TEN_MINUTE));
    EXPECT_EQ(4950, hist.rate<double>(IntMTMHTS::HOUR));
    EXPECT_EQ(4950, hist.rate<double>(IntMTMHTS::ALLTIME));

    EXPECT_EQ(1000, hist.count(mkTimePoint(10), mkTimePoint(20)));
    EXPECT_EQ(49500, hist.sum(mkTimePoint(10), mkTimePoint(20)));
    EXPECT_EQ(4950, hist.rate(mkTimePoint(10), mkTimePoint(20)));
    EXPECT_EQ(49.5, hist.avg<double>(mkTimePoint(10), mkTimePoint(20)));

    EXPECT_EQ(200, hist.count(mkTimePoint(3550), mkTimePoint(3552)));
    EXPECT_EQ(9900, hist.sum(mkTimePoint(3550), mkTimePoint(3552)));
    EXPECT_EQ(4950, hist.rate(mkTimePoint(3550), mkTimePoint(3552)));
    EXPECT_EQ(49.5, hist.avg<double>(mkTimePoint(3550), mkTimePoint(3552)));

    EXPECT_EQ(0, hist.count(mkTimePoint(4550), mkTimePoint(4552)));
    EXPECT_EQ(0, hist.sum(mkTimePoint(4550), mkTimePoint(4552)));
    EXPECT_EQ(0, hist.rate(mkTimePoint(4550), mkTimePoint(4552)));
    EXPECT_EQ(0, hist.avg<double>(mkTimePoint(4550), mkTimePoint(4552)));
  }

  // -----------------

  {
    TimeseriesHistogram<int> hist(
        10,
        0,
        100,
        MultiLevelTimeSeries<int>(
            60, IntMTMHTS::NUM_LEVELS, IntMTMHTS::kDurations));

    for (int now = 0; now < 3600; now++) {
      for (int i = 0; i < 100; i++) {
        hist.addValue(mkTimePoint(now), i, 2); // adds each item 2 times
      }
    }

    hist.update(mkTimePoint(3599));
    for (int pct = 1; pct <= 100; pct++) {
      int expected = (pct - 1) / 10 * 10;
      EXPECT_EQ(expected, hist.getPercentileBucketMin(pct, IntMTMHTS::MINUTE));
      EXPECT_EQ(
          expected, hist.getPercentileBucketMin(pct, IntMTMHTS::TEN_MINUTE));
      EXPECT_EQ(expected, hist.getPercentileBucketMin(pct, IntMTMHTS::HOUR));
      EXPECT_EQ(expected, hist.getPercentileBucketMin(pct, IntMTMHTS::ALLTIME));
    }

    for (size_t b = 1; (b + 1) < hist.getNumBuckets(); ++b) {
      EXPECT_EQ(600 * 2, hist.getBucket(b).count(IntMTMHTS::MINUTE));
      EXPECT_EQ(6000 * 2, hist.getBucket(b).count(IntMTMHTS::TEN_MINUTE));
      EXPECT_EQ(36000 * 2, hist.getBucket(b).count(IntMTMHTS::HOUR));
      EXPECT_EQ(36000 * 2, hist.getBucket(b).count(IntMTMHTS::ALLTIME));
    }
    EXPECT_EQ(0, hist.getBucket(0).count(IntMTMHTS::MINUTE));
    EXPECT_EQ(
        0, hist.getBucket(hist.getNumBuckets() - 1).count(IntMTMHTS::MINUTE));
  }

  // -----------------

  {
    TimeseriesHistogram<int> hist(
        10,
        0,
        100,
        MultiLevelTimeSeries<int>(
            60, IntMTMHTS::NUM_LEVELS, IntMTMHTS::kDurations));

    for (int now = 0; now < 3600; now++) {
      for (int i = 0; i < 50; i++) {
        hist.addValue(mkTimePoint(now), i * 2, 2); // adds each item 2 times
      }
    }

    hist.update(mkTimePoint(3599));
    for (int pct = 1; pct <= 100; pct++) {
      int expected = (pct - 1) / 10 * 10;
      EXPECT_EQ(expected, hist.getPercentileBucketMin(pct, IntMTMHTS::MINUTE));
      EXPECT_EQ(
          expected, hist.getPercentileBucketMin(pct, IntMTMHTS::TEN_MINUTE));
      EXPECT_EQ(expected, hist.getPercentileBucketMin(pct, IntMTMHTS::HOUR));
      EXPECT_EQ(expected, hist.getPercentileBucketMin(pct, IntMTMHTS::ALLTIME));
    }

    EXPECT_EQ(0, hist.getBucket(0).count(IntMTMHTS::MINUTE));
    EXPECT_EQ(0, hist.getBucket(0).count(IntMTMHTS::TEN_MINUTE));
    EXPECT_EQ(0, hist.getBucket(0).count(IntMTMHTS::HOUR));
    EXPECT_EQ(0, hist.getBucket(0).count(IntMTMHTS::ALLTIME));
    EXPECT_EQ(
        0, hist.getBucket(hist.getNumBuckets() - 1).count(IntMTMHTS::MINUTE));
    EXPECT_EQ(
        0,
        hist.getBucket(hist.getNumBuckets() - 1).count(IntMTMHTS::TEN_MINUTE));
    EXPECT_EQ(
        0, hist.getBucket(hist.getNumBuckets() - 1).count(IntMTMHTS::HOUR));
    EXPECT_EQ(
        0, hist.getBucket(hist.getNumBuckets() - 1).count(IntMTMHTS::ALLTIME));

    for (size_t b = 1; (b + 1) < hist.getNumBuckets(); ++b) {
      EXPECT_EQ(600, hist.getBucket(b).count(IntMTMHTS::MINUTE));
      EXPECT_EQ(6000, hist.getBucket(b).count(IntMTMHTS::TEN_MINUTE));
      EXPECT_EQ(36000, hist.getBucket(b).count(IntMTMHTS::HOUR));
      EXPECT_EQ(36000, hist.getBucket(b).count(IntMTMHTS::ALLTIME));
    }

    for (int i = 0; i < 100; ++i) {
      hist.addValue(mkTimePoint(3599), 200 + i);
    }
    hist.update(mkTimePoint(3599));
    EXPECT_EQ(
        100,
        hist.getBucket(hist.getNumBuckets() - 1).count(IntMTMHTS::ALLTIME));
  }
}

TEST(TimeseriesHistogram, QueryByInterval) {
  TimeseriesHistogram<int> mhts(
      8,
      8,
      120,
      MultiLevelTimeSeries<int>(60, IntMHTS::NUM_LEVELS, IntMHTS::kDurations));

  mhts.update(mkTimePoint(0));

  int curTime;
  for (curTime = 0; curTime < 7200; curTime++) {
    mhts.addValue(mkTimePoint(curTime), 1);
  }
  for (curTime = 7200; curTime < 7200 + 3540; curTime++) {
    mhts.addValue(mkTimePoint(curTime), 10);
  }
  for (curTime = 7200 + 3540; curTime < 7200 + 3600; curTime++) {
    mhts.addValue(mkTimePoint(curTime), 100);
  }

  mhts.update(mkTimePoint(7200 + 3600 - 1));

  struct TimeInterval {
    TimeInterval(int s, int e) : start(mkTimePoint(s)), end(mkTimePoint(e)) {}

    StatsClock::time_point start;
    StatsClock::time_point end;
  };
  TimeInterval intervals[12] = {
      {curTime - 60, curTime},
      {curTime - 3600, curTime},
      {curTime - 7200, curTime},
      {curTime - 3600, curTime - 60},
      {curTime - 7200, curTime - 60},
      {curTime - 7200, curTime - 3600},
      {curTime - 50, curTime - 20},
      {curTime - 3020, curTime - 20},
      {curTime - 7200, curTime - 20},
      {curTime - 3000, curTime - 1000},
      {curTime - 7200, curTime - 1000},
      {curTime - 7200, curTime - 3600},
  };

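  // Expected sums/counts for the interval queries below: intervals fully
  // covered by the minute and hour levels are exact, while older intervals
  // have to be estimated by prorating coarser all-time data, which is why a
  // few entries (e.g. 32129, 32308, 27899) sit slightly below the exact
  // totals.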
  int expectedSums[12] = {
      6000,
      41400,
      32400,
      35400,
      32129,
      16200,
      3000,
      33600,
      32308,
      20000,
      27899,
      16200,
  };

  int expectedCounts[12] = {
      60, 3600, 7200, 3540, 7139, 3600, 30, 3000, 7178, 2000, 6199, 3600,
  };

  // The first 7200 values added all fell below the histogram minimum,
  // and went into the bucket that tracks all of the too-small values.
  // This bucket reports a minimum value of the smallest possible integer.
  int belowMinBucket = std::numeric_limits<int>::min();

  int expectedValues[12][3] = {
      {96, 96, 96},
      {8, 8, 96},
      {belowMinBucket, belowMinBucket, 8}, // alltime
      {8, 8, 8},
      {belowMinBucket, belowMinBucket, 8}, // alltime
      {belowMinBucket, belowMinBucket, 8}, // alltime
      {96, 96, 96},
      {8, 8, 96},
      {belowMinBucket, belowMinBucket, 8}, // alltime
      {8, 8, 8},
      {belowMinBucket, belowMinBucket, 8}, // alltime
      {belowMinBucket, belowMinBucket, 8} // alltime
  };

  for (int i = 0; i < 12; i++) {
    const auto& itv = intervals[i];
    int s = mhts.sum(itv.start, itv.end);
    EXPECT_EQ(expectedSums[i], s);

    int c = mhts.count(itv.start, itv.end);
    EXPECT_EQ(expectedCounts[i], c);
  }

  // 3 levels
  for (int i = 1; i <= 100; i++) {
    EXPECT_EQ(96, mhts.getPercentileBucketMin(i, 0));
    EXPECT_EQ(
        96,
        mhts.getPercentileBucketMin(
            i, mkTimePoint(curTime - 60), mkTimePoint(curTime)));
    EXPECT_EQ(
        8,
        mhts.getPercentileBucketMin(
            i, mkTimePoint(curTime - 3540), mkTimePoint(curTime - 60)));
  }

  EXPECT_EQ(8, mhts.getPercentileBucketMin(1, 1));
  EXPECT_EQ(8, mhts.getPercentileBucketMin(98, 1));
  EXPECT_EQ(96, mhts.getPercentileBucketMin(99, 1));
  EXPECT_EQ(96, mhts.getPercentileBucketMin(100, 1));

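  // At level 2 (all time), 7200 of the 10800 samples (two thirds) fell below
  // the histogram minimum of 8, so percentiles up through 66 resolve to the
  // below-min bucket and report INT_MIN.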
  EXPECT_EQ(belowMinBucket, mhts.getPercentileBucketMin(1, 2));
  EXPECT_EQ(belowMinBucket, mhts.getPercentileBucketMin(66, 2));
  EXPECT_EQ(8, mhts.getPercentileBucketMin(67, 2));
  EXPECT_EQ(8, mhts.getPercentileBucketMin(99, 2));
  EXPECT_EQ(96, mhts.getPercentileBucketMin(100, 2));

  // Low percentiles that resolve to bucket 0 (below min) report
  // belowMinBucket, i.e. std::numeric_limits<int>::min().
  for (int i = 0; i < 12; i++) {
    const auto& itv = intervals[i];
    int v = mhts.getPercentileBucketMin(1, itv.start, itv.end);
    EXPECT_EQ(expectedValues[i][0], v);

    v = mhts.getPercentileBucketMin(50, itv.start, itv.end);
    EXPECT_EQ(expectedValues[i][1], v);

    v = mhts.getPercentileBucketMin(99, itv.start, itv.end);
    EXPECT_EQ(expectedValues[i][2], v);
  }

  for (int i = 0; i < 12; i++) {
    const auto& itv = intervals[i];
    // Some of the older intervals that fall in the alltime bucket
    // are off by 1 or 2 in their estimated counts.
    size_t tolerance = 0;
    if (itv.start <= mkTimePoint(curTime - 7200)) {
      tolerance = 2;
    } else if (itv.start <= mkTimePoint(curTime - 3000)) {
      tolerance = 1;
    }
    size_t actualCount = (itv.end - itv.start).count();
    size_t estimatedCount = mhts.count(itv.start, itv.end);
    EXPECT_GE(actualCount, estimatedCount);
    EXPECT_LE(actualCount - tolerance, estimatedCount);
  }
}

TEST(TimeseriesHistogram, SingleUniqueValue) {
  int values[] = {-1, 0, 500, 1000, 1500};
  for (int ii = 0; ii < 5; ++ii) {
    int value = values[ii];
    TimeseriesHistogram<int> h(
        10,
        0,
        1000,
        MultiLevelTimeSeries<int>(
            60, IntMTMHTS::NUM_LEVELS, IntMTMHTS::kDurations));

    const int kNumIters = 1000;
    for (int jj = 0; jj < kNumIters; ++jj) {
      h.addValue(mkTimePoint(1), value);
    }
    h.update(mkTimePoint(1));
    // since we've only added one unique value, all percentiles should
    // be that value
    EXPECT_EQ(h.getPercentileEstimate(10, 0), value);
    EXPECT_EQ(h.getPercentileEstimate(50, 0), value);
    EXPECT_EQ(h.getPercentileEstimate(99, 0), value);

    // Things get trickier if there are multiple unique values.
    const int kNewValue = 750;
    for (int kk = 0; kk < 2 * kNumIters; ++kk) {
      h.addValue(mkTimePoint(1), kNewValue);
    }
    h.update(mkTimePoint(1));
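    // The estimates below should land inside the 10-wide bucket holding each
    // value, hence the checks against the bucket midpoint with a +/-5
    // tolerance.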
    EXPECT_NEAR(h.getPercentileEstimate(50, 0), kNewValue + 5, 5);
    if (value >= 0 && value <= 1000) {
      // only do further testing if value is within our bucket range,
      // else estimates can be wildly off
      if (kNewValue > value) {
        EXPECT_NEAR(h.getPercentileEstimate(10, 0), value + 5, 5);
        EXPECT_NEAR(h.getPercentileEstimate(99, 0), kNewValue + 5, 5);
      } else {
        EXPECT_NEAR(h.getPercentileEstimate(10, 0), kNewValue + 5, 5);
        EXPECT_NEAR(h.getPercentileEstimate(99, 0), value + 5, 5);
      }
    }
  }
}