Fix the linking of various tests against GMock
[folly.git] / folly / test / TimeseriesHistogramTest.cpp
1 /*
2  * Copyright 2017 Facebook, Inc.
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *   http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16
17 #include <folly/stats/TimeseriesHistogram.h>
18 #include <folly/stats/TimeseriesHistogram-defs.h>
19
20 #include <random>
21
22 #include <folly/portability/GTest.h>
23
24 using namespace std;
25 using namespace folly;
26 using std::chrono::seconds;
27
28 namespace {
29 namespace IntMTMHTS {
30   enum Levels {
31     MINUTE,
32     TEN_MINUTE,
33     HOUR,
34     ALLTIME,
35     NUM_LEVELS,
36   };
37
38   const seconds kDurations[] = {
39     seconds(60), seconds(600), seconds(3600), seconds(0)
40   };
41 };
42
43 namespace IntMHTS {
44   enum Levels {
45     MINUTE,
46     HOUR,
47     ALLTIME,
48     NUM_LEVELS,
49   };
50
51   const seconds kDurations[] = {
52     seconds(60), seconds(3600), seconds(0)
53   };
54 };
55
56 typedef std::mt19937 RandomInt32;
57
58 using StatsClock = folly::LegacyStatsClock<std::chrono::seconds>;
59 StatsClock::time_point mkTimePoint(int value) {
60   return StatsClock::time_point(StatsClock::duration(value));
61 }
62 }
63
64 TEST(TimeseriesHistogram, Percentile) {
65   RandomInt32 random(5);
66   // [10, 109], 12 buckets including above and below
67   {
68     TimeseriesHistogram<int> h(10, 10, 110,
69                                MultiLevelTimeSeries<int>(
70                                  60, IntMTMHTS::NUM_LEVELS,
71                                  IntMTMHTS::kDurations));
72
73     EXPECT_EQ(0, h.getPercentileEstimate(0, IntMTMHTS::ALLTIME));
74
75     EXPECT_EQ(12, h.getNumBuckets());
76     EXPECT_EQ(10, h.getBucketSize());
77     EXPECT_EQ(10, h.getMin());
78     EXPECT_EQ(110, h.getMax());
79
80     for (size_t i = 0; i < h.getNumBuckets(); ++i) {
81       EXPECT_EQ(4, h.getBucket(i).numLevels());
82     }
83
84     int maxVal = 120;
85     h.addValue(mkTimePoint(0), 0);
86     h.addValue(mkTimePoint(0), maxVal);
87     for (int i = 0; i < 98; i++) {
88       h.addValue(mkTimePoint(0), random() % maxVal);
89     }
90
91     h.update(mkTimePoint(1500000000));
92     // bucket 0 stores everything below min, so its minimum
93     // is the lowest possible number
94     EXPECT_EQ(std::numeric_limits<int>::min(),
95               h.getPercentileBucketMin(1, IntMTMHTS::ALLTIME));
96     EXPECT_EQ(110, h.getPercentileBucketMin(99, IntMTMHTS::ALLTIME));
97
98     EXPECT_EQ(-2, h.getPercentileEstimate(0, IntMTMHTS::ALLTIME));
99     EXPECT_EQ(-1, h.getPercentileEstimate(1, IntMTMHTS::ALLTIME));
100     EXPECT_EQ(119, h.getPercentileEstimate(99, IntMTMHTS::ALLTIME));
101     EXPECT_EQ(120, h.getPercentileEstimate(100, IntMTMHTS::ALLTIME));
102   }
103 }
104
105 TEST(TimeseriesHistogram, String) {
106   RandomInt32 random(5);
107   // [10, 109], 12 buckets including above and below
108   {
109     TimeseriesHistogram<int> hist(10, 10, 110,
110                                   MultiLevelTimeSeries<int>(
111                                     60, IntMTMHTS::NUM_LEVELS,
112                                     IntMTMHTS::kDurations));
113
114     int maxVal = 120;
115     hist.addValue(mkTimePoint(0), 0);
116     hist.addValue(mkTimePoint(0), maxVal);
117     for (int i = 0; i < 98; i++) {
118       hist.addValue(mkTimePoint(0), random() % maxVal);
119     }
120
121     hist.update(mkTimePoint(0));
122
123     const char* const kStringValues1[IntMTMHTS::NUM_LEVELS] =  {
124       "-2147483648:12:4,10:8:13,20:8:24,30:6:34,40:13:46,50:8:54,60:7:64,"
125         "70:7:74,80:8:84,90:10:94,100:3:103,110:10:115",
126       "-2147483648:12:4,10:8:13,20:8:24,30:6:34,40:13:46,50:8:54,60:7:64,"
127         "70:7:74,80:8:84,90:10:94,100:3:103,110:10:115",
128       "-2147483648:12:4,10:8:13,20:8:24,30:6:34,40:13:46,50:8:54,60:7:64,"
129         "70:7:74,80:8:84,90:10:94,100:3:103,110:10:115",
130       "-2147483648:12:4,10:8:13,20:8:24,30:6:34,40:13:46,50:8:54,60:7:64,"
131         "70:7:74,80:8:84,90:10:94,100:3:103,110:10:115",
132     };
133
134     CHECK_EQ(IntMTMHTS::NUM_LEVELS, hist.getNumLevels());
135
136     for (size_t level = 0; level < hist.getNumLevels(); ++level) {
137       EXPECT_EQ(kStringValues1[level], hist.getString(level));
138     }
139
140     const char* const kStringValues2[IntMTMHTS::NUM_LEVELS] =  {
141       "-2147483648:12:4,10:8:13,20:8:24,30:6:34,40:13:46,50:8:54,60:7:64,"
142         "70:7:74,80:8:84,90:10:94,100:3:103,110:10:115",
143       "-2147483648:12:4,10:8:13,20:8:24,30:6:34,40:13:46,50:8:54,60:7:64,"
144         "70:7:74,80:8:84,90:10:94,100:3:103,110:10:115",
145       "-2147483648:12:4,10:8:13,20:8:24,30:6:34,40:13:46,50:8:54,60:7:64,"
146         "70:7:74,80:8:84,90:10:94,100:3:103,110:10:115",
147       "-2147483648:12:4,10:8:13,20:8:24,30:6:34,40:13:46,50:8:54,60:7:64,"
148         "70:7:74,80:8:84,90:10:94,100:3:103,110:10:115",
149     };
150
151     CHECK_EQ(IntMTMHTS::NUM_LEVELS, hist.getNumLevels());
152
153     for (size_t level = 0; level < hist.getNumLevels(); ++level) {
154       EXPECT_EQ(kStringValues2[level], hist.getString(level));
155     }
156   }
157 }
158
159 TEST(TimeseriesHistogram, Clear) {
160   {
161     TimeseriesHistogram<int> hist(10, 0, 100,
162                                   MultiLevelTimeSeries<int>(
163                                     60, IntMTMHTS::NUM_LEVELS,
164                                     IntMTMHTS::kDurations));
165
166     for (int now = 0; now < 3600; now++) {
167       for (int i = 0; i < 100; i++) {
168         hist.addValue(mkTimePoint(now), i, 2); // adds each item 2 times
169       }
170     }
171
172     // check clearing
173     hist.clear();
174
175     for (size_t b = 0; b < hist.getNumBuckets(); ++b) {
176       EXPECT_EQ(0, hist.getBucket(b).count(IntMTMHTS::MINUTE));
177       EXPECT_EQ(0, hist.getBucket(b).count(IntMTMHTS::TEN_MINUTE));
178       EXPECT_EQ(0, hist.getBucket(b).count(IntMTMHTS::HOUR));
179       EXPECT_EQ(0, hist.getBucket(b).count(IntMTMHTS::ALLTIME));
180     }
181
182     for (int pct = 0; pct <= 100; pct++) {
183       EXPECT_EQ(0, hist.getPercentileBucketMin(pct, IntMTMHTS::MINUTE));
184       EXPECT_EQ(0, hist.getPercentileBucketMin(pct, IntMTMHTS::TEN_MINUTE));
185       EXPECT_EQ(0, hist.getPercentileBucketMin(pct, IntMTMHTS::HOUR));
186       EXPECT_EQ(0, hist.getPercentileBucketMin(pct, IntMTMHTS::ALLTIME));
187
188       EXPECT_EQ(0, hist.getPercentileEstimate(pct, IntMTMHTS::MINUTE));
189       EXPECT_EQ(0, hist.getPercentileEstimate(pct, IntMTMHTS::TEN_MINUTE));
190       EXPECT_EQ(0, hist.getPercentileEstimate(pct, IntMTMHTS::HOUR));
191       EXPECT_EQ(0, hist.getPercentileEstimate(pct, IntMTMHTS::ALLTIME));
192     }
193   }
194 }
195
196
// Exercises bucket counts and the count/sum/avg/rate aggregates across
// all four levels, in three scenarios: each value once per second, each
// value with weight 2, and even values only with weight 2.
TEST(TimeseriesHistogram, Basic) {
  {
    TimeseriesHistogram<int> hist(10, 0, 100,
                                  MultiLevelTimeSeries<int>(
                                    60, IntMTMHTS::NUM_LEVELS,
                                    IntMTMHTS::kDurations));

    // One hour of data: one add of each value 0..99 per second.
    for (int now = 0; now < 3600; now++) {
      for (int i = 0; i < 100; i++) {
        hist.addValue(mkTimePoint(now), i);
      }
    }

    hist.update(mkTimePoint(3599));
    // Uniform data over 0..99: the pct'th percentile lands in the bucket
    // whose minimum is ((pct - 1) / 10) * 10, at every level.
    for (int pct = 1; pct <= 100; pct++) {
      int expected = (pct - 1) / 10 * 10;
      EXPECT_EQ(expected, hist.getPercentileBucketMin(pct, IntMTMHTS::MINUTE));
      EXPECT_EQ(expected, hist.getPercentileBucketMin(pct,
                                                      IntMTMHTS::TEN_MINUTE));
      EXPECT_EQ(expected, hist.getPercentileBucketMin(pct, IntMTMHTS::HOUR));
      EXPECT_EQ(expected, hist.getPercentileBucketMin(pct, IntMTMHTS::ALLTIME));
    }

    // Each interior bucket covers 10 values -> 10 adds per second,
    // scaled by the window length of each level.
    for (size_t b = 1; (b + 1) < hist.getNumBuckets(); ++b) {
      EXPECT_EQ(600, hist.getBucket(b).count(IntMTMHTS::MINUTE));
      EXPECT_EQ(6000, hist.getBucket(b).count(IntMTMHTS::TEN_MINUTE));
      EXPECT_EQ(36000, hist.getBucket(b).count(IntMTMHTS::HOUR));
      EXPECT_EQ(36000, hist.getBucket(b).count(IntMTMHTS::ALLTIME));
    }
    // No values fell below min (0) or at/above max (100), so the two
    // overflow buckets stay empty.
    EXPECT_EQ(0, hist.getBucket(0).count(IntMTMHTS::MINUTE));
    EXPECT_EQ(0, hist.getBucket(hist.getNumBuckets() - 1).count(
                IntMTMHTS::MINUTE));

    // Totals: 100 adds/sec over 60s / 600s / 3600s windows.
    EXPECT_EQ(6000, hist.count(IntMTMHTS::MINUTE));
    EXPECT_EQ(60000, hist.count(IntMTMHTS::TEN_MINUTE));
    EXPECT_EQ(360000, hist.count(IntMTMHTS::HOUR));
    EXPECT_EQ(360000, hist.count(IntMTMHTS::ALLTIME));

    // Each second we added 4950 total over 100 data points
    EXPECT_EQ(297000, hist.sum(IntMTMHTS::MINUTE));
    EXPECT_EQ(2970000, hist.sum(IntMTMHTS::TEN_MINUTE));
    EXPECT_EQ(17820000, hist.sum(IntMTMHTS::HOUR));
    EXPECT_EQ(17820000, hist.sum(IntMTMHTS::ALLTIME));

    // avg is 49.5; integer instantiation truncates to 49.
    EXPECT_EQ(49, hist.avg<int>(IntMTMHTS::MINUTE));
    EXPECT_EQ(49, hist.avg<int>(IntMTMHTS::TEN_MINUTE));
    EXPECT_EQ(49, hist.avg<int>(IntMTMHTS::HOUR));
    EXPECT_EQ(49, hist.avg<int>(IntMTMHTS::ALLTIME));
    EXPECT_EQ(49.5, hist.avg<double>(IntMTMHTS::MINUTE));
    EXPECT_EQ(49.5, hist.avg<double>(IntMTMHTS::TEN_MINUTE));
    EXPECT_EQ(49.5, hist.avg<double>(IntMTMHTS::HOUR));
    EXPECT_EQ(49.5, hist.avg<double>(IntMTMHTS::ALLTIME));

    // rate() is sum per second: 4950 regardless of window length.
    EXPECT_EQ(4950, hist.rate<int>(IntMTMHTS::MINUTE));
    EXPECT_EQ(4950, hist.rate<int>(IntMTMHTS::TEN_MINUTE));
    EXPECT_EQ(4950, hist.rate<int>(IntMTMHTS::HOUR));
    EXPECT_EQ(4950, hist.rate<int>(IntMTMHTS::ALLTIME));
    EXPECT_EQ(4950, hist.rate<double>(IntMTMHTS::MINUTE));
    EXPECT_EQ(4950, hist.rate<double>(IntMTMHTS::TEN_MINUTE));
    EXPECT_EQ(4950, hist.rate<double>(IntMTMHTS::HOUR));
    EXPECT_EQ(4950, hist.rate<double>(IntMTMHTS::ALLTIME));

    // Interval queries: [10, 20) spans 10 full seconds of data.
    EXPECT_EQ(1000, hist.count(mkTimePoint(10), mkTimePoint(20)));
    EXPECT_EQ(49500, hist.sum(mkTimePoint(10), mkTimePoint(20)));
    EXPECT_EQ(4950, hist.rate(mkTimePoint(10), mkTimePoint(20)));
    EXPECT_EQ(49.5, hist.avg<double>(mkTimePoint(10), mkTimePoint(20)));

    // [3550, 3552) spans 2 seconds near the end of the data.
    EXPECT_EQ(200, hist.count(mkTimePoint(3550), mkTimePoint(3552)));
    EXPECT_EQ(9900, hist.sum(mkTimePoint(3550), mkTimePoint(3552)));
    EXPECT_EQ(4950, hist.rate(mkTimePoint(3550), mkTimePoint(3552)));
    EXPECT_EQ(49.5, hist.avg<double>(mkTimePoint(3550), mkTimePoint(3552)));

    // Intervals entirely after the recorded data report zero.
    EXPECT_EQ(0, hist.count(mkTimePoint(4550), mkTimePoint(4552)));
    EXPECT_EQ(0, hist.sum(mkTimePoint(4550), mkTimePoint(4552)));
    EXPECT_EQ(0, hist.rate(mkTimePoint(4550), mkTimePoint(4552)));
    EXPECT_EQ(0, hist.avg<double>(mkTimePoint(4550), mkTimePoint(4552)));
  }

  // -----------------

  // Same data but each value added with weight 2: percentiles are
  // unchanged while bucket counts double.
  {
    TimeseriesHistogram<int> hist(10, 0, 100,
                                  MultiLevelTimeSeries<int>(
                                    60, IntMTMHTS::NUM_LEVELS,
                                    IntMTMHTS::kDurations));

    for (int now = 0; now < 3600; now++) {
      for (int i = 0; i < 100; i++) {
        hist.addValue(mkTimePoint(now), i, 2); // adds each item 2 times
      }
    }

    hist.update(mkTimePoint(3599));
    for (int pct = 1; pct <= 100; pct++) {
      int expected = (pct - 1) / 10 * 10;
      EXPECT_EQ(expected, hist.getPercentileBucketMin(pct, IntMTMHTS::MINUTE));
      EXPECT_EQ(expected, hist.getPercentileBucketMin(pct,
                                                      IntMTMHTS::TEN_MINUTE));
      EXPECT_EQ(expected, hist.getPercentileBucketMin(pct, IntMTMHTS::HOUR));
      EXPECT_EQ(expected, hist.getPercentileBucketMin(pct, IntMTMHTS::ALLTIME));
   }

   // Doubled weight -> doubled counts in every interior bucket.
   for (size_t b = 1; (b + 1) < hist.getNumBuckets(); ++b) {
     EXPECT_EQ(600 * 2, hist.getBucket(b).count(IntMTMHTS::MINUTE));
     EXPECT_EQ(6000 * 2, hist.getBucket(b).count(IntMTMHTS::TEN_MINUTE));
     EXPECT_EQ(36000 * 2, hist.getBucket(b).count(IntMTMHTS::HOUR));
     EXPECT_EQ(36000 * 2, hist.getBucket(b).count(IntMTMHTS::ALLTIME));
    }
    EXPECT_EQ(0, hist.getBucket(0).count(IntMTMHTS::MINUTE));
    EXPECT_EQ(0, hist.getBucket(hist.getNumBuckets() - 1).count(
                IntMTMHTS::MINUTE));
  }

  // -----------------

  // Even values only (0, 2, ..., 98) with weight 2: same total count per
  // bucket as scenario one, and percentile bucket minimums unchanged.
  {
    TimeseriesHistogram<int> hist(10, 0, 100,
                                  MultiLevelTimeSeries<int>(
                                    60, IntMTMHTS::NUM_LEVELS,
                                    IntMTMHTS::kDurations));

    for (int now = 0; now < 3600; now++) {
      for (int i = 0; i < 50; i++) {
        hist.addValue(mkTimePoint(now), i * 2, 2); // adds each item 2 times
      }
    }

    hist.update(mkTimePoint(3599));
    for (int pct = 1; pct <= 100; pct++) {
      int expected = (pct - 1) / 10 * 10;
      EXPECT_EQ(expected, hist.getPercentileBucketMin(pct, IntMTMHTS::MINUTE));
      EXPECT_EQ(expected, hist.getPercentileBucketMin(pct,
                                                      IntMTMHTS::TEN_MINUTE));
      EXPECT_EQ(expected, hist.getPercentileBucketMin(pct, IntMTMHTS::HOUR));
      EXPECT_EQ(expected, hist.getPercentileBucketMin(pct, IntMTMHTS::ALLTIME));
    }

    // Overflow buckets stay empty at every level.
    EXPECT_EQ(0, hist.getBucket(0).count(IntMTMHTS::MINUTE));
    EXPECT_EQ(0, hist.getBucket(0).count(IntMTMHTS::TEN_MINUTE));
    EXPECT_EQ(0, hist.getBucket(0).count(IntMTMHTS::HOUR));
    EXPECT_EQ(0, hist.getBucket(0).count(IntMTMHTS::ALLTIME));
    EXPECT_EQ(0, hist.getBucket(hist.getNumBuckets() - 1).count(
                IntMTMHTS::MINUTE));
    EXPECT_EQ(0,
              hist.getBucket(hist.getNumBuckets() - 1).
                count(IntMTMHTS::TEN_MINUTE));
    EXPECT_EQ(0, hist.getBucket(hist.getNumBuckets() - 1).count(
                IntMTMHTS::HOUR));
    EXPECT_EQ(0,
              hist.getBucket(hist.getNumBuckets() - 1).count(
                IntMTMHTS::ALLTIME));

    // 5 distinct values per bucket, each with weight 2 -> same counts
    // as one add each of 10 values.
    for (size_t b = 1; (b + 1) < hist.getNumBuckets(); ++b) {
      EXPECT_EQ(600, hist.getBucket(b).count(IntMTMHTS::MINUTE));
      EXPECT_EQ(6000, hist.getBucket(b).count(IntMTMHTS::TEN_MINUTE));
      EXPECT_EQ(36000, hist.getBucket(b).count(IntMTMHTS::HOUR));
      EXPECT_EQ(36000, hist.getBucket(b).count(IntMTMHTS::ALLTIME));
    }

    // Values >= max (100) land in the last (above-max) bucket.
    for (int i = 0; i < 100; ++i) {
      hist.addValue(mkTimePoint(3599), 200 + i);
    }
    hist.update(mkTimePoint(3599));
    EXPECT_EQ(100,
              hist.getBucket(hist.getNumBuckets() - 1).count(
                IntMTMHTS::ALLTIME));

  }
}
366
// Exercises sum/count/percentile queries over arbitrary [start, end)
// intervals, including intervals that must be satisfied by coarser
// levels (hour, all-time) where counts are estimates.
TEST(TimeseriesHistogram, QueryByInterval) {
  TimeseriesHistogram<int> mhts(8, 8, 120,
                                MultiLevelTimeSeries<int>(
                                  60, IntMHTS::NUM_LEVELS,
                                  IntMHTS::kDurations));

  mhts.update(mkTimePoint(0));

  // Three phases of data: value 1 for the first two hours, value 10 for
  // the next 59 minutes, then value 100 for the final minute.
  int curTime;
  for (curTime = 0; curTime < 7200; curTime++) {
    mhts.addValue(mkTimePoint(curTime), 1);
  }
  for (curTime = 7200; curTime < 7200 + 3540; curTime++) {
    mhts.addValue(mkTimePoint(curTime), 10);
  }
  for (curTime = 7200 + 3540; curTime < 7200 + 3600; curTime++) {
    mhts.addValue(mkTimePoint(curTime), 100);
  }

  mhts.update(mkTimePoint(7200 + 3600 - 1));

  // Simple [start, end) pair built from raw second offsets.
  struct TimeInterval {
    TimeInterval(int s, int e) : start(mkTimePoint(s)), end(mkTimePoint(e)) {}

    StatsClock::time_point start;
    StatsClock::time_point end;
  };
  // Intervals of varying age and width; older ones reach back into the
  // hour or all-time levels.
  TimeInterval intervals[12] = {
    { curTime - 60, curTime },
    { curTime - 3600, curTime },
    { curTime - 7200, curTime },
    { curTime - 3600, curTime - 60 },
    { curTime - 7200, curTime - 60 },
    { curTime - 7200, curTime - 3600 },
    { curTime - 50, curTime - 20 },
    { curTime - 3020, curTime - 20 },
    { curTime - 7200, curTime - 20 },
    { curTime - 3000, curTime - 1000 },
    { curTime - 7200, curTime - 1000 },
    { curTime - 7200, curTime - 3600 },
  };

  // Expected aggregates per interval; values served from coarser levels
  // are estimates (hence non-round numbers like 32129).
  int expectedSums[12] = {
    6000, 41400, 32400, 35400, 32129, 16200, 3000, 33600, 32308, 20000, 27899,
    16200
  };

  int expectedCounts[12] = {
    60, 3600, 7200, 3540, 7139, 3600, 30, 3000, 7178, 2000, 6199, 3600
  };

  // The first 7200 values added all fell below the histogram minimum,
  // and went into the bucket that tracks all of the too-small values.
  // This bucket reports a minimum value of the smallest possible integer.
  int belowMinBucket = std::numeric_limits<int>::min();

  // Expected getPercentileBucketMin at percentiles {1, 50, 99} for each
  // interval above.
  int expectedValues[12][3] = {
    {96, 96, 96},
    { 8,  8, 96},
    { belowMinBucket,  belowMinBucket,  8}, // alltime
    { 8,  8,  8},
    { belowMinBucket,  belowMinBucket,  8}, // alltime
    { belowMinBucket,  belowMinBucket,  8}, // alltime
    {96, 96, 96},
    { 8,  8, 96},
    { belowMinBucket,  belowMinBucket,  8}, // alltime
    { 8,  8,  8},
    { belowMinBucket,  belowMinBucket,  8}, // alltime
    { belowMinBucket,  belowMinBucket,  8}  // alltime
  };

  for (int i = 0; i < 12; i++) {
    const auto& itv = intervals[i];
    int s = mhts.sum(itv.start, itv.end);
    EXPECT_EQ(expectedSums[i], s);

    int c = mhts.count(itv.start, itv.end);
    EXPECT_EQ(expectedCounts[i], c);
  }

  // 3 levels
  for (int i = 1; i <= 100; i++) {
    // The last minute holds only value 100 -> top bucket (min 96).
    EXPECT_EQ(96, mhts.getPercentileBucketMin(i, 0));
    EXPECT_EQ(
        96,
        mhts.getPercentileBucketMin(
            i, mkTimePoint(curTime - 60), mkTimePoint(curTime)));
    // The 59 minutes before that hold only value 10 -> bucket min 8.
    EXPECT_EQ(
        8,
        mhts.getPercentileBucketMin(
            i, mkTimePoint(curTime - 3540), mkTimePoint(curTime - 60)));
  }

  // Hour level: 59 min of 10s then 1 min of 100s -> split at p98/p99.
  EXPECT_EQ(8, mhts.getPercentileBucketMin(1, 1));
  EXPECT_EQ(8, mhts.getPercentileBucketMin(98, 1));
  EXPECT_EQ(96, mhts.getPercentileBucketMin(99, 1));
  EXPECT_EQ(96, mhts.getPercentileBucketMin(100, 1));

  // All-time level: two thirds of the data is below-min (value 1).
  EXPECT_EQ(belowMinBucket, mhts.getPercentileBucketMin(1, 2));
  EXPECT_EQ(belowMinBucket, mhts.getPercentileBucketMin(66, 2));
  EXPECT_EQ(8, mhts.getPercentileBucketMin(67, 2));
  EXPECT_EQ(8, mhts.getPercentileBucketMin(99, 2));
  EXPECT_EQ(96, mhts.getPercentileBucketMin(100, 2));

  // 0 is currently the value for bucket 0 (below min)
  for (int i = 0; i < 12; i++) {
    const auto& itv = intervals[i];
    int v = mhts.getPercentileBucketMin(1, itv.start, itv.end);
    EXPECT_EQ(expectedValues[i][0], v);

    v = mhts.getPercentileBucketMin(50, itv.start, itv.end);
    EXPECT_EQ(expectedValues[i][1], v);

    v = mhts.getPercentileBucketMin(99, itv.start, itv.end);
    EXPECT_EQ(expectedValues[i][2], v);
  }

  for (int i = 0; i < 12; i++) {
    const auto& itv = intervals[i];
    // Some of the older intervals that fall in the alltime bucket
    // are off by 1 or 2 in their estimated counts.
    size_t tolerance = 0;
    if (itv.start <= mkTimePoint(curTime - 7200)) {
      tolerance = 2;
    } else if (itv.start <= mkTimePoint(curTime - 3000)) {
      tolerance = 1;
    }
    // Estimated count never exceeds the true count and is within
    // `tolerance` of it.
    size_t actualCount = (itv.end - itv.start).count();
    size_t estimatedCount = mhts.count(itv.start, itv.end);
    EXPECT_GE(actualCount, estimatedCount);
    EXPECT_LE(actualCount - tolerance, estimatedCount);
  }
}
500
501 TEST(TimeseriesHistogram, SingleUniqueValue) {
502   int values[] = {-1, 0, 500, 1000, 1500};
503   for (int ii = 0; ii < 5; ++ii) {
504     int value = values[ii];
505     TimeseriesHistogram<int> h(10, 0, 1000,
506                                MultiLevelTimeSeries<int>(
507                                  60, IntMTMHTS::NUM_LEVELS,
508                                  IntMTMHTS::kDurations));
509
510     const int kNumIters = 1000;
511     for (int jj = 0; jj < kNumIters; ++jj) {
512       h.addValue(mkTimePoint(1), value);
513     }
514     h.update(mkTimePoint(1));
515     // since we've only added one unique value, all percentiles should
516     // be that value
517     EXPECT_EQ(h.getPercentileEstimate(10, 0), value);
518     EXPECT_EQ(h.getPercentileEstimate(50, 0), value);
519     EXPECT_EQ(h.getPercentileEstimate(99, 0), value);
520
521     // Things get trickier if there are multiple unique values.
522     const int kNewValue = 750;
523     for (int kk = 0; kk < 2*kNumIters; ++kk) {
524       h.addValue(mkTimePoint(1), kNewValue);
525     }
526     h.update(mkTimePoint(1));
527     EXPECT_NEAR(h.getPercentileEstimate(50, 0), kNewValue+5, 5);
528     if (value >= 0 && value <= 1000) {
529       // only do further testing if value is within our bucket range,
530       // else estimates can be wildly off
531       if (kNewValue > value) {
532         EXPECT_NEAR(h.getPercentileEstimate(10, 0), value+5, 5);
533         EXPECT_NEAR(h.getPercentileEstimate(99, 0), kNewValue+5, 5);
534       } else {
535         EXPECT_NEAR(h.getPercentileEstimate(10, 0), kNewValue+5, 5);
536         EXPECT_NEAR(h.getPercentileEstimate(99, 0), value+5, 5);
537       }
538     }
539   }
540 }