// Deliberately make this a signed type, because we're about
// to compare it against max-min, which is nominally signed, too.
- int numBuckets = (max - min) / bucketSize;
+ int64_t numBuckets = int64_t((max - min) / bucketSize);
// Round up if the bucket size does not fit evenly
if (numBuckets * bucketSize < max - min) {
++numBuckets;
}
// Add 2 for the extra 'below min' and 'above max' buckets
numBuckets += 2;
- buckets_.assign(numBuckets, defaultBucket);
+ buckets_.assign(size_t(numBuckets), defaultBucket);
}
template <typename T, typename BucketType>
-unsigned int HistogramBuckets<T, BucketType>::getBucketIdx(
- ValueType value) const {
+size_t HistogramBuckets<T, BucketType>::getBucketIdx(ValueType value) const {
if (value < min_) {
return 0;
} else if (value >= max_) {
return buckets_.size() - 1;
} else {
// the 1 is the below_min bucket
- return ((value - min_) / bucketSize_) + 1;
+ return size_t(((value - min_) / bucketSize_) + 1);
}
}
uint64_t HistogramBuckets<T, BucketType>::computeTotalCount(
CountFn countFromBucket) const {
uint64_t count = 0;
- for (unsigned int n = 0; n < buckets_.size(); ++n) {
+ for (size_t n = 0; n < buckets_.size(); ++n) {
count += countFromBucket(const_cast<const BucketType&>(buckets_[n]));
}
return count;
template <typename T, typename BucketType>
template <typename CountFn>
-unsigned int HistogramBuckets<T, BucketType>::getPercentileBucketIdx(
+size_t HistogramBuckets<T, BucketType>::getPercentileBucketIdx(
double pct,
CountFn countFromBucket,
- double* lowPct, double* highPct) const {
+ double* lowPct,
+ double* highPct) const {
CHECK_GE(pct, 0.0);
CHECK_LE(pct, 1.0);
- unsigned int numBuckets = buckets_.size();
+ auto numBuckets = buckets_.size();
// Compute the counts in each bucket
std::vector<uint64_t> counts(numBuckets);
uint64_t totalCount = 0;
- for (unsigned int n = 0; n < numBuckets; ++n) {
+ for (size_t n = 0; n < numBuckets; ++n) {
uint64_t bucketCount =
countFromBucket(const_cast<const BucketType&>(buckets_[n]));
counts[n] = bucketCount;
double prevPct = 0.0;
double curPct = 0.0;
uint64_t curCount = 0;
- unsigned int idx;
+ size_t idx;
for (idx = 0; idx < numBuckets; ++idx) {
if (counts[idx] == 0) {
// skip empty buckets
// Find the bucket where this percentile falls
double lowPct;
double highPct;
- unsigned int bucketIdx = getPercentileBucketIdx(pct, countFromBucket,
- &lowPct, &highPct);
+ size_t bucketIdx =
+ getPercentileBucketIdx(pct, countFromBucket, &lowPct, &highPct);
if (lowPct == 0.0 && highPct == 0.0) {
// Invalid range -- the buckets must all be empty
// Return the default value for ValueType.
// Assume that the data points lower than the median of this bucket
// are uniformly distributed between low and avg
double pctThroughSection = (pct - lowPct) / (medianPct - lowPct);
- return low + ((avg - low) * pctThroughSection);
+ return T(low + ((avg - low) * pctThroughSection));
} else {
// Assume that the data points greater than the median of this bucket
// are uniformly distributed between avg and high
double pctThroughSection = (pct - medianPct) / (highPct - medianPct);
- return avg + ((high - avg) * pctThroughSection);
+ return T(avg + ((high - avg) * pctThroughSection));
}
}
", bucketSize: ", buckets_.getBucketSize(),
", min: ", buckets_.getMin(), ", max: ", buckets_.getMax(), "\n");
- for (unsigned int i = 0; i < buckets_.getNumBuckets(); ++i) {
+ for (size_t i = 0; i < buckets_.getNumBuckets(); ++i) {
folly::toAppend(" ", buckets_.getBucketMin(i), ": ",
buckets_.getByIndex(i).count, "\n",
&ret);
template <typename T>
void Histogram<T>::toTSV(std::ostream& out, bool skipEmptyBuckets) const {
- for (unsigned int i = 0; i < buckets_.getNumBuckets(); ++i) {
+ for (size_t i = 0; i < buckets_.getNumBuckets(); ++i) {
// Do not output empty buckets in order to reduce data file size.
if (skipEmptyBuckets && getBucketByIndex(i).count == 0) {
continue;
#pragma once
#include <cstddef>
+#include <cstdint>
#include <limits>
#include <ostream>
+#include <stdexcept>
#include <string>
#include <vector>
-#include <stdexcept>
#include <folly/CPortability.h>
#include <folly/detail/Stats.h>
* plus 2 extra buckets, one for handling values less than min, and one for
* values greater than max.
*/
- unsigned int getNumBuckets() const {
+ size_t getNumBuckets() const {
return buckets_.size();
}
/* Returns the bucket index into which the given value would fall. */
- unsigned int getBucketIdx(ValueType value) const;
+ size_t getBucketIdx(ValueType value) const;
/* Returns the bucket for the specified value */
BucketType& getByValue(ValueType value) {
* Note that index 0 is the bucket for all values less than the specified
* minimum. Index 1 is the first bucket in the specified bucket range.
*/
- BucketType& getByIndex(unsigned int idx) {
+ BucketType& getByIndex(size_t idx) {
return buckets_[idx];
}
/* Returns the bucket at the specified index. */
- const BucketType& getByIndex(unsigned int idx) const {
+ const BucketType& getByIndex(size_t idx) const {
return buckets_[idx];
}
* [bucketMin, bucketMin + bucketSize), or [bucketMin, max), if the overall
* max is smaller than bucketMin + bucketSize.
*/
- ValueType getBucketMin(unsigned int idx) const {
+ ValueType getBucketMin(size_t idx) const {
if (idx == 0) {
return std::numeric_limits<ValueType>::min();
}
* [bucketMin, bucketMin + bucketSize), or [bucketMin, max), if the overall
* max is smaller than bucketMin + bucketSize.
*/
- ValueType getBucketMax(unsigned int idx) const {
+ ValueType getBucketMax(size_t idx) const {
if (idx == buckets_.size() - 1) {
return std::numeric_limits<ValueType>::max();
}
* data point.
*/
template <typename CountFn>
- unsigned int getPercentileBucketIdx(double pct,
- CountFn countFromBucket,
- double* lowPct = nullptr,
- double* highPct = nullptr) const;
+ size_t getPercentileBucketIdx(
+ double pct,
+ CountFn countFromBucket,
+ double* lowPct = nullptr,
+ double* highPct = nullptr) const;
/**
* Estimate the value at the specified percentile.
/* Remove all data points from the histogram */
void clear() {
- for (unsigned int i = 0; i < buckets_.getNumBuckets(); i++) {
+ for (size_t i = 0; i < buckets_.getNumBuckets(); i++) {
buckets_.getByIndex(i).clear();
}
}
throw std::invalid_argument("Cannot subtract input histogram.");
}
- for (unsigned int i = 0; i < buckets_.getNumBuckets(); i++) {
+ for (size_t i = 0; i < buckets_.getNumBuckets(); i++) {
buckets_.getByIndex(i) -= hist.buckets_.getByIndex(i);
}
}
throw std::invalid_argument("Cannot merge from input histogram.");
}
- for (unsigned int i = 0; i < buckets_.getNumBuckets(); i++) {
+ for (size_t i = 0; i < buckets_.getNumBuckets(); i++) {
buckets_.getByIndex(i) += hist.buckets_.getByIndex(i);
}
}
throw std::invalid_argument("Cannot copy from input histogram.");
}
- for (unsigned int i = 0; i < buckets_.getNumBuckets(); i++) {
+ for (size_t i = 0; i < buckets_.getNumBuckets(); i++) {
buckets_.getByIndex(i) = hist.buckets_.getByIndex(i);
}
}
return buckets_.getMax();
}
/* Returns the number of buckets */
- unsigned int getNumBuckets() const {
+ size_t getNumBuckets() const {
return buckets_.getNumBuckets();
}
/* Returns the specified bucket (for reading only!) */
- const Bucket& getBucketByIndex(int idx) const {
+ const Bucket& getBucketByIndex(size_t idx) const {
return buckets_.getByIndex(idx);
}
* [bucketMin, bucketMin + bucketSize), or [bucketMin, max), if the overall
* max is smaller than bucketMin + bucketSize.
*/
- ValueType getBucketMin(unsigned int idx) const {
+ ValueType getBucketMin(size_t idx) const {
return buckets_.getBucketMin(idx);
}
* [bucketMin, bucketMin + bucketSize), or [bucketMin, max), if the overall
* max is smaller than bucketMin + bucketSize.
*/
- ValueType getBucketMax(unsigned int idx) const {
+ ValueType getBucketMax(size_t idx) const {
return buckets_.getBucketMax(idx);
}
* The lowest and highest percentile data points in returned bucket will be
* returned in the lowPct and highPct arguments, if they are non-NULL.
*/
- unsigned int getPercentileBucketIdx(double pct,
- double* lowPct = nullptr,
- double* highPct = nullptr) const {
+ size_t getPercentileBucketIdx(
+ double pct,
+ double* lowPct = nullptr,
+ double* highPct = nullptr) const {
// We unfortunately can't use lambdas here yet;
// Some users of this code are still built with gcc-4.4.
CountFromBucket countFn;
// are implemented using template methods. Instantiate the default versions of
// these methods too, so anyone using them won't also need to explicitly
// include Histogram-defs.h
-template unsigned int detail::HistogramBuckets<
- int64_t, Histogram<int64_t>::Bucket>::
- getPercentileBucketIdx<Histogram<int64_t>::CountFromBucket>(
- double pct,
- Histogram<int64_t>::CountFromBucket countFromBucket,
- double* lowPct,
- double* highPct) const;
+template size_t detail::HistogramBuckets<int64_t, Histogram<int64_t>::Bucket>::
+ getPercentileBucketIdx<Histogram<int64_t>::CountFromBucket>(
+ double pct,
+ Histogram<int64_t>::CountFromBucket countFromBucket,
+ double* lowPct,
+ double* highPct) const;
template int64_t detail::HistogramBuckets<int64_t, Histogram<int64_t>::Bucket>
::getPercentileEstimate<Histogram<int64_t>::CountFromBucket,
Histogram<int64_t>::AvgFromBucket>(
void MultiLevelTimeSeries<VT, CT>::addValue(
TimePoint now,
const ValueType& val,
- int64_t times) {
+ uint64_t times) {
addValueAggregated(now, val * times, times);
}
void MultiLevelTimeSeries<VT, CT>::addValueAggregated(
TimePoint now,
const ValueType& total,
- int64_t nsamples) {
+ uint64_t nsamples) {
if (cachedTime_ != now) {
flush();
cachedTime_ = now;
* data. Otherwise you may be reading stale data if update() or flush() has
* not been called recently.
*/
- const Level& getLevel(int level) const {
- CHECK(level >= 0);
- CHECK_LT(size_t(level), levels_.size());
+ const Level& getLevel(size_t level) const {
+ CHECK_LT(level, levels_.size());
return levels_[level];
}
* data. Otherwise you may be reading stale data if update() or flush() has
* not been called recently.
*/
- ValueType sum(int level) const {
+ ValueType sum(size_t level) const {
return getLevel(level).sum();
}
* data. Otherwise you may be reading stale data if update() or flush() has
* not been called recently.
*/
- template <typename ReturnType=double>
- ReturnType avg(int level) const {
+ template <typename ReturnType = double>
+ ReturnType avg(size_t level) const {
return getLevel(level).template avg<ReturnType>();
}
* not been called recently.
*/
template <typename ReturnType = double, typename Interval = Duration>
- ReturnType rate(int level) const {
+ ReturnType rate(size_t level) const {
return getLevel(level).template rate<ReturnType, Interval>();
}
* data. Otherwise you may be reading stale data if update() or flush() has
* not been called recently.
*/
- int64_t count(int level) const {
+ uint64_t count(size_t level) const {
return getLevel(level).count();
}
* not been called recently.
*/
template <typename ReturnType = double, typename Interval = Duration>
- ReturnType countRate(int level) const {
+ ReturnType countRate(size_t level) const {
return getLevel(level).template countRate<ReturnType, Interval>();
}
/*
* Return the sum of all the data points currently tracked at this level.
*
- * This method is identical to sum(int level) above but takes in the
+ * This method is identical to sum(size_t level) above but takes in the
* duration that the user is interested in querying as the parameter.
*
* Note: you should generally call update() or flush() before accessing the
* Return the average (sum / count) of all the data points currently tracked
* at this level.
*
- * This method is identical to avg(int level) above but takes in the
+ * This method is identical to avg(size_t level) above but takes in the
* duration that the user is interested in querying as the parameter.
*
* Note: you should generally call update() or flush() before accessing the
 * Return the rate (sum divided by elapsed time) of the all data points
* currently tracked at this level.
*
- * This method is identical to rate(int level) above but takes in the
+ * This method is identical to rate(size_t level) above but takes in the
* duration that the user is interested in querying as the parameter.
*
* Note: you should generally call update() or flush() before accessing the
/*
* Return the number of data points currently tracked at this level.
*
- * This method is identical to count(int level) above but takes in the
+ * This method is identical to count(size_t level) above but takes in the
* duration that the user is interested in querying as the parameter.
*
* Note: you should generally call update() or flush() before accessing the
* data. Otherwise you may be reading stale data if update() or flush() has
* not been called recently.
*/
- int64_t count(Duration duration) const {
+ uint64_t count(Duration duration) const {
return getLevelByDuration(duration).count();
}
/*
* Return the count divided by the elapsed time tracked at this level.
*
- * This method is identical to countRate(int level) above but takes in the
+ * This method is identical to countRate(size_t level) above but takes in the
* duration that the user is interested in querying as the parameter.
*
* Note: you should generally call update() or flush() before accessing the
* data. Otherwise you may be reading stale data if update() or flush() has
* not been called recently.
*/
- int64_t count(TimePoint start, TimePoint end) const {
+ uint64_t count(TimePoint start, TimePoint end) const {
return getLevel(start).count(start, end);
}
/*
* Adds the value 'val' at time 'now' to all levels.
*/
- void addValue(TimePoint now, const ValueType& val, int64_t times);
+ void addValue(TimePoint now, const ValueType& val, uint64_t times);
/*
* Adds the value 'total' at time 'now' to all levels as the sum of
* 'nsamples' samples.
*/
void
- addValueAggregated(TimePoint now, const ValueType& total, int64_t nsamples);
+ addValueAggregated(TimePoint now, const ValueType& total, uint64_t nsamples);
/*
* Update all the levels to the specified time, doing all the necessary
void addValue(Duration now, const ValueType& value) {
addValue(TimePoint(now), value);
}
- void addValue(Duration now, const ValueType& value, int64_t times) {
+ void addValue(Duration now, const ValueType& value, uint64_t times) {
addValue(TimePoint(now), value, times);
}
void
- addValueAggregated(Duration now, const ValueType& total, int64_t nsamples) {
+ addValueAggregated(Duration now, const ValueType& total, uint64_t nsamples) {
addValueAggregated(TimePoint(now), total, nsamples);
}
void TimeseriesHistogram<T, CT, C>::addValue(
TimePoint now,
const ValueType& value,
- int64_t times) {
+ uint64_t times) {
buckets_.getByValue(value).addValue(now, value, times);
maybeHandleSingleUniqueValue(value);
}
CHECK_EQ(hist.getBucketSize(), getBucketSize());
CHECK_EQ(hist.getNumBuckets(), getNumBuckets());
- for (unsigned int n = 0; n < hist.getNumBuckets(); ++n) {
+ for (size_t n = 0; n < hist.getNumBuckets(); ++n) {
const typename folly::Histogram<ValueType>::Bucket& histBucket =
hist.getBucketByIndex(n);
Bucket& myBucket = buckets_.getByIndex(n);
}
template <typename T, typename CT, typename C>
-T TimeseriesHistogram<T, CT, C>::getPercentileEstimate(double pct, int level)
+T TimeseriesHistogram<T, CT, C>::getPercentileEstimate(double pct, size_t level)
const {
if (singleUniqueValue_) {
return firstValue_;
}
template <typename T, typename CT, typename C>
-int TimeseriesHistogram<T, CT, C>::getPercentileBucketIdx(double pct, int level)
- const {
+size_t TimeseriesHistogram<T, CT, C>::getPercentileBucketIdx(
+ double pct,
+ size_t level) const {
return buckets_.getPercentileBucketIdx(pct / 100.0, CountFromLevel(level));
}
template <typename T, typename CT, typename C>
-int TimeseriesHistogram<T, CT, C>::getPercentileBucketIdx(
+size_t TimeseriesHistogram<T, CT, C>::getPercentileBucketIdx(
double pct,
TimePoint start,
TimePoint end) const {
}
template <typename T, typename CT, typename C>
-std::string TimeseriesHistogram<T, CT, C>::getString(int level) const {
+std::string TimeseriesHistogram<T, CT, C>::getString(size_t level) const {
std::string result;
for (size_t i = 0; i < buckets_.getNumBuckets(); i++) {
template <class T, class CT, class C>
void TimeseriesHistogram<T, CT, C>::computeAvgData(
ValueType* total,
- int64_t* nsamples,
- int level) const {
- for (unsigned int b = 0; b < buckets_.getNumBuckets(); ++b) {
+ uint64_t* nsamples,
+ size_t level) const {
+ for (size_t b = 0; b < buckets_.getNumBuckets(); ++b) {
const auto& levelObj = buckets_.getByIndex(b).getLevel(level);
*total += levelObj.sum();
*nsamples += levelObj.count();
template <class T, class CT, class C>
void TimeseriesHistogram<T, CT, C>::computeAvgData(
ValueType* total,
- int64_t* nsamples,
+ uint64_t* nsamples,
TimePoint start,
TimePoint end) const {
- for (unsigned int b = 0; b < buckets_.getNumBuckets(); ++b) {
+ for (size_t b = 0; b < buckets_.getNumBuckets(); ++b) {
const auto& levelObj = buckets_.getByIndex(b).getLevel(start);
*total += levelObj.sum(start, end);
*nsamples += levelObj.count(start, end);
void TimeseriesHistogram<T, CT, C>::computeRateData(
ValueType* total,
Duration* elapsed,
- int level) const {
- for (unsigned int b = 0; b < buckets_.getNumBuckets(); ++b) {
+ size_t level) const {
+ for (size_t b = 0; b < buckets_.getNumBuckets(); ++b) {
const auto& levelObj = buckets_.getByIndex(b).getLevel(level);
*total += levelObj.sum();
*elapsed = std::max(*elapsed, levelObj.elapsed());
Duration* elapsed,
TimePoint start,
TimePoint end) const {
- for (unsigned int b = 0; b < buckets_.getNumBuckets(); ++b) {
+ for (size_t b = 0; b < buckets_.getNumBuckets(); ++b) {
const auto& level = buckets_.getByIndex(b).getLevel(start);
*total += level.sum(start, end);
*elapsed = std::max(*elapsed, level.elapsed(start, end));
ValueType getMax() const { return buckets_.getMax(); }
/* Return the number of levels of the Timeseries object in each bucket */
- int getNumLevels() const {
+ size_t getNumLevels() const {
return buckets_.getByIndex(0).numLevels();
}
/* Return the number of buckets */
- int getNumBuckets() const { return buckets_.getNumBuckets(); }
+ size_t getNumBuckets() const {
+ return buckets_.getNumBuckets();
+ }
/*
* Return the threshold of the bucket for the given index in range
* [0..numBuckets). The bucket will have range [thresh, thresh + bucketSize)
* or [thresh, max), whichever is shorter.
*/
- ValueType getBucketMin(int bucketIdx) const {
+ ValueType getBucketMin(size_t bucketIdx) const {
return buckets_.getBucketMin(bucketIdx);
}
/* Return the actual timeseries in the given bucket (for reading only!) */
- const ContainerType& getBucket(int bucketIdx) const {
+ const ContainerType& getBucket(size_t bucketIdx) const {
return buckets_.getByIndex(bucketIdx);
}
/* Total count of values at the given timeseries level (all buckets). */
- int64_t count(int level) const {
- int64_t total = 0;
- for (unsigned int b = 0; b < buckets_.getNumBuckets(); ++b) {
+ uint64_t count(size_t level) const {
+ uint64_t total = 0;
+ for (size_t b = 0; b < buckets_.getNumBuckets(); ++b) {
total += buckets_.getByIndex(b).count(level);
}
return total;
}
/* Total count of values added during the given interval (all buckets). */
- int64_t count(TimePoint start, TimePoint end) const {
- int64_t total = 0;
- for (unsigned int b = 0; b < buckets_.getNumBuckets(); ++b) {
+ uint64_t count(TimePoint start, TimePoint end) const {
+ uint64_t total = 0;
+ for (size_t b = 0; b < buckets_.getNumBuckets(); ++b) {
total += buckets_.getByIndex(b).count(start, end);
}
return total;
/* Total sum of values at the given timeseries level (all buckets). */
ValueType sum(int level) const {
ValueType total = ValueType();
- for (unsigned int b = 0; b < buckets_.getNumBuckets(); ++b) {
+ for (size_t b = 0; b < buckets_.getNumBuckets(); ++b) {
total += buckets_.getByIndex(b).sum(level);
}
return total;
/* Total sum of values added during the given interval (all buckets). */
ValueType sum(TimePoint start, TimePoint end) const {
ValueType total = ValueType();
- for (unsigned int b = 0; b < buckets_.getNumBuckets(); ++b) {
+ for (size_t b = 0; b < buckets_.getNumBuckets(); ++b) {
total += buckets_.getByIndex(b).sum(start, end);
}
return total;
template <typename ReturnType = double>
ReturnType avg(int level) const {
auto total = ValueType();
- int64_t nsamples = 0;
+ uint64_t nsamples = 0;
computeAvgData(&total, &nsamples, level);
return folly::detail::avgHelper<ReturnType>(total, nsamples);
}
template <typename ReturnType = double>
ReturnType avg(TimePoint start, TimePoint end) const {
auto total = ValueType();
- int64_t nsamples = 0;
+ uint64_t nsamples = 0;
computeAvgData(&total, &nsamples, start, end);
return folly::detail::avgHelper<ReturnType>(total, nsamples);
}
* This is the sum of all values divided by the time interval (in seconds).
*/
template <typename ReturnType = double>
- ReturnType rate(int level) const {
+ ReturnType rate(size_t level) const {
auto total = ValueType();
Duration elapsed(0);
computeRateData(&total, &elapsed, level);
/* Add a value into the histogram with timestamp 'now' */
void addValue(TimePoint now, const ValueType& value);
/* Add a value the given number of times with timestamp 'now' */
- void addValue(TimePoint now, const ValueType& value, int64_t times);
+ void addValue(TimePoint now, const ValueType& value, uint64_t times);
/*
* Add all of the values from the specified histogram.
* average and the known bound is equal to the distance between the average
* and the unknown bound.
*/
- ValueType getPercentileEstimate(double pct, int level) const;
+ ValueType getPercentileEstimate(double pct, size_t level) const;
/*
* Return an estimate of the value at the given percentile in the histogram
* in the given historical interval. Please see the documentation for
- * getPercentileEstimate(int pct, int level) for the explanation of the
+ * getPercentileEstimate(double pct, size_t level) for the explanation of the
* estimation algorithm.
*/
ValueType getPercentileEstimate(double pct, TimePoint start, TimePoint end)
* given timeseries level). This index can then be used to retrieve either
* the bucket threshold, or other data from inside the bucket.
*/
- int getPercentileBucketIdx(double pct, int level) const;
+ size_t getPercentileBucketIdx(double pct, size_t level) const;
/*
* Return the bucket index that the given percentile falls into (in the
* given historical interval). This index can then be used to retrieve either
* the bucket threshold, or other data from inside the bucket.
*/
- int getPercentileBucketIdx(double pct, TimePoint start, TimePoint end) const;
+ size_t getPercentileBucketIdx(double pct, TimePoint start, TimePoint end)
+ const;
/* Get the bucket threshold for the bucket containing the given pct. */
- int getPercentileBucketMin(double pct, int level) const {
+ ValueType getPercentileBucketMin(double pct, size_t level) const {
return getBucketMin(getPercentileBucketIdx(pct, level));
}
/* Get the bucket threshold for the bucket containing the given pct. */
- int getPercentileBucketMin(double pct, TimePoint start, TimePoint end) const {
+ ValueType getPercentileBucketMin(double pct, TimePoint start, TimePoint end)
+ const {
return getBucketMin(getPercentileBucketIdx(pct, start, end));
}
* Format is: BUCKET [',' BUCKET ...]
* Where: BUCKET == bucketMin ':' count ':' avg
*/
- std::string getString(int level) const;
+ std::string getString(size_t level) const;
/*
* Print out serialized data for all buckets in the historical interval.
- * For format, please see getString(int level).
+ * For format, please see getString(size_t level).
*/
std::string getString(TimePoint start, TimePoint end) const;
void addValue(Duration now, const ValueType& value) {
addValue(TimePoint(now), value);
}
- void addValue(Duration now, const ValueType& value, int64_t times) {
+ void addValue(Duration now, const ValueType& value, uint64_t times) {
addValue(TimePoint(now), value, times);
}
void addValues(Duration now, const folly::Histogram<ValueType>& values) {
private:
typedef ContainerType Bucket;
struct CountFromLevel {
- explicit CountFromLevel(int level) : level_(level) {}
+ explicit CountFromLevel(size_t level) : level_(level) {}
uint64_t operator()(const ContainerType& bucket) const {
return bucket.count(level_);
}
private:
- int level_;
+ size_t level_;
};
struct CountFromInterval {
explicit CountFromInterval(TimePoint start, TimePoint end)
};
struct AvgFromLevel {
- explicit AvgFromLevel(int level) : level_(level) {}
+ explicit AvgFromLevel(size_t level) : level_(level) {}
ValueType operator()(const ContainerType& bucket) const {
return bucket.template avg<ValueType>(level_);
}
private:
- int level_;
+ size_t level_;
};
template <typename ReturnType>
*/
void maybeHandleSingleUniqueValue(const ValueType& value);
- void computeAvgData(ValueType* total, int64_t* nsamples, int level) const;
+ void computeAvgData(ValueType* total, uint64_t* nsamples, size_t level) const;
void computeAvgData(
ValueType* total,
- int64_t* nsamples,
+ uint64_t* nsamples,
TimePoint start,
TimePoint end) const;
- void computeRateData(ValueType* total, Duration* elapsed, int level) const;
+ void computeRateData(ValueType* total, Duration* elapsed, size_t level) const;
void computeRateData(
ValueType* total,
Duration* elapsed,
EXPECT_EQ(10, h.getMin());
EXPECT_EQ(110, h.getMax());
- for (int i = 0; i < h.getNumBuckets(); ++i) {
+ for (size_t i = 0; i < h.getNumBuckets(); ++i) {
EXPECT_EQ(4, h.getBucket(i).numLevels());
}
CHECK_EQ(IntMTMHTS::NUM_LEVELS, hist.getNumLevels());
- for (int level = 0; level < hist.getNumLevels(); ++level) {
+ for (size_t level = 0; level < hist.getNumLevels(); ++level) {
EXPECT_EQ(kStringValues1[level], hist.getString(level));
}
CHECK_EQ(IntMTMHTS::NUM_LEVELS, hist.getNumLevels());
- for (int level = 0; level < hist.getNumLevels(); ++level) {
+ for (size_t level = 0; level < hist.getNumLevels(); ++level) {
EXPECT_EQ(kStringValues2[level], hist.getString(level));
}
}
// check clearing
hist.clear();
- for (int b = 0; b < hist.getNumBuckets(); ++b) {
+ for (size_t b = 0; b < hist.getNumBuckets(); ++b) {
EXPECT_EQ(0, hist.getBucket(b).count(IntMTMHTS::MINUTE));
EXPECT_EQ(0, hist.getBucket(b).count(IntMTMHTS::TEN_MINUTE));
EXPECT_EQ(0, hist.getBucket(b).count(IntMTMHTS::HOUR));
EXPECT_EQ(expected, hist.getPercentileBucketMin(pct, IntMTMHTS::ALLTIME));
}
- for (int b = 1; (b + 1) < hist.getNumBuckets(); ++b) {
+ for (size_t b = 1; (b + 1) < hist.getNumBuckets(); ++b) {
EXPECT_EQ(600, hist.getBucket(b).count(IntMTMHTS::MINUTE));
EXPECT_EQ(6000, hist.getBucket(b).count(IntMTMHTS::TEN_MINUTE));
EXPECT_EQ(36000, hist.getBucket(b).count(IntMTMHTS::HOUR));
EXPECT_EQ(expected, hist.getPercentileBucketMin(pct, IntMTMHTS::ALLTIME));
}
- for (int b = 1; (b + 1) < hist.getNumBuckets(); ++b) {
- EXPECT_EQ(600 * 2, hist.getBucket(b).count(IntMTMHTS::MINUTE));
- EXPECT_EQ(6000 * 2, hist.getBucket(b).count(IntMTMHTS::TEN_MINUTE));
- EXPECT_EQ(36000 * 2, hist.getBucket(b).count(IntMTMHTS::HOUR));
- EXPECT_EQ(36000 * 2, hist.getBucket(b).count(IntMTMHTS::ALLTIME));
+ for (size_t b = 1; (b + 1) < hist.getNumBuckets(); ++b) {
+ EXPECT_EQ(600 * 2, hist.getBucket(b).count(IntMTMHTS::MINUTE));
+ EXPECT_EQ(6000 * 2, hist.getBucket(b).count(IntMTMHTS::TEN_MINUTE));
+ EXPECT_EQ(36000 * 2, hist.getBucket(b).count(IntMTMHTS::HOUR));
+ EXPECT_EQ(36000 * 2, hist.getBucket(b).count(IntMTMHTS::ALLTIME));
}
EXPECT_EQ(0, hist.getBucket(0).count(IntMTMHTS::MINUTE));
EXPECT_EQ(0, hist.getBucket(hist.getNumBuckets() - 1).count(
hist.getBucket(hist.getNumBuckets() - 1).count(
IntMTMHTS::ALLTIME));
- for (int b = 1; (b + 1) < hist.getNumBuckets(); ++b) {
+ for (size_t b = 1; (b + 1) < hist.getNumBuckets(); ++b) {
EXPECT_EQ(600, hist.getBucket(b).count(IntMTMHTS::MINUTE));
EXPECT_EQ(6000, hist.getBucket(b).count(IntMTMHTS::TEN_MINUTE));
EXPECT_EQ(36000, hist.getBucket(b).count(IntMTMHTS::HOUR));