Commit 344f49c4 authored by Yedidya Feldblum, committed by Facebook Github Bot

Apply clang-format to folly/stats/

Summary: [Folly] Apply clang-format to `folly/stats/`.

Reviewed By: Orvid

Differential Revision: D5366745

fbshipit-source-id: 3b7419d4ab4c6203693603722cd8e707741d3953
parent 1fd03592
......@@ -173,8 +173,8 @@ size_t BucketedTimeSeries<VT, CT>::updateBuckets(TimePoint now) {
size_t currentBucket;
TimePoint currentBucketStart;
TimePoint nextBucketStart;
getBucketInfo(latestTime_, &currentBucket,
&currentBucketStart, &nextBucketStart);
getBucketInfo(
latestTime_, &currentBucket, &currentBucketStart, &nextBucketStart);
// Update latestTime_
latestTime_ = now;
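For context (not part of this diff): a minimal sketch of calling `getBucketInfo`, mirroring the test further down; the `TimePoint` alias and the `int64_t` instantiation are assumptions for illustration.

```cpp
#include <chrono>

#include <folly/stats/BucketedTimeSeries.h>
#include <folly/stats/BucketedTimeSeries-defs.h>

// Query which bucket a time point falls into, plus the bucket's half-open
// [start, nextStart) span, all returned through out-parameters.
void bucketInfoSketch(const folly::BucketedTimeSeries<int64_t>& ts) {
  using TimePoint = folly::BucketedTimeSeries<int64_t>::TimePoint;
  size_t idx;
  TimePoint start;
  TimePoint nextStart;
  ts.getBucketInfo(
      TimePoint(std::chrono::seconds(42)), &idx, &start, &nextStart);
}
```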
......@@ -246,8 +246,8 @@ typename CT::time_point BucketedTimeSeries<VT, CT>::getEarliestTimeNonEmpty()
size_t currentBucket;
TimePoint currentBucketStart;
TimePoint nextBucketStart;
getBucketInfo(latestTime_, &currentBucket,
&currentBucketStart, &nextBucketStart);
getBucketInfo(
latestTime_, &currentBucket, &currentBucketStart, &nextBucketStart);
// Subtract 1 duration from the start of the next bucket to find the
// earliest possible data point we could be tracking.
......@@ -500,7 +500,7 @@ VT BucketedTimeSeries<VT, CT>::rangeAdjust(
TimePoint intervalStart = std::max(start, bucketStart);
TimePoint intervalEnd = std::min(end, nextBucketStart);
return input * (intervalEnd - intervalStart) /
(nextBucketStart - bucketStart);
(nextBucketStart - bucketStart);
}
template <typename VT, typename CT>
......
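For context: `rangeAdjust` scales a bucket's aggregate by the fraction of the bucket covered by the queried interval. A self-contained sketch with plain doubles standing in for `TimePoint`/`Duration` (hypothetical numbers, not from this diff):

```cpp
#include <algorithm>

// Proportional adjustment: overlap of [start, end) with the bucket's
// [bucketStart, nextBucketStart), divided by the bucket width.
double rangeAdjustSketch(
    double input,
    double start,
    double end,
    double bucketStart,
    double nextBucketStart) {
  double intervalStart = std::max(start, bucketStart);
  double intervalEnd = std::min(end, nextBucketStart);
  return input * (intervalEnd - intervalStart) / (nextBucketStart - bucketStart);
}

// rangeAdjustSketch(10.0, 15, 25, 10, 20) == 5.0: half the bucket overlaps.
```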
......@@ -293,7 +293,7 @@ class BucketedTimeSeries {
* Note that you generally should call update() before calling avg(), to
* make sure you are not reading stale data.
*/
template <typename ReturnType=double>
template <typename ReturnType = double>
ReturnType avg() const {
return total_.template avg<ReturnType>();
}
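For context: a usage sketch of the templated `avg()`, based on the tests later in this diff (60 buckets over a 600-second window):

```cpp
#include <chrono>

#include <folly/stats/BucketedTimeSeries.h>
#include <folly/stats/BucketedTimeSeries-defs.h>

void avgSketch() {
  using std::chrono::seconds;
  folly::BucketedTimeSeries<int64_t> ts(60, seconds(600));
  ts.addValue(seconds(0), 42);
  double d = ts.avg();       // ReturnType defaults to double
  float f = ts.avg<float>(); // explicit ReturnType
  (void)d;
  (void)f;
}
```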
......@@ -483,8 +483,8 @@ class BucketedTimeSeries {
TimePoint latestTime_; // time of last update()
Duration duration_; // total duration ("window length") of the time series
Bucket total_; // sum and count of everything in time series
std::vector<Bucket> buckets_; // actual buckets of values
Bucket total_; // sum and count of everything in time series
std::vector<Bucket> buckets_; // actual buckets of values
};
} // folly
......@@ -26,13 +26,12 @@ namespace folly {
namespace detail {
template <typename T, typename BucketT>
HistogramBuckets<T, BucketT>::HistogramBuckets(ValueType bucketSize,
ValueType min,
ValueType max,
const BucketType& defaultBucket)
: bucketSize_(bucketSize),
min_(min),
max_(max) {
HistogramBuckets<T, BucketT>::HistogramBuckets(
ValueType bucketSize,
ValueType min,
ValueType max,
const BucketType& defaultBucket)
: bucketSize_(bucketSize), min_(min), max_(max) {
CHECK_GT(bucketSize_, ValueType(0));
CHECK_LT(min_, max_);
......@@ -88,7 +87,7 @@ size_t HistogramBuckets<T, BucketType>::getPercentileBucketIdx(
uint64_t totalCount = 0;
for (size_t n = 0; n < numBuckets; ++n) {
uint64_t bucketCount =
countFromBucket(const_cast<const BucketType&>(buckets_[n]));
countFromBucket(const_cast<const BucketType&>(buckets_[n]));
counts[n] = bucketCount;
totalCount += bucketCount;
}
......@@ -146,7 +145,6 @@ T HistogramBuckets<T, BucketType>::getPercentileEstimate(
double pct,
CountFn countFromBucket,
AvgFn avgFromBucket) const {
// Find the bucket where this percentile falls
double lowPct;
double highPct;
......@@ -183,8 +181,8 @@ T HistogramBuckets<T, BucketType>::getPercentileEstimate(
// (Note that if the counter keeps being decremented, eventually it will
// wrap and become small enough that we won't detect this any more, and
// we will return bogus information.)
LOG(ERROR) << "invalid average value in histogram minimum bucket: " <<
avg << " > " << min_ << ": possible integer overflow?";
LOG(ERROR) << "invalid average value in histogram minimum bucket: " << avg
<< " > " << min_ << ": possible integer overflow?";
return getBucketMin(bucketIdx);
}
// For the below-min bucket, just assume the lowest value ever seen is
......@@ -199,8 +197,8 @@ T HistogramBuckets<T, BucketType>::getPercentileEstimate(
if (avg < max_) {
// Most likely this means integer overflow occurred. See the comments
// above in the minimum case.
LOG(ERROR) << "invalid average value in histogram maximum bucket: " <<
avg << " < " << max_ << ": possible integer overflow?";
LOG(ERROR) << "invalid average value in histogram maximum bucket: " << avg
<< " < " << max_ << ": possible integer overflow?";
return getBucketMax(bucketIdx);
}
// Similarly for the above-max bucket, assume the highest value ever seen
......@@ -218,9 +216,9 @@ T HistogramBuckets<T, BucketType>::getPercentileEstimate(
// Most likely this means an integer overflow occurred.
// See the comments above. Return the midpoint between low and high
// as a best guess, since avg is meaningless.
LOG(ERROR) << "invalid average value in histogram bucket: " <<
avg << " not in range [" << low << ", " << high <<
"]: possible integer overflow?";
LOG(ERROR) << "invalid average value in histogram bucket: " << avg
<< " not in range [" << low << ", " << high
<< "]: possible integer overflow?";
return (low + high) / 2;
}
}
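For context: once the bucket whose cumulative count straddles the target percentile is located, the estimate interpolates within that bucket. A simplified illustration of the interpolation step (not folly's exact formula, which also uses the bucket average):

```cpp
// Linear interpolation between a bucket's bounds, using how far pct sits
// between the cumulative percentages at the bucket's edges.
double interpolateInBucket(
    double pct, double lowPct, double highPct, double low, double high) {
  if (highPct <= lowPct) {
    return low; // degenerate bucket; avoid dividing by zero
  }
  double frac = (pct - lowPct) / (highPct - lowPct);
  return low + frac * (high - low);
}
```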
......@@ -247,18 +245,27 @@ T HistogramBuckets<T, BucketType>::getPercentileEstimate(
} // detail
template <typename T>
std::string Histogram<T>::debugString() const {
std::string ret = folly::to<std::string>(
"num buckets: ", buckets_.getNumBuckets(),
", bucketSize: ", buckets_.getBucketSize(),
", min: ", buckets_.getMin(), ", max: ", buckets_.getMax(), "\n");
"num buckets: ",
buckets_.getNumBuckets(),
", bucketSize: ",
buckets_.getBucketSize(),
", min: ",
buckets_.getMin(),
", max: ",
buckets_.getMax(),
"\n");
for (size_t i = 0; i < buckets_.getNumBuckets(); ++i) {
folly::toAppend(" ", buckets_.getBucketMin(i), ": ",
buckets_.getByIndex(i).count, "\n",
&ret);
folly::toAppend(
" ",
buckets_.getBucketMin(i),
": ",
buckets_.getByIndex(i).count,
"\n",
&ret);
}
return ret;
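For context: a usage sketch of `debugString()`, assuming the `Histogram(bucketSize, min, max)` constructor shown further down in this diff:

```cpp
#include <string>

#include <folly/stats/Histogram.h>
#include <folly/stats/Histogram-defs.h>

void debugStringSketch() {
  folly::Histogram<int64_t> h(10, 0, 100); // bucketSize 10 over [0, 100)
  h.addValue(42);
  std::string s = h.debugString(); // header line, then one "min: count" per bucket
  (void)s;
}
```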
......@@ -272,8 +279,8 @@ void Histogram<T>::toTSV(std::ostream& out, bool skipEmptyBuckets) const {
continue;
}
const auto& bucket = getBucketByIndex(i);
out << getBucketMin(i) << '\t' << getBucketMax(i) << '\t'
<< bucket.count << '\t' << bucket.sum << '\n';
out << getBucketMin(i) << '\t' << getBucketMax(i) << '\t' << bucket.count
<< '\t' << bucket.sum << '\n';
}
}
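For context: `toTSV` writes one tab-separated row per bucket. A minimal sketch (the explicit `skipEmptyBuckets` argument avoids relying on a default not shown here):

```cpp
#include <sstream>

#include <folly/stats/Histogram.h>
#include <folly/stats/Histogram-defs.h>

void toTSVSketch(const folly::Histogram<int64_t>& h) {
  std::ostringstream out;
  h.toTSV(out, /* skipEmptyBuckets = */ false); // rows: min, max, count, sum
}
```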
......
......@@ -40,14 +40,16 @@ template size_t detail::HistogramBuckets<int64_t, Histogram<int64_t>::Bucket>::
Histogram<int64_t>::CountFromBucket countFromBucket,
double* lowPct,
double* highPct) const;
template int64_t detail::HistogramBuckets<int64_t, Histogram<int64_t>::Bucket>
::getPercentileEstimate<Histogram<int64_t>::CountFromBucket,
Histogram<int64_t>::AvgFromBucket>(
double pct,
Histogram<int64_t>::CountFromBucket countFromBucket,
Histogram<int64_t>::AvgFromBucket avgFromBucket) const;
template uint64_t detail::HistogramBuckets<int64_t, Histogram<int64_t>::Bucket>
::computeTotalCount<Histogram<int64_t>::CountFromBucket>(
Histogram<int64_t>::CountFromBucket countFromBucket) const;
template int64_t detail::HistogramBuckets<int64_t, Histogram<int64_t>::Bucket>::
getPercentileEstimate<
Histogram<int64_t>::CountFromBucket,
Histogram<int64_t>::AvgFromBucket>(
double pct,
Histogram<int64_t>::CountFromBucket countFromBucket,
Histogram<int64_t>::AvgFromBucket avgFromBucket) const;
template uint64_t
detail::HistogramBuckets<int64_t, Histogram<int64_t>::Bucket>::
computeTotalCount<Histogram<int64_t>::CountFromBucket>(
Histogram<int64_t>::CountFromBucket countFromBucket) const;
} // folly
......@@ -53,8 +53,11 @@ class HistogramBuckets {
*
* (max - min) must be larger than or equal to bucketSize.
*/
HistogramBuckets(ValueType bucketSize, ValueType min, ValueType max,
const BucketType& defaultBucket);
HistogramBuckets(
ValueType bucketSize,
ValueType min,
ValueType max,
const BucketType& defaultBucket);
/* Returns the bucket size of each bucket in the histogram. */
ValueType getBucketSize() const {
......@@ -191,9 +194,10 @@ class HistogramBuckets {
* percentage of the data points in the histogram are less than N.
*/
template <typename CountFn, typename AvgFn>
ValueType getPercentileEstimate(double pct,
CountFn countFromBucket,
AvgFn avgFromBucket) const;
ValueType getPercentileEstimate(
double pct,
CountFn countFromBucket,
AvgFn avgFromBucket) const;
/*
* Iterator access to the buckets.
......@@ -224,7 +228,6 @@ class HistogramBuckets {
} // detail
/*
* A basic histogram class.
*
......@@ -242,7 +245,7 @@ class Histogram {
typedef detail::Bucket<T> Bucket;
Histogram(ValueType bucketSize, ValueType min, ValueType max)
: buckets_(bucketSize, min, max, Bucket()) {}
: buckets_(bucketSize, min, max, Bucket()) {}
/* Add a data point to the histogram */
void addValue(ValueType value) FOLLY_DISABLE_UNDEFINED_BEHAVIOR_SANITIZER(
......@@ -313,13 +316,11 @@ class Histogram {
}
/* Subtract another histogram data from the histogram */
void subtract(const Histogram &hist) {
void subtract(const Histogram& hist) {
// the two histogram bucket definitions must match to support
// subtract.
if (getBucketSize() != hist.getBucketSize() ||
getMin() != hist.getMin() ||
getMax() != hist.getMax() ||
getNumBuckets() != hist.getNumBuckets() ) {
if (getBucketSize() != hist.getBucketSize() || getMin() != hist.getMin() ||
getMax() != hist.getMax() || getNumBuckets() != hist.getNumBuckets()) {
throw std::invalid_argument("Cannot subtract input histogram.");
}
......@@ -329,13 +330,11 @@ class Histogram {
}
/* Merge two histogram data together */
void merge(const Histogram &hist) {
void merge(const Histogram& hist) {
// the two histogram bucket definitions must match to support
// a merge.
if (getBucketSize() != hist.getBucketSize() ||
getMin() != hist.getMin() ||
getMax() != hist.getMax() ||
getNumBuckets() != hist.getNumBuckets() ) {
if (getBucketSize() != hist.getBucketSize() || getMin() != hist.getMin() ||
getMax() != hist.getMax() || getNumBuckets() != hist.getNumBuckets()) {
throw std::invalid_argument("Cannot merge from input histogram.");
}
......@@ -345,12 +344,10 @@ class Histogram {
}
/* Copy bucket values from another histogram */
void copy(const Histogram &hist) {
void copy(const Histogram& hist) {
// the two histogram bucket definition must match
if (getBucketSize() != hist.getBucketSize() ||
getMin() != hist.getMin() ||
getMax() != hist.getMax() ||
getNumBuckets() != hist.getNumBuckets() ) {
if (getBucketSize() != hist.getBucketSize() || getMin() != hist.getMin() ||
getMax() != hist.getMax() || getNumBuckets() != hist.getNumBuckets()) {
throw std::invalid_argument("Cannot copy from input histogram.");
}
......
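For context: `subtract`, `merge`, and `copy` all require identical bucket layouts, as the checks above show. A sketch of the precondition:

```cpp
#include <folly/stats/Histogram.h>
#include <folly/stats/Histogram-defs.h>

void mergeSketch() {
  folly::Histogram<int64_t> a(10, 0, 100);
  folly::Histogram<int64_t> b(10, 0, 100);
  b.addValue(5);
  a.merge(b); // OK: bucketSize, min, max, and bucket count all match

  folly::Histogram<int64_t> c(20, 0, 100);
  // a.merge(c); // would throw std::invalid_argument: layouts differ
}
```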
......@@ -113,7 +113,7 @@ void MultiLevelTimeSeries<VT, CT>::flush() {
template <typename VT, typename CT>
void MultiLevelTimeSeries<VT, CT>::clear() {
for (auto & level : levels_) {
for (auto& level : levels_) {
level.clear();
}
......@@ -122,4 +122,4 @@ void MultiLevelTimeSeries<VT, CT>::clear() {
cachedCount_ = 0;
}
} // folly
} // namespace folly
......@@ -89,7 +89,9 @@ class MultiLevelTimeSeries {
/*
* Return the number of levels tracked by MultiLevelTimeSeries.
*/
size_t numLevels() const { return levels_.size(); }
size_t numLevels() const {
return levels_.size();
}
/*
* Get the BucketedTimeSeries backing the specified level.
......
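For context: a construction sketch mirroring the tests later in this diff, where each level has its own window duration and a duration of 0 means unbounded ("all-time"):

```cpp
#include <chrono>

#include <folly/stats/MultiLevelTimeSeries.h>
#include <folly/stats/MultiLevelTimeSeries-defs.h>

void levelsSketch() {
  using std::chrono::seconds;
  const seconds kDurations[] = {seconds(60), seconds(3600), seconds(0)};
  folly::MultiLevelTimeSeries<int> mhts(60, 3, kDurations);
  size_t n = mhts.numLevels(); // 3
  (void)n;
}
```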
......@@ -62,7 +62,7 @@ void TimeseriesHistogram<T, CT, C>::addValues(
for (size_t n = 0; n < hist.getNumBuckets(); ++n) {
const typename folly::Histogram<ValueType>::Bucket& histBucket =
hist.getBucketByIndex(n);
hist.getBucketByIndex(n);
Bucket& myBucket = buckets_.getByIndex(n);
myBucket.addValueAggregated(now, histBucket.sum, histBucket.count);
}
......@@ -93,8 +93,8 @@ T TimeseriesHistogram<T, CT, C>::getPercentileEstimate(double pct, size_t level)
return firstValue_;
}
return buckets_.getPercentileEstimate(pct / 100.0, CountFromLevel(level),
AvgFromLevel(level));
return buckets_.getPercentileEstimate(
pct / 100.0, CountFromLevel(level), AvgFromLevel(level));
}
template <typename T, typename CT, typename C>
......@@ -106,9 +106,10 @@ T TimeseriesHistogram<T, CT, C>::getPercentileEstimate(
return firstValue_;
}
return buckets_.getPercentileEstimate(pct / 100.0,
CountFromInterval(start, end),
AvgFromInterval<T>(start, end));
return buckets_.getPercentileEstimate(
pct / 100.0,
CountFromInterval(start, end),
AvgFromInterval<T>(start, end));
}
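For context: a usage sketch of the level-based overload, with a two-level series as an assumption; note that `pct` is on a 0-100 scale here and is divided by 100 before reaching the underlying buckets:

```cpp
#include <chrono>

#include <folly/stats/TimeseriesHistogram.h>
#include <folly/stats/TimeseriesHistogram-defs.h>

void percentileSketch() {
  using std::chrono::seconds;
  const seconds kDurations[] = {seconds(60), seconds(0)};
  folly::TimeseriesHistogram<int> h(
      10, 0, 1000, folly::MultiLevelTimeSeries<int>(60, 2, kDurations));
  int p50 = h.getPercentileEstimate(50, /* level = */ 0);
  (void)p50;
}
```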
template <typename T, typename CT, typename C>
......@@ -123,8 +124,8 @@ size_t TimeseriesHistogram<T, CT, C>::getPercentileBucketIdx(
double pct,
TimePoint start,
TimePoint end) const {
return buckets_.getPercentileBucketIdx(pct / 100.0,
CountFromInterval(start, end));
return buckets_.getPercentileBucketIdx(
pct / 100.0, CountFromInterval(start, end));
}
template <typename T, typename CT, typename C>
......@@ -150,9 +151,13 @@ std::string TimeseriesHistogram<T, CT, C>::getString(size_t level) const {
toAppend(",", &result);
}
const ContainerType& cont = buckets_.getByIndex(i);
toAppend(buckets_.getBucketMin(i),
":", cont.count(level),
":", cont.template avg<ValueType>(level), &result);
toAppend(
buckets_.getBucketMin(i),
":",
cont.count(level),
":",
cont.template avg<ValueType>(level),
&result);
}
return result;
......@@ -169,9 +174,13 @@ std::string TimeseriesHistogram<T, CT, C>::getString(
toAppend(",", &result);
}
const ContainerType& cont = buckets_.getByIndex(i);
toAppend(buckets_.getBucketMin(i),
":", cont.count(start, end),
":", cont.avg(start, end), &result);
toAppend(
buckets_.getBucketMin(i),
":",
cont.count(start, end),
":",
cont.avg(start, end),
&result);
}
return result;
......@@ -227,4 +236,4 @@ void TimeseriesHistogram<T, CT, C>::computeRateData(
}
}
} // namespace folly
} // namespace folly
......@@ -16,9 +16,9 @@
#pragma once
#include <string>
#include <folly/stats/Histogram.h>
#include <folly/stats/MultiLevelTimeSeries.h>
#include <string>
namespace folly {
......@@ -53,8 +53,8 @@ template <
class C = folly::MultiLevelTimeSeries<T, CT>>
class TimeseriesHistogram {
private:
// NOTE: T must be equivalent to _signed_ numeric type for our math.
static_assert(std::numeric_limits<T>::is_signed, "");
// NOTE: T must be equivalent to _signed_ numeric type for our math.
static_assert(std::numeric_limits<T>::is_signed, "");
public:
// Values to be inserted into container
......@@ -80,17 +80,26 @@ class TimeseriesHistogram {
* @param defaultContainer a pre-initialized timeseries with the desired
* number of levels and their durations.
*/
TimeseriesHistogram(ValueType bucketSize, ValueType min, ValueType max,
const ContainerType& defaultContainer);
TimeseriesHistogram(
ValueType bucketSize,
ValueType min,
ValueType max,
const ContainerType& defaultContainer);
/* Return the bucket size of each bucket in the histogram. */
ValueType getBucketSize() const { return buckets_.getBucketSize(); }
ValueType getBucketSize() const {
return buckets_.getBucketSize();
}
/* Return the min value at which bucketing begins. */
ValueType getMin() const { return buckets_.getMin(); }
ValueType getMin() const {
return buckets_.getMin();
}
/* Return the max value at which bucketing ends. */
ValueType getMax() const { return buckets_.getMax(); }
ValueType getMax() const {
return buckets_.getMax();
}
/* Return the number of levels of the Timeseries object in each bucket */
size_t getNumLevels() const {
......@@ -386,4 +395,4 @@ class TimeseriesHistogram {
bool singleUniqueValue_;
ValueType firstValue_;
};
} // folly
} // namespace folly
......@@ -20,7 +20,8 @@
#include <cstdint>
#include <type_traits>
namespace folly { namespace detail {
namespace folly {
namespace detail {
/*
* Helper function to compute the average, given a specified input type and
......@@ -31,7 +32,9 @@ namespace folly { namespace detail {
// precision.
template <typename ReturnType>
ReturnType avgHelper(long double sum, uint64_t count) {
if (count == 0) { return ReturnType(0); }
if (count == 0) {
return ReturnType(0);
}
const long double countf = count;
return static_cast<ReturnType>(sum / countf);
}
......@@ -39,11 +42,13 @@ ReturnType avgHelper(long double sum, uint64_t count) {
// In all other cases divide using double precision.
// This should be relatively fast, and accurate enough for most use cases.
template <typename ReturnType, typename ValueType>
typename std::enable_if<!std::is_same<typename std::remove_cv<ValueType>::type,
long double>::value,
ReturnType>::type
typename std::enable_if<
!std::is_same<typename std::remove_cv<ValueType>::type, long double>::value,
ReturnType>::type
avgHelper(ValueType sum, uint64_t count) {
if (count == 0) { return ReturnType(0); }
if (count == 0) {
return ReturnType(0);
}
const double sumf = double(sum);
const double countf = double(count);
return static_cast<ReturnType>(sumf / countf);
......@@ -73,23 +78,21 @@ ReturnType rateHelper(ReturnType count, Duration elapsed) {
std::ratio<Duration::period::den, Duration::period::num>>
NativeRate;
typedef std::chrono::duration<
ReturnType, std::ratio<Interval::period::den,
Interval::period::num>> DesiredRate;
ReturnType,
std::ratio<Interval::period::den, Interval::period::num>>
DesiredRate;
NativeRate native(count / elapsed.count());
DesiredRate desired = std::chrono::duration_cast<DesiredRate>(native);
return desired.count();
}
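For context: `rateHelper` forms count-per-elapsed in the native tick unit, then re-expresses it per the desired interval. A simplified sketch of the same idea using plain `std::chrono` conversions (made-up numbers: 600 events over 60000 ms is 10 events per second):

```cpp
#include <chrono>

double eventsPerSecond(double count, std::chrono::milliseconds elapsed) {
  // Floating-point destination, so the conversion is implicit and lossless.
  std::chrono::duration<double> elapsedSec = elapsed;
  return elapsedSec.count() == 0 ? 0.0 : count / elapsedSec.count();
}

// eventsPerSecond(600, std::chrono::milliseconds(60000)) == 10.0
```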
template<typename T>
template <typename T>
struct Bucket {
public:
typedef T ValueType;
Bucket()
: sum(ValueType()),
count(0) {}
Bucket() : sum(ValueType()), count(0) {}
void clear() {
sum = ValueType();
......@@ -122,5 +125,5 @@ struct Bucket {
ValueType sum;
uint64_t count;
};
}} // folly::detail
} // namespace detail
} // namespace folly
......@@ -13,20 +13,23 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <folly/stats/BucketedTimeSeries.h>
#include <folly/stats/BucketedTimeSeries-defs.h>
#include <glog/logging.h>
#include <folly/Benchmark.h>
#include <folly/stats/BucketedTimeSeries-defs.h>
using std::chrono::seconds;
using folly::BenchmarkSuspender;
using folly::BucketedTimeSeries;
void addValue(unsigned int iters,
seconds duration, size_t numBuckets,
size_t callsPerSecond) {
void addValue(
unsigned int iters,
seconds duration,
size_t numBuckets,
size_t callsPerSecond) {
BenchmarkSuspender suspend;
BucketedTimeSeries<int64_t> ts(numBuckets, duration);
suspend.dismiss();
......@@ -70,7 +73,7 @@ BENCHMARK_NAMED_PARAM(addValue, 100x10_100perSec, seconds(100), 10, 100);
BENCHMARK_NAMED_PARAM(addValue, 71x5_100perSec, seconds(71), 5, 100);
BENCHMARK_NAMED_PARAM(addValue, 1x1_100perSec, seconds(1), 1, 100);
int main(int argc, char *argv[]) {
int main(int argc, char* argv[]) {
gflags::ParseCommandLineFlags(&argc, &argv, true);
folly::runBenchmarks();
return 0;
......
......@@ -27,7 +27,9 @@ void addValue(unsigned int n, int64_t bucketSize, int64_t min, int64_t max) {
FOR_EACH_RANGE (i, 0, n) {
hist.addValue(num);
++num;
if (num > max) { num = min; }
if (num > max) {
num = min;
}
}
}
......@@ -35,7 +37,7 @@ BENCHMARK_NAMED_PARAM(addValue, 0_to_100, 1, 0, 100);
BENCHMARK_NAMED_PARAM(addValue, 0_to_1000, 10, 0, 1000);
BENCHMARK_NAMED_PARAM(addValue, 5k_to_20k, 250, 5000, 20000);
int main(int argc, char *argv[]) {
int main(int argc, char* argv[]) {
gflags::ParseCommandLineFlags(&argc, &argv, true);
folly::runBenchmarks();
return 0;
......
......@@ -15,9 +15,9 @@
*/
#include <folly/stats/Histogram.h>
#include <folly/stats/Histogram-defs.h>
#include <folly/portability/GTest.h>
#include <folly/stats/Histogram-defs.h>
using folly::Histogram;
......@@ -42,8 +42,8 @@ TEST(Histogram, Test100) {
if (n < 100) {
double lowPct = -1.0;
double highPct = -1.0;
unsigned int bucketIdx = h.getPercentileBucketIdx(pct + epsilon,
&lowPct, &highPct);
unsigned int bucketIdx =
h.getPercentileBucketIdx(pct + epsilon, &lowPct, &highPct);
EXPECT_EQ(n + 1, bucketIdx);
EXPECT_FLOAT_EQ(n / 100.0, lowPct);
EXPECT_FLOAT_EQ((n + 1) / 100.0, highPct);
......@@ -53,8 +53,8 @@ TEST(Histogram, Test100) {
if (n > 0) {
double lowPct = -1.0;
double highPct = -1.0;
unsigned int bucketIdx = h.getPercentileBucketIdx(pct - epsilon,
&lowPct, &highPct);
unsigned int bucketIdx =
h.getPercentileBucketIdx(pct - epsilon, &lowPct, &highPct);
EXPECT_EQ(n, bucketIdx);
EXPECT_FLOAT_EQ((n - 1) / 100.0, lowPct);
EXPECT_FLOAT_EQ(n / 100.0, highPct);
......@@ -212,7 +212,7 @@ TEST(Histogram, Counts) {
// Add one to each bucket, make sure the counts match
for (int32_t i = 0; i < 10; i++) {
h.addValue(i);
EXPECT_EQ(i+1, h.computeTotalCount());
EXPECT_EQ(i + 1, h.computeTotalCount());
}
// Add a lot to one bucket, make sure the counts still make sense
......
......@@ -69,16 +69,16 @@ struct TestData {
vector<TimePoint> bucketStarts;
};
vector<TestData> testData = {
// 71 seconds x 4 buckets
{ 71, 4, {0, 18, 36, 54}},
// 100 seconds x 10 buckets
{ 100, 10, {0, 10, 20, 30, 40, 50, 60, 70, 80, 90}},
// 10 seconds x 10 buckets
{ 10, 10, {0, 1, 2, 3, 4, 5, 6, 7, 8, 9}},
// 10 seconds x 1 buckets
{ 10, 1, {0}},
// 1 second x 1 buckets
{ 1, 1, {0}},
// 71 seconds x 4 buckets
{71, 4, {0, 18, 36, 54}},
// 100 seconds x 10 buckets
{100, 10, {0, 10, 20, 30, 40, 50, 60, 70, 80, 90}},
// 10 seconds x 10 buckets
{10, 10, {0, 1, 2, 3, 4, 5, 6, 7, 8, 9}},
// 10 seconds x 1 buckets
{10, 1, {0}},
// 1 second x 1 buckets
{1, 1, {0}},
};
}
......@@ -119,11 +119,11 @@ TEST(BucketedTimeSeries, getBucketInfo) {
size_t returnedIdx;
TimePoint returnedStart;
TimePoint returnedNextStart;
ts.getBucketInfo(expectedStart, &returnedIdx,
&returnedStart, &returnedNextStart);
EXPECT_EQ(idx, returnedIdx) << data.duration << "x" << data.numBuckets
<< ": " << point.first << "="
<< point.second;
ts.getBucketInfo(
expectedStart, &returnedIdx, &returnedStart, &returnedNextStart);
EXPECT_EQ(idx, returnedIdx)
<< data.duration << "x" << data.numBuckets << ": " << point.first
<< "=" << point.second;
EXPECT_EQ(expectedStart, returnedStart)
<< data.duration << "x" << data.numBuckets << ": " << point.first
<< "=" << point.second;
......@@ -167,7 +167,7 @@ void testUpdate100x10(size_t offset) {
setup();
ts.update(seconds(151 + offset));
EXPECT_EQ(4, ts.count());
//EXPECT_EQ(6, ts.sum());
// EXPECT_EQ(6, ts.sum());
EXPECT_EQ(6, ts.avg());
// The last time we added was 95.
......@@ -399,9 +399,10 @@ TEST(BucketedTimeSeries, avgTypeConversion) {
{
// Test uint64_t values that would overflow int64_t
BucketedTimeSeries<uint64_t> ts(60, seconds(600));
ts.addValueAggregated(seconds(0),
std::numeric_limits<uint64_t>::max(),
std::numeric_limits<uint64_t>::max());
ts.addValueAggregated(
seconds(0),
std::numeric_limits<uint64_t>::max(),
std::numeric_limits<uint64_t>::max());
EXPECT_DOUBLE_EQ(1.0, ts.avg());
EXPECT_DOUBLE_EQ(1.0, ts.avg<float>());
......@@ -443,9 +444,7 @@ TEST(BucketedTimeSeries, avgTypeConversion) {
// but the average fits in an int64_t
BucketedTimeSeries<double> ts(60, seconds(600));
uint64_t value = 0x3fffffffffffffff;
FOR_EACH_RANGE(i, 0, 16) {
ts.addValue(seconds(0), value);
}
FOR_EACH_RANGE (i, 0, 16) { ts.addValue(seconds(0), value); }
EXPECT_DOUBLE_EQ(value, ts.avg());
EXPECT_DOUBLE_EQ(value, ts.avg<float>());
......@@ -458,9 +457,7 @@ TEST(BucketedTimeSeries, avgTypeConversion) {
{
// Test BucketedTimeSeries with a smaller integer type
BucketedTimeSeries<int16_t> ts(60, seconds(600));
FOR_EACH_RANGE(i, 0, 101) {
ts.addValue(seconds(0), i);
}
FOR_EACH_RANGE (i, 0, 101) { ts.addValue(seconds(0), i); }
EXPECT_DOUBLE_EQ(50.0, ts.avg());
EXPECT_DOUBLE_EQ(50.0, ts.avg<float>());
......@@ -513,10 +510,9 @@ TEST(BucketedTimeSeries, forEachBucket) {
BucketedTimeSeries<int64_t> ts(data.numBuckets, seconds(data.duration));
vector<BucketInfo> info;
auto fn = [&](
const Bucket& bucket,
TimePoint bucketStart,
TimePoint bucketEnd) -> bool {
auto fn = [&](const Bucket& bucket,
TimePoint bucketStart,
TimePoint bucketEnd) -> bool {
info.emplace_back(&bucket, bucketStart, bucketEnd);
return true;
};
......@@ -589,24 +585,26 @@ TEST(BucketedTimeSeries, queryByInterval) {
// 0: time=[0, 2): values=(0, 1), sum=1, count=2
// 1: time=[2, 4): values=(2, 3), sum=5, count=1
// 2: time=[4, 6): values=(4, 5), sum=9, count=2
// clang-format off
double expectedSums1[kDuration + 1][kDuration + 1] = {
{0, 4.5, 9, 11.5, 14, 14.5, 15},
{0, 4.5, 7, 9.5, 10, 10.5, -1},
{0, 2.5, 5, 5.5, 6, -1, -1},
{0, 2.5, 3, 3.5, -1, -1, -1},
{0, 0.5, 1, -1, -1, -1, -1},
{0, 0.5, -1, -1, -1, -1, -1},
{0, -1, -1, -1, -1, -1, -1}
{0, 4.5, 9, 11.5, 14, 14.5, 15},
{0, 4.5, 7, 9.5, 10, 10.5, -1},
{0, 2.5, 5, 5.5, 6, -1, -1},
{0, 2.5, 3, 3.5, -1, -1, -1},
{0, 0.5, 1, -1, -1, -1, -1},
{0, 0.5, -1, -1, -1, -1, -1},
{0, -1, -1, -1, -1, -1, -1},
};
int expectedCounts1[kDuration + 1][kDuration + 1] = {
{0, 1, 2, 3, 4, 5, 6},
{0, 1, 2, 3, 4, 5, -1},
{0, 1, 2, 3, 4, -1, -1},
{0, 1, 2, 3, -1, -1, -1},
{0, 1, 2, -1, -1, -1, -1},
{0, 1, -1, -1, -1, -1, -1},
{0, -1, -1, -1, -1, -1, -1}
{0, 1, 2, 3, 4, 5, 6},
{0, 1, 2, 3, 4, 5, -1},
{0, 1, 2, 3, 4, -1, -1},
{0, 1, 2, 3, -1, -1, -1},
{0, 1, 2, -1, -1, -1, -1},
{0, 1, -1, -1, -1, -1, -1},
{0, -1, -1, -1, -1, -1, -1},
};
// clang-format on
TimePoint currentTime = b.getLatestTime() + seconds(1);
for (int i = 0; i <= kDuration + 1; i++) {
......@@ -646,24 +644,26 @@ TEST(BucketedTimeSeries, queryByInterval) {
// 0: time=[6, 8): values=(6, 7), sum=13, count=2
// 1: time=[8, 10): values=(8), sum=8, count=1
// 2: time=[4, 6): values=(4, 5), sum=9, count=2
// clang-format off
double expectedSums2[kDuration + 1][kDuration + 1] = {
{0, 8, 14.5, 21, 25.5, 30, 30},
{0, 6.5, 13, 17.5, 22, 22, -1},
{0, 6.5, 11, 15.5, 15.5, -1, -1},
{0, 4.5, 9, 9, -1, -1, -1},
{0, 4.5, 4.5, -1, -1, -1, -1},
{0, 0, -1, -1, -1, -1, -1},
{0, -1, -1, -1, -1, -1, -1}
{0, 8, 14.5, 21, 25.5, 30, 30},
{0, 6.5, 13, 17.5, 22, 22, -1},
{0, 6.5, 11, 15.5, 15.5, -1, -1},
{0, 4.5, 9, 9, -1, -1, -1},
{0, 4.5, 4.5, -1, -1, -1, -1},
{0, 0, -1, -1, -1, -1, -1},
{0, -1, -1, -1, -1, -1, -1},
};
int expectedCounts2[kDuration + 1][kDuration + 1] = {
{0, 1, 2, 3, 4, 5, 5},
{0, 1, 2, 3, 4, 4, -1},
{0, 1, 2, 3, 3, -1, -1},
{0, 1, 2, 2, -1, -1, -1},
{0, 1, 1, -1, -1, -1, -1},
{0, 0, -1, -1, -1, -1, -1},
{0, -1, -1, -1, -1, -1, -1}
{0, 1, 2, 3, 4, 5, 5},
{0, 1, 2, 3, 4, 4, -1},
{0, 1, 2, 3, 3, -1, -1},
{0, 1, 2, 2, -1, -1, -1},
{0, 1, 1, -1, -1, -1, -1},
{0, 0, -1, -1, -1, -1, -1},
{0, -1, -1, -1, -1, -1, -1},
};
// clang-format on
currentTime = b.getLatestTime() + seconds(1);
for (int i = 0; i <= kDuration + 1; i++) {
......@@ -692,8 +692,8 @@ TEST(BucketedTimeSeries, queryByInterval) {
<< "i=" << i << ", j=" << j << ", interval=[" << start << ", " << end
<< ")";
double expectedRate = expectedInterval.count() ?
expectedSum / expectedInterval.count() : 0;
double expectedRate =
expectedInterval.count() ? expectedSum / expectedInterval.count() : 0;
EXPECT_EQ(expectedRate, b.rate(start, end))
<< "i=" << i << ", j=" << j << ", interval=[" << start << ", " << end
<< ")";
......@@ -890,21 +890,19 @@ TEST(BucketedTimeSeries, reConstructWithCorruptedData) {
}
namespace IntMHTS {
enum Levels {
MINUTE,
HOUR,
ALLTIME,
NUM_LEVELS,
};
enum Levels {
MINUTE,
HOUR,
ALLTIME,
NUM_LEVELS,
};
const seconds kMinuteHourDurations[] = {
seconds(60), seconds(3600), seconds(0)
};
const seconds kMinuteHourDurations[] = {seconds(60), seconds(3600), seconds(0)};
};
TEST(MinuteHourTimeSeries, Basic) {
folly::MultiLevelTimeSeries<int> mhts(60, IntMHTS::NUM_LEVELS,
IntMHTS::kMinuteHourDurations);
folly::MultiLevelTimeSeries<int> mhts(
60, IntMHTS::NUM_LEVELS, IntMHTS::kMinuteHourDurations);
EXPECT_EQ(mhts.numLevels(), IntMHTS::NUM_LEVELS);
EXPECT_EQ(mhts.numLevels(), 3);
......@@ -943,8 +941,8 @@ TEST(MinuteHourTimeSeries, Basic) {
EXPECT_EQ(mhts.getLevel(IntMHTS::ALLTIME).elapsed().count(), 300);
EXPECT_EQ(mhts.sum(IntMHTS::MINUTE), 600);
EXPECT_EQ(mhts.sum(IntMHTS::HOUR), 300*10);
EXPECT_EQ(mhts.sum(IntMHTS::ALLTIME), 300*10);
EXPECT_EQ(mhts.sum(IntMHTS::HOUR), 300 * 10);
EXPECT_EQ(mhts.sum(IntMHTS::ALLTIME), 300 * 10);
EXPECT_EQ(mhts.avg(IntMHTS::MINUTE), 10);
EXPECT_EQ(mhts.avg(IntMHTS::HOUR), 10);
......@@ -954,18 +952,18 @@ TEST(MinuteHourTimeSeries, Basic) {
EXPECT_EQ(mhts.rate(IntMHTS::HOUR), 10);
EXPECT_EQ(mhts.rate(IntMHTS::ALLTIME), 10);
for (int i = 0; i < 3600*3 - 300; ++i) {
for (int i = 0; i < 3600 * 3 - 300; ++i) {
mhts.addValue(cur_time++, 10);
}
mhts.flush();
EXPECT_EQ(mhts.getLevel(IntMHTS::MINUTE).elapsed().count(), 60);
EXPECT_EQ(mhts.getLevel(IntMHTS::HOUR).elapsed().count(), 3600);
EXPECT_EQ(mhts.getLevel(IntMHTS::ALLTIME).elapsed().count(), 3600*3);
EXPECT_EQ(mhts.getLevel(IntMHTS::ALLTIME).elapsed().count(), 3600 * 3);
EXPECT_EQ(mhts.sum(IntMHTS::MINUTE), 600);
EXPECT_EQ(mhts.sum(IntMHTS::HOUR), 3600*10);
EXPECT_EQ(mhts.sum(IntMHTS::ALLTIME), 3600*3*10);
EXPECT_EQ(mhts.sum(IntMHTS::HOUR), 3600 * 10);
EXPECT_EQ(mhts.sum(IntMHTS::ALLTIME), 3600 * 3 * 10);
EXPECT_EQ(mhts.avg(IntMHTS::MINUTE), 10);
EXPECT_EQ(mhts.avg(IntMHTS::HOUR), 10);
......@@ -980,10 +978,9 @@ TEST(MinuteHourTimeSeries, Basic) {
}
mhts.flush();
EXPECT_EQ(mhts.sum(IntMHTS::MINUTE), 60*100);
EXPECT_EQ(mhts.sum(IntMHTS::HOUR), 3600*100);
EXPECT_EQ(mhts.sum(IntMHTS::ALLTIME),
3600*3*10 + 3600*100);
EXPECT_EQ(mhts.sum(IntMHTS::MINUTE), 60 * 100);
EXPECT_EQ(mhts.sum(IntMHTS::HOUR), 3600 * 100);
EXPECT_EQ(mhts.sum(IntMHTS::ALLTIME), 3600 * 3 * 10 + 3600 * 100);
EXPECT_EQ(mhts.avg(IntMHTS::MINUTE), 100);
EXPECT_EQ(mhts.avg(IntMHTS::HOUR), 100);
......@@ -1000,30 +997,29 @@ TEST(MinuteHourTimeSeries, Basic) {
}
mhts.flush();
EXPECT_EQ(mhts.sum(IntMHTS::MINUTE), 60*120);
EXPECT_EQ(mhts.sum(IntMHTS::HOUR),
1800*100 + 1800*120);
EXPECT_EQ(mhts.sum(IntMHTS::ALLTIME),
3600*3*10 + 3600*100 + 1800*120);
EXPECT_EQ(mhts.sum(IntMHTS::MINUTE), 60 * 120);
EXPECT_EQ(mhts.sum(IntMHTS::HOUR), 1800 * 100 + 1800 * 120);
EXPECT_EQ(
mhts.sum(IntMHTS::ALLTIME), 3600 * 3 * 10 + 3600 * 100 + 1800 * 120);
for (int i = 0; i < 60; ++i) {
mhts.addValue(cur_time++, 1000);
}
mhts.flush();
EXPECT_EQ(mhts.sum(IntMHTS::MINUTE), 60*1000);
EXPECT_EQ(mhts.sum(IntMHTS::HOUR),
1740*100 + 1800*120 + 60*1000);
EXPECT_EQ(mhts.sum(IntMHTS::ALLTIME),
3600*3*10 + 3600*100 + 1800*120 + 60*1000);
EXPECT_EQ(mhts.sum(IntMHTS::MINUTE), 60 * 1000);
EXPECT_EQ(mhts.sum(IntMHTS::HOUR), 1740 * 100 + 1800 * 120 + 60 * 1000);
EXPECT_EQ(
mhts.sum(IntMHTS::ALLTIME),
3600 * 3 * 10 + 3600 * 100 + 1800 * 120 + 60 * 1000);
mhts.clear();
EXPECT_EQ(mhts.sum(IntMHTS::ALLTIME), 0);
}
TEST(MinuteHourTimeSeries, QueryByInterval) {
folly::MultiLevelTimeSeries<int> mhts(60, IntMHTS::NUM_LEVELS,
IntMHTS::kMinuteHourDurations);
folly::MultiLevelTimeSeries<int> mhts(
60, IntMHTS::NUM_LEVELS, IntMHTS::kMinuteHourDurations);
TimePoint curTime;
for (curTime = mkTimePoint(0); curTime < mkTimePoint(7200);
......@@ -1045,27 +1041,37 @@ TEST(MinuteHourTimeSeries, QueryByInterval) {
TimePoint end;
};
TimeInterval intervals[12] = {
{ curTime - seconds(60), curTime },
{ curTime - seconds(3600), curTime },
{ curTime - seconds(7200), curTime },
{ curTime - seconds(3600), curTime - seconds(60) },
{ curTime - seconds(7200), curTime - seconds(60) },
{ curTime - seconds(7200), curTime - seconds(3600) },
{ curTime - seconds(50), curTime - seconds(20) },
{ curTime - seconds(3020), curTime - seconds(20) },
{ curTime - seconds(7200), curTime - seconds(20) },
{ curTime - seconds(3000), curTime - seconds(1000) },
{ curTime - seconds(7200), curTime - seconds(1000) },
{ curTime - seconds(7200), curTime - seconds(3600) },
{curTime - seconds(60), curTime},
{curTime - seconds(3600), curTime},
{curTime - seconds(7200), curTime},
{curTime - seconds(3600), curTime - seconds(60)},
{curTime - seconds(7200), curTime - seconds(60)},
{curTime - seconds(7200), curTime - seconds(3600)},
{curTime - seconds(50), curTime - seconds(20)},
{curTime - seconds(3020), curTime - seconds(20)},
{curTime - seconds(7200), curTime - seconds(20)},
{curTime - seconds(3000), curTime - seconds(1000)},
{curTime - seconds(7200), curTime - seconds(1000)},
{curTime - seconds(7200), curTime - seconds(3600)},
};
int expectedSums[12] = {
6000, 41400, 32400, 35400, 32130, 16200, 3000, 33600, 32310, 20000, 27900,
16200
6000,
41400,
32400,
35400,
32130,
16200,
3000,
33600,
32310,
20000,
27900,
16200,
};
int expectedCounts[12] = {
60, 3600, 7200, 3540, 7140, 3600, 30, 3000, 7180, 2000, 6200, 3600
60, 3600, 7200, 3540, 7140, 3600, 30, 3000, 7180, 2000, 6200, 3600,
};
for (int i = 0; i < 12; ++i) {
......@@ -1078,13 +1084,11 @@ TEST(MinuteHourTimeSeries, QueryByInterval) {
EXPECT_EQ(expectedCounts[i], c);
int a = mhts.avg<int>(interval.start, interval.end);
EXPECT_EQ(expectedCounts[i] ?
(expectedSums[i] / expectedCounts[i]) : 0,
a);
EXPECT_EQ(expectedCounts[i] ? (expectedSums[i] / expectedCounts[i]) : 0, a);
int r = mhts.rate<int>(interval.start, interval.end);
int expectedRate =
expectedSums[i] / (interval.end - interval.start).count();
expectedSums[i] / (interval.end - interval.start).count();
EXPECT_EQ(expectedRate, r);
}
}
......
......@@ -15,11 +15,11 @@
*/
#include <folly/stats/TimeseriesHistogram.h>
#include <folly/stats/TimeseriesHistogram-defs.h>
#include <random>
#include <folly/portability/GTest.h>
#include <folly/stats/TimeseriesHistogram-defs.h>
using namespace std;
using namespace folly;
......@@ -27,30 +27,35 @@ using std::chrono::seconds;
namespace {
namespace IntMTMHTS {
enum Levels {
MINUTE,
TEN_MINUTE,
HOUR,
ALLTIME,
NUM_LEVELS,
};
enum Levels {
MINUTE,
TEN_MINUTE,
HOUR,
ALLTIME,
NUM_LEVELS,
};
const seconds kDurations[] = {
seconds(60), seconds(600), seconds(3600), seconds(0)
};
const seconds kDurations[] = {
seconds(60),
seconds(600),
seconds(3600),
seconds(0),
};
};
namespace IntMHTS {
enum Levels {
MINUTE,
HOUR,
ALLTIME,
NUM_LEVELS,
};
enum Levels {
MINUTE,
HOUR,
ALLTIME,
NUM_LEVELS,
};
const seconds kDurations[] = {
seconds(60), seconds(3600), seconds(0)
};
const seconds kDurations[] = {
seconds(60),
seconds(3600),
seconds(0),
};
};
typedef std::mt19937 RandomInt32;
......@@ -65,10 +70,12 @@ TEST(TimeseriesHistogram, Percentile) {
RandomInt32 random(5);
// [10, 109], 12 buckets including above and below
{
TimeseriesHistogram<int> h(10, 10, 110,
MultiLevelTimeSeries<int>(
60, IntMTMHTS::NUM_LEVELS,
IntMTMHTS::kDurations));
TimeseriesHistogram<int> h(
10,
10,
110,
MultiLevelTimeSeries<int>(
60, IntMTMHTS::NUM_LEVELS, IntMTMHTS::kDurations));
EXPECT_EQ(0, h.getPercentileEstimate(0, IntMTMHTS::ALLTIME));
......@@ -91,8 +98,9 @@ TEST(TimeseriesHistogram, Percentile) {
h.update(mkTimePoint(1500000000));
// bucket 0 stores everything below min, so its minimum
// is the lowest possible number
EXPECT_EQ(std::numeric_limits<int>::min(),
h.getPercentileBucketMin(1, IntMTMHTS::ALLTIME));
EXPECT_EQ(
std::numeric_limits<int>::min(),
h.getPercentileBucketMin(1, IntMTMHTS::ALLTIME));
EXPECT_EQ(110, h.getPercentileBucketMin(99, IntMTMHTS::ALLTIME));
EXPECT_EQ(-2, h.getPercentileEstimate(0, IntMTMHTS::ALLTIME));
......@@ -106,10 +114,12 @@ TEST(TimeseriesHistogram, String) {
RandomInt32 random(5);
// [10, 109], 12 buckets including above and below
{
TimeseriesHistogram<int> hist(10, 10, 110,
MultiLevelTimeSeries<int>(
60, IntMTMHTS::NUM_LEVELS,
IntMTMHTS::kDurations));
TimeseriesHistogram<int> hist(
10,
10,
110,
MultiLevelTimeSeries<int>(
60, IntMTMHTS::NUM_LEVELS, IntMTMHTS::kDurations));
int maxVal = 120;
hist.addValue(mkTimePoint(0), 0);
......@@ -120,14 +130,14 @@ TEST(TimeseriesHistogram, String) {
hist.update(mkTimePoint(0));
const char* const kStringValues1[IntMTMHTS::NUM_LEVELS] = {
"-2147483648:12:4,10:8:13,20:8:24,30:6:34,40:13:46,50:8:54,60:7:64,"
const char* const kStringValues1[IntMTMHTS::NUM_LEVELS] = {
"-2147483648:12:4,10:8:13,20:8:24,30:6:34,40:13:46,50:8:54,60:7:64,"
"70:7:74,80:8:84,90:10:94,100:3:103,110:10:115",
"-2147483648:12:4,10:8:13,20:8:24,30:6:34,40:13:46,50:8:54,60:7:64,"
"-2147483648:12:4,10:8:13,20:8:24,30:6:34,40:13:46,50:8:54,60:7:64,"
"70:7:74,80:8:84,90:10:94,100:3:103,110:10:115",
"-2147483648:12:4,10:8:13,20:8:24,30:6:34,40:13:46,50:8:54,60:7:64,"
"-2147483648:12:4,10:8:13,20:8:24,30:6:34,40:13:46,50:8:54,60:7:64,"
"70:7:74,80:8:84,90:10:94,100:3:103,110:10:115",
"-2147483648:12:4,10:8:13,20:8:24,30:6:34,40:13:46,50:8:54,60:7:64,"
"-2147483648:12:4,10:8:13,20:8:24,30:6:34,40:13:46,50:8:54,60:7:64,"
"70:7:74,80:8:84,90:10:94,100:3:103,110:10:115",
};
......@@ -137,14 +147,14 @@ TEST(TimeseriesHistogram, String) {
EXPECT_EQ(kStringValues1[level], hist.getString(level));
}
const char* const kStringValues2[IntMTMHTS::NUM_LEVELS] = {
"-2147483648:12:4,10:8:13,20:8:24,30:6:34,40:13:46,50:8:54,60:7:64,"
const char* const kStringValues2[IntMTMHTS::NUM_LEVELS] = {
"-2147483648:12:4,10:8:13,20:8:24,30:6:34,40:13:46,50:8:54,60:7:64,"
"70:7:74,80:8:84,90:10:94,100:3:103,110:10:115",
"-2147483648:12:4,10:8:13,20:8:24,30:6:34,40:13:46,50:8:54,60:7:64,"
"-2147483648:12:4,10:8:13,20:8:24,30:6:34,40:13:46,50:8:54,60:7:64,"
"70:7:74,80:8:84,90:10:94,100:3:103,110:10:115",
"-2147483648:12:4,10:8:13,20:8:24,30:6:34,40:13:46,50:8:54,60:7:64,"
"-2147483648:12:4,10:8:13,20:8:24,30:6:34,40:13:46,50:8:54,60:7:64,"
"70:7:74,80:8:84,90:10:94,100:3:103,110:10:115",
"-2147483648:12:4,10:8:13,20:8:24,30:6:34,40:13:46,50:8:54,60:7:64,"
"-2147483648:12:4,10:8:13,20:8:24,30:6:34,40:13:46,50:8:54,60:7:64,"
"70:7:74,80:8:84,90:10:94,100:3:103,110:10:115",
};
......@@ -158,10 +168,12 @@ TEST(TimeseriesHistogram, String) {
TEST(TimeseriesHistogram, Clear) {
{
TimeseriesHistogram<int> hist(10, 0, 100,
MultiLevelTimeSeries<int>(
60, IntMTMHTS::NUM_LEVELS,
IntMTMHTS::kDurations));
TimeseriesHistogram<int> hist(
10,
0,
100,
MultiLevelTimeSeries<int>(
60, IntMTMHTS::NUM_LEVELS, IntMTMHTS::kDurations));
for (int now = 0; now < 3600; now++) {
for (int i = 0; i < 100; i++) {
......@@ -193,13 +205,14 @@ TEST(TimeseriesHistogram, Clear) {
}
}
TEST(TimeseriesHistogram, Basic) {
{
TimeseriesHistogram<int> hist(10, 0, 100,
MultiLevelTimeSeries<int>(
60, IntMTMHTS::NUM_LEVELS,
IntMTMHTS::kDurations));
TimeseriesHistogram<int> hist(
10,
0,
100,
MultiLevelTimeSeries<int>(
60, IntMTMHTS::NUM_LEVELS, IntMTMHTS::kDurations));
for (int now = 0; now < 3600; now++) {
for (int i = 0; i < 100; i++) {
......@@ -211,8 +224,8 @@ TEST(TimeseriesHistogram, Basic) {
for (int pct = 1; pct <= 100; pct++) {
int expected = (pct - 1) / 10 * 10;
EXPECT_EQ(expected, hist.getPercentileBucketMin(pct, IntMTMHTS::MINUTE));
EXPECT_EQ(expected, hist.getPercentileBucketMin(pct,
IntMTMHTS::TEN_MINUTE));
EXPECT_EQ(
expected, hist.getPercentileBucketMin(pct, IntMTMHTS::TEN_MINUTE));
EXPECT_EQ(expected, hist.getPercentileBucketMin(pct, IntMTMHTS::HOUR));
EXPECT_EQ(expected, hist.getPercentileBucketMin(pct, IntMTMHTS::ALLTIME));
}
......@@ -224,8 +237,8 @@ TEST(TimeseriesHistogram, Basic) {
EXPECT_EQ(36000, hist.getBucket(b).count(IntMTMHTS::ALLTIME));
}
EXPECT_EQ(0, hist.getBucket(0).count(IntMTMHTS::MINUTE));
EXPECT_EQ(0, hist.getBucket(hist.getNumBuckets() - 1).count(
IntMTMHTS::MINUTE));
EXPECT_EQ(
0, hist.getBucket(hist.getNumBuckets() - 1).count(IntMTMHTS::MINUTE));
EXPECT_EQ(6000, hist.count(IntMTMHTS::MINUTE));
EXPECT_EQ(60000, hist.count(IntMTMHTS::TEN_MINUTE));
......@@ -275,10 +288,12 @@ TEST(TimeseriesHistogram, Basic) {
// -----------------
{
TimeseriesHistogram<int> hist(10, 0, 100,
MultiLevelTimeSeries<int>(
60, IntMTMHTS::NUM_LEVELS,
IntMTMHTS::kDurations));
TimeseriesHistogram<int> hist(
10,
0,
100,
MultiLevelTimeSeries<int>(
60, IntMTMHTS::NUM_LEVELS, IntMTMHTS::kDurations));
for (int now = 0; now < 3600; now++) {
for (int i = 0; i < 100; i++) {
......@@ -290,30 +305,32 @@ TEST(TimeseriesHistogram, Basic) {
for (int pct = 1; pct <= 100; pct++) {
int expected = (pct - 1) / 10 * 10;
EXPECT_EQ(expected, hist.getPercentileBucketMin(pct, IntMTMHTS::MINUTE));
EXPECT_EQ(expected, hist.getPercentileBucketMin(pct,
IntMTMHTS::TEN_MINUTE));
EXPECT_EQ(
expected, hist.getPercentileBucketMin(pct, IntMTMHTS::TEN_MINUTE));
EXPECT_EQ(expected, hist.getPercentileBucketMin(pct, IntMTMHTS::HOUR));
EXPECT_EQ(expected, hist.getPercentileBucketMin(pct, IntMTMHTS::ALLTIME));
}
}
for (size_t b = 1; (b + 1) < hist.getNumBuckets(); ++b) {
EXPECT_EQ(600 * 2, hist.getBucket(b).count(IntMTMHTS::MINUTE));
EXPECT_EQ(6000 * 2, hist.getBucket(b).count(IntMTMHTS::TEN_MINUTE));
EXPECT_EQ(36000 * 2, hist.getBucket(b).count(IntMTMHTS::HOUR));
EXPECT_EQ(36000 * 2, hist.getBucket(b).count(IntMTMHTS::ALLTIME));
for (size_t b = 1; (b + 1) < hist.getNumBuckets(); ++b) {
EXPECT_EQ(600 * 2, hist.getBucket(b).count(IntMTMHTS::MINUTE));
EXPECT_EQ(6000 * 2, hist.getBucket(b).count(IntMTMHTS::TEN_MINUTE));
EXPECT_EQ(36000 * 2, hist.getBucket(b).count(IntMTMHTS::HOUR));
EXPECT_EQ(36000 * 2, hist.getBucket(b).count(IntMTMHTS::ALLTIME));
}
EXPECT_EQ(0, hist.getBucket(0).count(IntMTMHTS::MINUTE));
EXPECT_EQ(0, hist.getBucket(hist.getNumBuckets() - 1).count(
IntMTMHTS::MINUTE));
EXPECT_EQ(
0, hist.getBucket(hist.getNumBuckets() - 1).count(IntMTMHTS::MINUTE));
}
// -----------------
{
TimeseriesHistogram<int> hist(10, 0, 100,
MultiLevelTimeSeries<int>(
60, IntMTMHTS::NUM_LEVELS,
IntMTMHTS::kDurations));
TimeseriesHistogram<int> hist(
10,
0,
100,
MultiLevelTimeSeries<int>(
60, IntMTMHTS::NUM_LEVELS, IntMTMHTS::kDurations));
for (int now = 0; now < 3600; now++) {
for (int i = 0; i < 50; i++) {
......@@ -325,8 +342,8 @@ TEST(TimeseriesHistogram, Basic) {
for (int pct = 1; pct <= 100; pct++) {
int expected = (pct - 1) / 10 * 10;
EXPECT_EQ(expected, hist.getPercentileBucketMin(pct, IntMTMHTS::MINUTE));
EXPECT_EQ(expected, hist.getPercentileBucketMin(pct,
IntMTMHTS::TEN_MINUTE));
EXPECT_EQ(
expected, hist.getPercentileBucketMin(pct, IntMTMHTS::TEN_MINUTE));
EXPECT_EQ(expected, hist.getPercentileBucketMin(pct, IntMTMHTS::HOUR));
EXPECT_EQ(expected, hist.getPercentileBucketMin(pct, IntMTMHTS::ALLTIME));
}
......@@ -335,16 +352,15 @@ TEST(TimeseriesHistogram, Basic) {
EXPECT_EQ(0, hist.getBucket(0).count(IntMTMHTS::TEN_MINUTE));
EXPECT_EQ(0, hist.getBucket(0).count(IntMTMHTS::HOUR));
EXPECT_EQ(0, hist.getBucket(0).count(IntMTMHTS::ALLTIME));
EXPECT_EQ(0, hist.getBucket(hist.getNumBuckets() - 1).count(
IntMTMHTS::MINUTE));
EXPECT_EQ(0,
hist.getBucket(hist.getNumBuckets() - 1).
count(IntMTMHTS::TEN_MINUTE));
EXPECT_EQ(0, hist.getBucket(hist.getNumBuckets() - 1).count(
IntMTMHTS::HOUR));
EXPECT_EQ(0,
hist.getBucket(hist.getNumBuckets() - 1).count(
IntMTMHTS::ALLTIME));
EXPECT_EQ(
0, hist.getBucket(hist.getNumBuckets() - 1).count(IntMTMHTS::MINUTE));
EXPECT_EQ(
0,
hist.getBucket(hist.getNumBuckets() - 1).count(IntMTMHTS::TEN_MINUTE));
EXPECT_EQ(
0, hist.getBucket(hist.getNumBuckets() - 1).count(IntMTMHTS::HOUR));
EXPECT_EQ(
0, hist.getBucket(hist.getNumBuckets() - 1).count(IntMTMHTS::ALLTIME));
for (size_t b = 1; (b + 1) < hist.getNumBuckets(); ++b) {
EXPECT_EQ(600, hist.getBucket(b).count(IntMTMHTS::MINUTE));
......@@ -357,18 +373,18 @@ TEST(TimeseriesHistogram, Basic) {
hist.addValue(mkTimePoint(3599), 200 + i);
}
hist.update(mkTimePoint(3599));
EXPECT_EQ(100,
hist.getBucket(hist.getNumBuckets() - 1).count(
IntMTMHTS::ALLTIME));
EXPECT_EQ(
100,
hist.getBucket(hist.getNumBuckets() - 1).count(IntMTMHTS::ALLTIME));
}
}
TEST(TimeseriesHistogram, QueryByInterval) {
TimeseriesHistogram<int> mhts(8, 8, 120,
MultiLevelTimeSeries<int>(
60, IntMHTS::NUM_LEVELS,
IntMHTS::kDurations));
TimeseriesHistogram<int> mhts(
8,
8,
120,
MultiLevelTimeSeries<int>(60, IntMHTS::NUM_LEVELS, IntMHTS::kDurations));
mhts.update(mkTimePoint(0));
......@@ -392,27 +408,37 @@ TEST(TimeseriesHistogram, QueryByInterval) {
StatsClock::time_point end;
};
TimeInterval intervals[12] = {
{ curTime - 60, curTime },
{ curTime - 3600, curTime },
{ curTime - 7200, curTime },
{ curTime - 3600, curTime - 60 },
{ curTime - 7200, curTime - 60 },
{ curTime - 7200, curTime - 3600 },
{ curTime - 50, curTime - 20 },
{ curTime - 3020, curTime - 20 },
{ curTime - 7200, curTime - 20 },
{ curTime - 3000, curTime - 1000 },
{ curTime - 7200, curTime - 1000 },
{ curTime - 7200, curTime - 3600 },
{curTime - 60, curTime},
{curTime - 3600, curTime},
{curTime - 7200, curTime},
{curTime - 3600, curTime - 60},
{curTime - 7200, curTime - 60},
{curTime - 7200, curTime - 3600},
{curTime - 50, curTime - 20},
{curTime - 3020, curTime - 20},
{curTime - 7200, curTime - 20},
{curTime - 3000, curTime - 1000},
{curTime - 7200, curTime - 1000},
{curTime - 7200, curTime - 3600},
};
int expectedSums[12] = {
6000, 41400, 32400, 35400, 32129, 16200, 3000, 33600, 32308, 20000, 27899,
16200
6000,
41400,
32400,
35400,
32129,
16200,
3000,
33600,
32308,
20000,
27899,
16200,
};
int expectedCounts[12] = {
60, 3600, 7200, 3540, 7139, 3600, 30, 3000, 7178, 2000, 6199, 3600
60, 3600, 7200, 3540, 7139, 3600, 30, 3000, 7178, 2000, 6199, 3600,
};
// The first 7200 values added all fell below the histogram minimum,
......@@ -421,18 +447,18 @@ TEST(TimeseriesHistogram, QueryByInterval) {
int belowMinBucket = std::numeric_limits<int>::min();
int expectedValues[12][3] = {
{96, 96, 96},
{ 8, 8, 96},
{ belowMinBucket, belowMinBucket, 8}, // alltime
{ 8, 8, 8},
{ belowMinBucket, belowMinBucket, 8}, // alltime
{ belowMinBucket, belowMinBucket, 8}, // alltime
{96, 96, 96},
{ 8, 8, 96},
{ belowMinBucket, belowMinBucket, 8}, // alltime
{ 8, 8, 8},
{ belowMinBucket, belowMinBucket, 8}, // alltime
{ belowMinBucket, belowMinBucket, 8} // alltime
{96, 96, 96},
{8, 8, 96},
{belowMinBucket, belowMinBucket, 8}, // alltime
{8, 8, 8},
{belowMinBucket, belowMinBucket, 8}, // alltime
{belowMinBucket, belowMinBucket, 8}, // alltime
{96, 96, 96},
{8, 8, 96},
{belowMinBucket, belowMinBucket, 8}, // alltime
{8, 8, 8},
{belowMinBucket, belowMinBucket, 8}, // alltime
{belowMinBucket, belowMinBucket, 8} // alltime
};
for (int i = 0; i < 12; i++) {
......@@ -502,10 +528,12 @@ TEST(TimeseriesHistogram, SingleUniqueValue) {
int values[] = {-1, 0, 500, 1000, 1500};
for (int ii = 0; ii < 5; ++ii) {
int value = values[ii];
TimeseriesHistogram<int> h(10, 0, 1000,
MultiLevelTimeSeries<int>(
60, IntMTMHTS::NUM_LEVELS,
IntMTMHTS::kDurations));
TimeseriesHistogram<int> h(
10,
0,
1000,
MultiLevelTimeSeries<int>(
60, IntMTMHTS::NUM_LEVELS, IntMTMHTS::kDurations));
const int kNumIters = 1000;
for (int jj = 0; jj < kNumIters; ++jj) {
......@@ -520,20 +548,20 @@ TEST(TimeseriesHistogram, SingleUniqueValue) {
// Things get trickier if there are multiple unique values.
const int kNewValue = 750;
for (int kk = 0; kk < 2*kNumIters; ++kk) {
for (int kk = 0; kk < 2 * kNumIters; ++kk) {
h.addValue(mkTimePoint(1), kNewValue);
}
h.update(mkTimePoint(1));
EXPECT_NEAR(h.getPercentileEstimate(50, 0), kNewValue+5, 5);
EXPECT_NEAR(h.getPercentileEstimate(50, 0), kNewValue + 5, 5);
if (value >= 0 && value <= 1000) {
// only do further testing if value is within our bucket range,
// else estimates can be wildly off
if (kNewValue > value) {
EXPECT_NEAR(h.getPercentileEstimate(10, 0), value+5, 5);
EXPECT_NEAR(h.getPercentileEstimate(99, 0), kNewValue+5, 5);
EXPECT_NEAR(h.getPercentileEstimate(10, 0), value + 5, 5);
EXPECT_NEAR(h.getPercentileEstimate(99, 0), kNewValue + 5, 5);
} else {
EXPECT_NEAR(h.getPercentileEstimate(10, 0), kNewValue+5, 5);
EXPECT_NEAR(h.getPercentileEstimate(99, 0), value+5, 5);
EXPECT_NEAR(h.getPercentileEstimate(10, 0), kNewValue + 5, 5);
EXPECT_NEAR(h.getPercentileEstimate(99, 0), value + 5, 5);
}
}
}
......