X-Git-Url: http://plrg.eecs.uci.edu/git/?a=blobdiff_plain;f=folly%2FBenchmark.h;h=3478d10f11cf22ac3ce428290cbc2fa0b1c2c29e;hb=589052a636a717bbf89dfc5c25588cbdaac080a8;hp=3465fa1c07ff7c4e33c7ea9d5340cdee7c31a751;hpb=be60435ff726fa2528e2857d9003e09e53596d86;p=folly.git

diff --git a/folly/Benchmark.h b/folly/Benchmark.h
index 3465fa1c..3478d10f 100644
--- a/folly/Benchmark.h
+++ b/folly/Benchmark.h
@@ -1,5 +1,5 @@
 /*
- * Copyright 2014 Facebook, Inc.
+ * Copyright 2015 Facebook, Inc.
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -17,8 +17,9 @@
 #ifndef FOLLY_BENCHMARK_H_
 #define FOLLY_BENCHMARK_H_
 
-#include "folly/Portability.h"
-#include "folly/Preprocessor.h" // for FB_ANONYMOUS_VARIABLE
+#include <folly/ScopeGuard.h>
+#include <folly/Portability.h>
+#include <folly/Preprocessor.h> // for FB_ANONYMOUS_VARIABLE
 #include <cassert>
 #include <ctime>
 #include <boost/function_types/function_arity.hpp>
@@ -26,6 +27,7 @@
 #include <glog/logging.h>
 #include <gflags/gflags.h>
 #include <limits>
+#include <type_traits>
 
 DECLARE_bool(benchmark);
 
@@ -56,13 +58,15 @@ namespace detail {
  */
 enum Clock { DEFAULT_CLOCK_ID = CLOCK_REALTIME };
 
+typedef std::pair<uint64_t, unsigned int> TimeIterPair;
+
 /**
  * Adds a benchmark wrapped in a std::function. Only used
  * internally. Pass by value is intentional.
  */
 void addBenchmarkImpl(const char* file,
                       const char* name,
-                      std::function<uint64_t(unsigned int)>);
+                      std::function<TimeIterPair(unsigned int)>);
 
 /**
  * Takes the difference between two timespec values. end is assumed to
@@ -73,10 +77,11 @@ inline uint64_t timespecDiff(timespec end, timespec start) {
     assert(end.tv_nsec >= start.tv_nsec);
     return end.tv_nsec - start.tv_nsec;
   }
-  assert(end.tv_sec > start.tv_sec &&
-         end.tv_sec - start.tv_sec <
+  assert(end.tv_sec > start.tv_sec);
+  auto diff = uint64_t(end.tv_sec - start.tv_sec);
+  assert(diff <
          std::numeric_limits<uint64_t>::max() / 1000000000UL);
-  return (end.tv_sec - start.tv_sec) * 1000000000UL
+  return diff * 1000000000UL
          + end.tv_nsec - start.tv_nsec;
 }
 
@@ -114,7 +119,7 @@ struct BenchmarkSuspender {
   }
 
   BenchmarkSuspender(const BenchmarkSuspender &) = delete;
-  BenchmarkSuspender(BenchmarkSuspender && rhs) {
+  BenchmarkSuspender(BenchmarkSuspender && rhs) noexcept {
     start = rhs.start;
     rhs.start.tv_nsec = rhs.start.tv_sec = 0;
   }
@@ -146,13 +151,19 @@ struct BenchmarkSuspender {
     CHECK_EQ(0, clock_gettime(detail::DEFAULT_CLOCK_ID, &start));
   }
 
+  template <typename F>
+  auto dismissing(F f) -> typename std::result_of<F()>::type {
+    SCOPE_EXIT { rehire(); };
+    dismiss();
+    return f();
+  }
+
   /**
-   * This helps the macro definition. To get around the dangers of
-   * operator bool, returns a pointer to member (which allows no
-   * arithmetic).
+   * This is for use inside of if-conditions, as used in the BENCHMARK
+   * macros. If-conditions bypass the explicit keyword on operator bool.
    */
-  operator int BenchmarkSuspender::*() const {
-    return nullptr;
+  explicit operator bool() const {
+    return false;
   }
 
   /**
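The hunk above gives BenchmarkSuspender a dismissing() helper that runs a callable with the suspension lifted, rehiring on scope exit via SCOPE_EXIT. A minimal usage sketch against the API as shown in this patch; makeInput is a hypothetical setup helper, not part of folly:

    #include <algorithm>
    #include <vector>
    #include <folly/Benchmark.h>

    std::vector<int> makeInput(unsigned n);  // hypothetical, expensive setup

    BENCHMARK(sortVector, iters) {
      while (iters--) {
        folly::BenchmarkSuspender braces;  // clock suspended: setup not timed
        auto v = makeInput(10000);         // excluded from the measurement
        braces.dismissing([&] {            // clock runs only inside the lambda
          std::sort(v.begin(), v.end());
        });                                // SCOPE_EXIT rehires the suspender
      }
    }

The suspended time is tallied into BenchmarkSuspender::nsSpent, which the measurement code below subtracts, so only the sort shows up in the reported time.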
@@ -185,24 +196,27 @@ typename std::enable_if<
   == 2
 >::type
 addBenchmark(const char* file, const char* name, Lambda&& lambda) {
-  auto execute = [=](unsigned int times) -> uint64_t {
+  auto execute = [=](unsigned int times) {
     BenchmarkSuspender::nsSpent = 0;
     timespec start, end;
+    unsigned int niter;
 
     // CORE MEASUREMENT STARTS
     auto const r1 = clock_gettime(detail::DEFAULT_CLOCK_ID, &start);
-    lambda(times);
+    niter = lambda(times);
     auto const r2 = clock_gettime(detail::DEFAULT_CLOCK_ID, &end);
     // CORE MEASUREMENT ENDS
 
     CHECK_EQ(0, r1);
     CHECK_EQ(0, r2);
 
-    return detail::timespecDiff(end, start) - BenchmarkSuspender::nsSpent;
+    return detail::TimeIterPair(
+      detail::timespecDiff(end, start) - BenchmarkSuspender::nsSpent,
+      niter);
   };
 
   detail::addBenchmarkImpl(file, name,
-    std::function<uint64_t(unsigned int)>(execute));
+    std::function<detail::TimeIterPair(unsigned int)>(execute));
 }
 
 /**
@@ -218,9 +232,11 @@ typename std::enable_if<
 >::type
 addBenchmark(const char* file, const char* name, Lambda&& lambda) {
   addBenchmark(file, name, [=](unsigned int times) {
+    unsigned int niter = 0;
     while (times-- > 0) {
-      lambda();
+      niter += lambda();
     }
+    return niter;
   });
 }
 
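With these two overloads, a callable registered through folly::addBenchmark() now reports the number of iterations it actually performed. The following sketch is illustrative only (benchmarks are normally registered via the macros below); it shows both forms under the patched signatures:

    #include <vector>
    #include <folly/Benchmark.h>

    void registerBenchmarks() {  // call before the benchmarks are run
      // Nullary form: the wrapper above invokes it repeatedly; each call
      // returns how many iterations it accounts for (here, one).
      folly::addBenchmark(__FILE__, "pushBackOnce", [] {
        std::vector<int> v;
        v.push_back(42);
        folly::doNotOptimizeAway(v.data());
        return 1u;
      });

      // Unary form: receives the requested iteration count, runs the
      // measured loop itself, and returns the count actually executed.
      folly::addBenchmark(__FILE__, "pushBackMany", [](unsigned n) {
        std::vector<int> v;
        for (unsigned i = 0; i < n; ++i) {
          v.push_back(i);
        }
        folly::doNotOptimizeAway(v.data());
        return n;
      });
    }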
@@ -254,20 +270,34 @@ void doNotOptimizeAway(T&& datum) {
  * Introduces a benchmark function. Used internally, see BENCHMARK and
  * friends below.
  */
-#define BENCHMARK_IMPL(funName, stringName, paramType, paramName) \
+#define BENCHMARK_IMPL(funName, stringName, rv, paramType, paramName) \
   static void funName(paramType); \
   static bool FB_ANONYMOUS_VARIABLE(follyBenchmarkUnused) = ( \
     ::folly::addBenchmark(__FILE__, stringName, \
-      [](paramType paramName) { funName(paramName); }), \
+      [](paramType paramName) -> unsigned { funName(paramName); \
+                                            return rv; }), \
     true); \
   static void funName(paramType paramName)
 
 /**
- * Introduces a benchmark function. Use with either one one or two
- * arguments. The first is the name of the benchmark. Use something
- * descriptive, such as insertVectorBegin. The second argument may be
- * missing, or could be a symbolic counter. The counter dictates how
- * many internal iteration the benchmark does. Example:
+ * Introduces a benchmark function with support for returning the actual
+ * number of iterations. Used internally, see BENCHMARK_MULTI and friends
+ * below.
+ */
+#define BENCHMARK_MULTI_IMPL(funName, stringName, paramType, paramName) \
+  static unsigned funName(paramType); \
+  static bool FB_ANONYMOUS_VARIABLE(follyBenchmarkUnused) = ( \
+    ::folly::addBenchmark(__FILE__, stringName, \
+      [](paramType paramName) { return funName(paramName); }), \
+    true); \
+  static unsigned funName(paramType paramName)
+
+/**
+ * Introduces a benchmark function. Use with either one or two arguments.
+ * The first is the name of the benchmark. Use something descriptive, such
+ * as insertVectorBegin. The second argument may be missing, or could be a
+ * symbolic counter. The counter dictates how many internal iterations the
+ * benchmark does. Example:
  *
  *   BENCHMARK(vectorPushBack) {
  *     vector<int> v;
@@ -283,6 +313,29 @@ void doNotOptimizeAway(T&& datum) {
  */
 #define BENCHMARK(name, ...) \
   BENCHMARK_IMPL( \
+    name, \
+    FB_STRINGIZE(name), \
+    FB_ARG_2_OR_1(1, ## __VA_ARGS__), \
+    FB_ONE_OR_NONE(unsigned, ## __VA_ARGS__), \
+    __VA_ARGS__)
+
+/**
+ * Like BENCHMARK above, but allows the user to return the actual
+ * number of iterations executed in the function body. This can be
+ * useful if the benchmark function doesn't know upfront how many
+ * iterations it's going to run, or if it runs through a certain
+ * number of test cases, e.g.:
+ *
+ *   BENCHMARK_MULTI(benchmarkSomething) {
+ *     std::vector<int> testCases { 0, 1, 1, 2, 3, 5 };
+ *     for (int c : testCases) {
+ *       doSomething(c);
+ *     }
+ *     return testCases.size();
+ *   }
+ */
+#define BENCHMARK_MULTI(name, ...) \
+  BENCHMARK_MULTI_IMPL( \
     name, \
     FB_STRINGIZE(name), \
     FB_ONE_OR_NONE(unsigned, ## __VA_ARGS__), \
     __VA_ARGS__)
@@ -313,6 +366,13 @@ void doNotOptimizeAway(T&& datum) {
 #define BENCHMARK_PARAM(name, param) \
   BENCHMARK_NAMED_PARAM(name, param, param)
 
+/**
+ * Same as BENCHMARK_PARAM, but allows returning the actual number of
+ * iterations that have been run.
+ */
+#define BENCHMARK_PARAM_MULTI(name, param) \
+  BENCHMARK_NAMED_PARAM_MULTI(name, param, param)
+
 /*
  * Like BENCHMARK_PARAM(), but allows a custom name to be specified for each
  * parameter, rather than using the parameter value.
@@ -340,11 +400,25 @@ void doNotOptimizeAway(T&& datum) {
   BENCHMARK_IMPL( \
     FB_CONCATENATE(name, FB_CONCATENATE(_, param_name)), \
     FB_STRINGIZE(name) "(" FB_STRINGIZE(param_name) ")", \
+    iters, \
     unsigned, \
     iters) { \
     name(iters, ## __VA_ARGS__); \
   }
 
+/**
+ * Same as BENCHMARK_NAMED_PARAM, but allows returning the actual number
+ * of iterations that have been run.
+ */
+#define BENCHMARK_NAMED_PARAM_MULTI(name, param_name, ...) \
+  BENCHMARK_MULTI_IMPL( \
+    FB_CONCATENATE(name, FB_CONCATENATE(_, param_name)), \
+    FB_STRINGIZE(name) "(" FB_STRINGIZE(param_name) ")", \
+    unsigned, \
+    iters) { \
+    return name(iters, ## __VA_ARGS__); \
+  }
+
 /**
  * Just like BENCHMARK, but prints the time relative to a
  * baseline. The baseline is the most recent BENCHMARK() seen in
@@ -371,6 +445,18 @@ void doNotOptimizeAway(T&& datum) {
  */
 #define BENCHMARK_RELATIVE(name, ...) \
   BENCHMARK_IMPL( \
+    name, \
+    "%" FB_STRINGIZE(name), \
+    FB_ARG_2_OR_1(1, ## __VA_ARGS__), \
+    FB_ONE_OR_NONE(unsigned, ## __VA_ARGS__), \
+    __VA_ARGS__)
+
+/**
+ * Same as BENCHMARK_RELATIVE, but allows returning the actual number
+ * of iterations that have been run.
+ */
+#define BENCHMARK_RELATIVE_MULTI(name, ...) \
+  BENCHMARK_MULTI_IMPL( \
     name, \
     "%" FB_STRINGIZE(name), \
     FB_ONE_OR_NONE(unsigned, ## __VA_ARGS__), \
     __VA_ARGS__)
@@ -382,6 +468,13 @@ void doNotOptimizeAway(T&& datum) {
 #define BENCHMARK_RELATIVE_PARAM(name, param) \
   BENCHMARK_RELATIVE_NAMED_PARAM(name, param, param)
 
+/**
+ * Same as BENCHMARK_RELATIVE_PARAM, but allows returning the actual
+ * number of iterations that have been run.
+ */
+#define BENCHMARK_RELATIVE_PARAM_MULTI(name, param) \
+  BENCHMARK_RELATIVE_NAMED_PARAM_MULTI(name, param, param)
+
 /**
  * A combination of BENCHMARK_RELATIVE and BENCHMARK_NAMED_PARAM.
  */
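As a usage sketch for the relative macros (illustrative code, not part of the patch): a plain BENCHMARK establishes the baseline, and each BENCHMARK_RELATIVE that follows it in the same file is printed as a percentage of that baseline:

    #include <deque>
    #include <vector>
    #include <folly/Benchmark.h>

    BENCHMARK(insertFrontVector, iters) {
      std::vector<int> v;
      for (unsigned i = 0; i < iters; ++i) {
        v.insert(v.begin(), i);           // baseline: O(n) front insertion
      }
      folly::doNotOptimizeAway(v.data());
    }

    BENCHMARK_RELATIVE(insertFrontDeque, iters) {
      std::deque<int> d;
      for (unsigned i = 0; i < iters; ++i) {
        d.push_front(i);                  // reported relative to the baseline
      }
      folly::doNotOptimizeAway(d.size());
    }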
@@ -389,17 +482,31 @@ void doNotOptimizeAway(T&& datum) {
   BENCHMARK_IMPL( \
     FB_CONCATENATE(name, FB_CONCATENATE(_, param_name)), \
     "%" FB_STRINGIZE(name) "(" FB_STRINGIZE(param_name) ")", \
+    iters, \
     unsigned, \
     iters) { \
     name(iters, ## __VA_ARGS__); \
   }
 
+/**
+ * Same as BENCHMARK_RELATIVE_NAMED_PARAM, but allows returning the
+ * actual number of iterations that have been run.
+ */
+#define BENCHMARK_RELATIVE_NAMED_PARAM_MULTI(name, param_name, ...) \
+  BENCHMARK_MULTI_IMPL( \
+    FB_CONCATENATE(name, FB_CONCATENATE(_, param_name)), \
+    "%" FB_STRINGIZE(name) "(" FB_STRINGIZE(param_name) ")", \
+    unsigned, \
+    iters) { \
+    return name(iters, ## __VA_ARGS__); \
+  }
+
 /**
  * Draws a line of dashes.
  */
-#define BENCHMARK_DRAW_LINE() \
-  static bool FB_ANONYMOUS_VARIABLE(follyBenchmarkUnused) = ( \
-    ::folly::addBenchmark(__FILE__, "-", []() { }), \
+#define BENCHMARK_DRAW_LINE() \
+  static bool FB_ANONYMOUS_VARIABLE(follyBenchmarkUnused) = ( \
+    ::folly::addBenchmark(__FILE__, "-", []() -> unsigned { return 0; }), \
     true);
 
 /**
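Putting the new _MULTI macros together, a complete benchmark translation unit might look like the sketch below. It assumes folly::runBenchmarks() as the driver (declared elsewhere in this header, not in this patch) and a hypothetical doSomething() under test:

    #include <vector>
    #include <folly/Benchmark.h>
    #include <gflags/gflags.h>

    void doSomething(int);  // hypothetical function under test

    BENCHMARK_MULTI(runAllTestCases) {
      std::vector<int> testCases { 0, 1, 1, 2, 3, 5 };
      for (int c : testCases) {
        doSomething(c);
      }
      return testCases.size();  // iterations actually performed
    }

    int main(int argc, char** argv) {
      gflags::ParseCommandLineFlags(&argc, &argv, true);
      folly::runBenchmarks();   // runs everything registered above
      return 0;
    }

The returned count feeds the TimeIterPair plumbing added above, so the framework can divide measured time by the number of iterations the body actually performed rather than the number it was asked to perform.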