Merge branch 'ismaelJimenez-complexity'
diff --git a/CONTRIBUTORS b/CONTRIBUTORS
index a575ef1..af40292 100644
--- a/CONTRIBUTORS
+++ b/CONTRIBUTORS
@@ -28,6 +28,7 @@
 Christopher Seymour <chris.j.seymour@hotmail.com>
 David Coeurjolly <david.coeurjolly@liris.cnrs.fr>
 Dominic Hamon <dma@stripysock.com>
+Eric Fiselier <eric@efcs.ca>
 Eugene Zhuk <eugene.zhuk@gmail.com>
 Evgeny Safronov <division494@gmail.com>
 Felix Homann <linuxaudio@showlabor.de>
diff --git a/cmake/CXXFeatureCheck.cmake b/cmake/CXXFeatureCheck.cmake
index 23ee8ac..3059024 100644
--- a/cmake/CXXFeatureCheck.cmake
+++ b/cmake/CXXFeatureCheck.cmake
@@ -21,12 +21,15 @@
   string(TOLOWER ${FILE} FILE)
   string(TOUPPER ${FILE} VAR)
   string(TOUPPER "HAVE_${VAR}" FEATURE)
+  if (DEFINED HAVE_${VAR})
+    return()
+  endif()
   message("-- Performing Test ${FEATURE}")
   try_run(RUN_${FEATURE} COMPILE_${FEATURE}
           ${CMAKE_BINARY_DIR} ${CMAKE_CURRENT_SOURCE_DIR}/cmake/${FILE}.cpp)
   if(RUN_${FEATURE} EQUAL 0)
     message("-- Performing Test ${FEATURE} -- success")
-    set(HAVE_${VAR} 1 PARENT_SCOPE)
+    set(HAVE_${VAR} 1 CACHE INTERNAL "Feature test for ${FILE}" PARENT_SCOPE)
     add_definitions(-DHAVE_${VAR})
   else()
     if(NOT COMPILE_${FEATURE})
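Each feature test compiles and runs a small C++ probe from cmake/<file>.cpp; the
cached HAVE_<FILE> result is only defined when that program exits with status 0,
which is what the new early return above now short-circuits on re-configuration.
A minimal sketch of such a probe, modeled on the steady_clock check (the exact
contents of the shipped probes may differ):

    // steady_clock.cpp -- illustrative feature probe for try_run().
    // Exiting with status 0 causes CXXFeatureCheck to define HAVE_STEADY_CLOCK.
    #include <chrono>

    int main() {
        typedef std::chrono::steady_clock Clock;
        Clock::time_point tp = Clock::now();
        ((void)tp);  // silence unused-variable warnings
        return 0;
    }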
diff --git a/include/benchmark/benchmark_api.h b/include/benchmark/benchmark_api.h
index 04ec624..bf46d97 100644
--- a/include/benchmark/benchmark_api.h
+++ b/include/benchmark/benchmark_api.h
@@ -161,10 +161,17 @@
 
 void Initialize(int* argc, char** argv);
 
-// Otherwise, run all benchmarks specified by the --benchmark_filter flag,
-// and exit after running the benchmarks.
-void RunSpecifiedBenchmarks();
-void RunSpecifiedBenchmarks(BenchmarkReporter* reporter);
+// Generate a list of benchmarks matching the specified --benchmark_filter flag
+// and, if --benchmark_list_tests is specified, return after printing the name
+// of each matching benchmark. Otherwise, run each matching benchmark and
+// report the results.
+//
+// The second overload reports the results using the specified 'reporter'.
+//
+// RETURNS: The number of matching benchmarks.
+size_t RunSpecifiedBenchmarks();
+size_t RunSpecifiedBenchmarks(BenchmarkReporter* reporter);
+
 
 // If this routine is called, peak memory allocation past this point in the
 // benchmark is reported at the end of the benchmark report line. (It is
@@ -236,22 +243,25 @@
 // benchmark to use.
 class State {
 public:
-  State(size_t max_iters, bool has_x, int x, bool has_y, int y, int thread_i, int n_threads);
+  State(size_t max_iters, bool has_x, int x, bool has_y, int y,
+        int thread_i, int n_threads);
 
   // Returns true iff the benchmark should continue through another iteration.
   // NOTE: A benchmark may not return from the test until KeepRunning() has
   // returned false.
   bool KeepRunning() {
     if (BENCHMARK_BUILTIN_EXPECT(!started_, false)) {
-        ResumeTiming();
+        assert(!finished_);
         started_ = true;
+        ResumeTiming();
     }
     bool const res = total_iterations_++ < max_iterations;
     if (BENCHMARK_BUILTIN_EXPECT(!res, false)) {
-        assert(started_);
+        assert(started_ && !finished_);
         PauseTiming();
         // Total iterations now is one greater than max iterations. Fix this.
         total_iterations_ = max_iterations;
+        finished_ = true;
     }
     return res;
   }
@@ -385,6 +395,7 @@
 
 private:
   bool started_;
+  bool finished_;
   size_t total_iterations_;
 
   bool has_range_x_;
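With the new started_/finished_ flags, PauseTiming() and ResumeTiming() are only
valid between the first KeepRunning() call and the one that returns false. A
sketch of the intended usage (BM_UntimedSetup and its loop body are hypothetical):

    #include "benchmark/benchmark_api.h"
    #include <vector>

    static void BM_UntimedSetup(benchmark::State& state) {
      while (state.KeepRunning()) {
        state.PauseTiming();                 // legal: we are inside the loop
        std::vector<int> v(1000, 42);        // per-iteration setup, not timed
        state.ResumeTiming();
        benchmark::DoNotOptimize(v.data());  // timed work
      }
      // Calling state.PauseTiming() here would now trip the new
      // CHECK(started_ && !finished_) instead of corrupting the timers.
    }
    BENCHMARK(BM_UntimedSetup);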
diff --git a/src/benchmark.cc b/src/benchmark.cc
index 84f88ed..15274d8 100644
--- a/src/benchmark.cc
+++ b/src/benchmark.cc
@@ -846,7 +846,7 @@
 
 State::State(size_t max_iters, bool has_x, int x, bool has_y, int y,
              int thread_i, int n_threads)
-    : started_(false), total_iterations_(0),
+    : started_(false), finished_(false), total_iterations_(0),
       has_range_x_(has_x), range_x_(x),
       has_range_y_(has_y), range_y_(y),
       bytes_processed_(0), items_processed_(0),
@@ -862,11 +862,13 @@
 void State::PauseTiming() {
   // Add in time accumulated so far
   CHECK(running_benchmark);
+  CHECK(started_ && !finished_);
   timer_manager->StopTimer();
 }
 
 void State::ResumeTiming() {
   CHECK(running_benchmark);
+  CHECK(started_ && !finished_);
   timer_manager->StartTimer();
 }
 
@@ -885,24 +887,9 @@
 namespace internal {
 namespace {
 
-void PrintBenchmarkList() {
-  std::vector<Benchmark::Instance> benchmarks;
-  auto families = BenchmarkFamilies::GetInstance();
-  if (!families->FindBenchmarks(".", &benchmarks)) return;
-
-  for (const internal::Benchmark::Instance& benchmark : benchmarks) {
-    std::cout <<  benchmark.name << "\n";
-  }
-}
-
-void RunMatchingBenchmarks(const std::string& spec,
+void RunMatchingBenchmarks(const std::vector<Benchmark::Instance>& benchmarks,
                            BenchmarkReporter* reporter) {
   CHECK(reporter != nullptr);
-  if (spec.empty()) return;
-
-  std::vector<Benchmark::Instance> benchmarks;
-  auto families = BenchmarkFamilies::GetInstance();
-  if (!families->FindBenchmarks(spec, &benchmarks)) return;
 
   // Determine the width of the name field using a minimum width of 10.
   size_t name_field_width = 10;
@@ -948,26 +935,32 @@
 } // end namespace
 } // end namespace internal
 
-void RunSpecifiedBenchmarks() {
-  RunSpecifiedBenchmarks(nullptr);
+size_t RunSpecifiedBenchmarks() {
+  return RunSpecifiedBenchmarks(nullptr);
 }
 
-void RunSpecifiedBenchmarks(BenchmarkReporter* reporter) {
-  if (FLAGS_benchmark_list_tests) {
-    internal::PrintBenchmarkList();
-    return;
-  }
+size_t RunSpecifiedBenchmarks(BenchmarkReporter* reporter) {
   std::string spec = FLAGS_benchmark_filter;
   if (spec.empty() || spec == "all")
     spec = ".";  // Regexp that matches all benchmarks
 
-  std::unique_ptr<BenchmarkReporter> default_reporter;
-  if (!reporter) {
-    default_reporter = internal::GetDefaultReporter();
-    reporter = default_reporter.get();
+  std::vector<internal::Benchmark::Instance> benchmarks;
+  auto families = internal::BenchmarkFamilies::GetInstance();
+  if (!families->FindBenchmarks(spec, &benchmarks)) return 0;
+
+  if (FLAGS_benchmark_list_tests) {
+    for (auto const& benchmark : benchmarks)
+      std::cout << benchmark.name << "\n";
+  } else {
+    std::unique_ptr<BenchmarkReporter> default_reporter;
+    if (!reporter) {
+      default_reporter = internal::GetDefaultReporter();
+      reporter = default_reporter.get();
+    }
+    internal::RunMatchingBenchmarks(benchmarks, reporter);
+    reporter->Finalize();
   }
-  internal::RunMatchingBenchmarks(spec, reporter);
-  reporter->Finalize();
+  return benchmarks.size();
 }
 
 namespace internal {
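Because RunSpecifiedBenchmarks() now returns the number of matching benchmarks
(whether they were listed or run), a custom main() can detect a filter that
matched nothing. A minimal sketch (the zero-match policy is illustrative):

    #include "benchmark/benchmark_api.h"
    #include <cstdio>

    int main(int argc, char** argv) {
      benchmark::Initialize(&argc, argv);
      const size_t matched = benchmark::RunSpecifiedBenchmarks();
      if (matched == 0) {
        std::fprintf(stderr, "error: --benchmark_filter matched no benchmarks\n");
        return 1;
      }
      return 0;
    }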
diff --git a/src/check.h b/src/check.h
index d2c1fda..cb49c49 100644
--- a/src/check.h
+++ b/src/check.h
@@ -10,6 +10,18 @@
 namespace benchmark {
 namespace internal {
 
+typedef void(AbortHandlerT)();
+
+inline AbortHandlerT*& GetAbortHandler() {
+    static AbortHandlerT* handler = &std::abort;
+    return handler;
+}
+
+BENCHMARK_NORETURN inline void CallAbortHandler() {
+    GetAbortHandler()();
+    std::abort(); // fallback to enforce noreturn
+}
+
 // CheckHandler is the class constructed by failing CHECK macros. CheckHandler
 // will log information about the failures and abort when it is destructed.
 class CheckHandler {
@@ -25,13 +37,13 @@
     return log_;
   }
 
-  BENCHMARK_NORETURN ~CheckHandler() {
+  BENCHMARK_NORETURN ~CheckHandler() noexcept(false) {
       log_ << std::endl;
-      std::abort();
+      CallAbortHandler();
   }
 
-  CheckHandler & operator=(const CheckHandler&) = delete;
-  CheckHandler(const CheckHandler&) = delete;
+  CheckHandler & operator=(const CheckHandler&) = delete;
+  CheckHandler(const CheckHandler&) = delete;
   CheckHandler() = delete;
 private:
   std::ostream& log_;
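The handler indirection lets a test (or an embedding application) intercept
CHECK failures instead of aborting the process; diagnostics_test.cc below does
exactly this. A minimal sketch, assuming exceptions are enabled (note that
check.h is an internal header, as the test itself warns):

    #include "benchmark/benchmark_api.h"
    #include "../src/check.h"  // internal header; not for normal users
    #include <stdexcept>

    static void ThrowingHandler() {
      // The noexcept(false) CheckHandler destructor lets this propagate.
      // If a handler returns normally, CallAbortHandler() still calls
      // std::abort() to honor BENCHMARK_NORETURN.
      throw std::logic_error("CHECK failed");
    }

    int main(int argc, char** argv) {
      benchmark::internal::GetAbortHandler() = &ThrowingHandler;
      benchmark::Initialize(&argc, argv);
      return 0;
    }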
diff --git a/test/CMakeLists.txt b/test/CMakeLists.txt
index 1bc9dfe..49d1052 100644
--- a/test/CMakeLists.txt
+++ b/test/CMakeLists.txt
@@ -2,10 +2,6 @@
 
 find_package(Threads REQUIRED)
 
-set(CXX03_FLAGS "${CMAKE_CXX_FLAGS}")
-string(REPLACE "-std=c++11" "-std=c++03" CXX03_FLAGS "${CXX03_FLAGS}")
-string(REPLACE "-std=c++0x" "-std=c++03" CXX03_FLAGS "${CXX03_FLAGS}")
-
 macro(compile_benchmark_test name)
   add_executable(${name} "${name}.cc")
   target_link_libraries(${name} benchmark ${CMAKE_THREAD_LIBS_INIT})
@@ -18,6 +14,7 @@
 compile_benchmark_test(filter_test)
 macro(add_filter_test name filter expect)
   add_test(${name} filter_test --benchmark_min_time=0.01 --benchmark_filter=${filter} ${expect})
+  add_test(${name}_list_only filter_test --benchmark_list_tests --benchmark_filter=${filter} ${expect})
 endmacro(add_filter_test)
 
 add_filter_test(filter_simple "Foo" 3)
@@ -36,16 +33,27 @@
 compile_benchmark_test(basic_test)
 add_test(basic_benchmark basic_test --benchmark_min_time=0.01)
 
+compile_benchmark_test(diagnostics_test)
+add_test(diagnostics_test diagnostics_test --benchmark_min_time=0.01)
+
 compile_benchmark_test(fixture_test)
 add_test(fixture_test fixture_test --benchmark_min_time=0.01)
 
 compile_benchmark_test(map_test)
 add_test(map_test map_test --benchmark_min_time=0.01)
 
-compile_benchmark_test(cxx03_test)
-set_target_properties(cxx03_test
-    PROPERTIES COMPILE_FLAGS "${CXX03_FLAGS}")
-add_test(cxx03 cxx03_test --benchmark_min_time=0.01)
+
+check_cxx_compiler_flag(-std=c++03 BENCHMARK_HAS_CXX03_FLAG)
+if (BENCHMARK_HAS_CXX03_FLAG)
+  set(CXX03_FLAGS "${CMAKE_CXX_FLAGS}")
+  string(REPLACE "-std=c++11" "-std=c++03" CXX03_FLAGS "${CXX03_FLAGS}")
+  string(REPLACE "-std=c++0x" "-std=c++03" CXX03_FLAGS "${CXX03_FLAGS}")
+
+  compile_benchmark_test(cxx03_test)
+  set_target_properties(cxx03_test
+      PROPERTIES COMPILE_FLAGS "${CXX03_FLAGS}")
+  add_test(cxx03 cxx03_test --benchmark_min_time=0.01)
+endif()
 
 compile_benchmark_test(complexity_test)
 add_test(complexity_benchmark complexity_test --benchmark_min_time=0.01)
diff --git a/test/diagnostics_test.cc b/test/diagnostics_test.cc
new file mode 100644
index 0000000..60fa3b1
--- /dev/null
+++ b/test/diagnostics_test.cc
@@ -0,0 +1,61 @@
+// Testing:
+//   State::PauseTiming()
+//   State::ResumeTiming()
+// Test that the CHECKs within these functions diagnose when they are called
+// outside of the KeepRunning() loop.
+//
+// NOTE: Users should NOT include or use src/check.h. This is only done in
+// order to test library internals.
+
+#include "benchmark/benchmark_api.h"
+#include "../src/check.h"
+#include <stdexcept>
+#include <cstdlib>
+
+#if defined(__GNUC__) && !defined(__EXCEPTIONS)
+#define TEST_HAS_NO_EXCEPTIONS
+#endif
+
+void TestHandler() {
+#ifndef TEST_HAS_NO_EXCEPTIONS
+  throw std::logic_error("");
+#else
+  std::abort();
+#endif
+}
+
+void try_invalid_pause_resume(benchmark::State& state) {
+#if !defined(NDEBUG) && !defined(TEST_HAS_NO_EXCEPTIONS)
+  try {
+    state.PauseTiming();
+    std::abort();
+  } catch (std::logic_error const&) {}
+  try {
+    state.ResumeTiming();
+    std::abort();
+  } catch (std::logic_error const&) {}
+#else
+  (void)state; // avoid unused warning
+#endif
+}
+
+void BM_diagnostic_test(benchmark::State& state) {
+  static bool called_once = false;
+
+  if (called_once == false) try_invalid_pause_resume(state);
+
+  while (state.KeepRunning()) {
+    benchmark::DoNotOptimize(state.iterations());
+  }
+
+  if (called_once == false) try_invalid_pause_resume(state);
+
+  called_once = true;
+}
+BENCHMARK(BM_diagnostic_test);
+
+int main(int argc, char** argv) {
+  benchmark::internal::GetAbortHandler() = &TestHandler;
+  benchmark::Initialize(&argc, argv);
+  benchmark::RunSpecifiedBenchmarks();
+}
diff --git a/test/filter_test.cc b/test/filter_test.cc
index 2a278ff..0ba4071 100644
--- a/test/filter_test.cc
+++ b/test/filter_test.cc
@@ -68,24 +68,38 @@
 
 
 
-int main(int argc, char* argv[]) {
+int main(int argc, char** argv) {
+  bool list_only = false;
+  for (int i = 0; i < argc; ++i)
+    list_only |= std::string(argv[i]).find("--benchmark_list_tests") != std::string::npos;
+
   benchmark::Initialize(&argc, argv);
 
   TestReporter test_reporter;
-  benchmark::RunSpecifiedBenchmarks(&test_reporter);
+  const size_t returned_count = benchmark::RunSpecifiedBenchmarks(&test_reporter);
 
   if (argc == 2) {
     // Make sure we ran all of the tests
     std::stringstream ss(argv[1]);
-    size_t expected;
-    ss >> expected;
+    size_t expected_return;
+    ss >> expected_return;
 
-    const size_t count = test_reporter.GetCount();
-    if (count != expected) {
-      std::cerr << "ERROR: Expected " << expected << " tests to be ran but only "
-                << count << " completed" << std::endl;
+    if (returned_count != expected_return) {
+      std::cerr << "ERROR: Expected " << expected_return
+                << " tests to match the filter but returned_count = "
+                << returned_count << std::endl;
+      return -1;
+    }
+
+    const size_t expected_reports = list_only ? 0 : expected_return;
+    const size_t reports_count = test_reporter.GetCount();
+    if (reports_count != expected_reports) {
+      std::cerr << "ERROR: Expected " << expected_reports
+                << " tests to be run but reported_count = " << reports_count
+                << std::endl;
       return -1;
     }
   }
+
   return 0;
 }