[runtests] Add a flag for per-test timeouts

Adds a -i option that sets a per-test timeout in seconds. A test that
does not finish within the timeout is considered failed. If -w is not
also given and the value is greater than 1, the watchdog timeout is set
to one second less so tests have a chance to exit cleanly before the
timeout kills them. The timeout is passed to RunTests() in milliseconds;
0 (the default) preserves the existing behavior of no per-test timeout.

Bug: 39351
Change-Id: Ic30b40881fc9561764f38cd574072df09a686f5f
diff --git a/zircon/system/ulib/runtests-utils/discover-and-run-tests.cc b/zircon/system/ulib/runtests-utils/discover-and-run-tests.cc
index 3d86281..04e6790 100644
--- a/zircon/system/ulib/runtests-utils/discover-and-run-tests.cc
+++ b/zircon/system/ulib/runtests-utils/discover-and-run-tests.cc
@@ -78,6 +78,7 @@
           "   -w: Watchdog timeout [5]                           \n"
           "       (accepts the timeout value in seconds)         \n"
           "       The default is up to each test.                \n"
+          "   -i: Per-test timeout in seconds. [6]               \n"
           "\n"
           "[1] -v will pass \"v=1\" argument to the test binary. \n"
           "    Not all test frameworks will honor this argument, \n"
@@ -104,7 +105,12 @@
           "\n"
           "[5] The watchdog timeout option -w only works for     \n"
           "    tests that support the RUNTESTS_WATCHDOG_TIMEOUT  \n"
-          "    environment variable.                             \n");
+          "    environment variable.                             \n"
+          "\n"
+          "[6] Will consider tests failed if they don't          \n"
+          "    finish in this time. If > 1, watchdog timeout     \n"
+          "    will be set to (this value - 1) in order to give  \n"
+          "    tests a chance to clean up and fail cleanly.      \n");
   return EXIT_FAILURE;
 }
 }  // namespace
@@ -119,6 +125,7 @@
   const char* output_dir = nullptr;
   signed char verbosity = -1;
   int watchdog_timeout_seconds = -1;
+  unsigned int timeout_seconds = 0;  // 0 means no per-test timeout.
   const char* test_list_path = nullptr;
   int repeat = 1;
   bool dry_run = false;
@@ -220,9 +227,10 @@
         repeat = static_cast<int>(repeatl);
         break;
       }
+      case 'i':  // intentional fall-through
       case 'w': {
         if (optind > argc) {
-          fprintf(stderr, "Missing argument for -w\n");
+          fprintf(stderr, "Missing argument for %s\n", arg.data());
           return EXIT_FAILURE;
         }
         const char* timeout_str = argv[optind++];
@@ -232,7 +240,15 @@
           fprintf(stderr, "Error: bad timeout\n");
           return EXIT_FAILURE;
         }
-        watchdog_timeout_seconds = static_cast<int>(timeout);
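+        // -w sets the watchdog timeout directly; -i records the per-test timeout.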
+        if (arg.data()[1] == 'w') {
+          watchdog_timeout_seconds = static_cast<int>(timeout);
+        } else {
+          timeout_seconds = static_cast<unsigned int>(timeout);
+          if (watchdog_timeout_seconds == -1 && timeout_seconds > 1 && timeout_seconds <= INT_MAX) {
+            // Give tests a chance to exit cleanly before the timeout kills them.
+            watchdog_timeout_seconds = static_cast<int>(timeout_seconds - 1);
+          }
+        }
         break;
       }
       case 'd': {
@@ -320,8 +336,9 @@
   stopwatch->Start();
   int failed_count = 0;
   fbl::Vector<std::unique_ptr<Result>> results;
-  if (!RunTests(test_paths, test_args, repeat, output_dir, kOutputFileName, verbosity,
-                &failed_count, &results)) {
+  if (!RunTests(test_paths, test_args, repeat,
+                static_cast<uint64_t>(timeout_seconds) * static_cast<uint64_t>(1000), output_dir,
+                kOutputFileName, verbosity, &failed_count, &results)) {
     return EXIT_FAILURE;
   }
 
diff --git a/zircon/system/ulib/runtests-utils/include/runtests-utils/runtests-utils.h b/zircon/system/ulib/runtests-utils/include/runtests-utils/runtests-utils.h
index e3bec275..31c7320 100644
--- a/zircon/system/ulib/runtests-utils/include/runtests-utils/runtests-utils.h
+++ b/zircon/system/ulib/runtests-utils/include/runtests-utils/runtests-utils.h
@@ -136,6 +136,8 @@
 //   each test individually so that:
 //   a) any flakes due to the sequencing of tests can be reproduced
 //   b) we can get an idea of global flake rates without waiting for all runs to complete
+// |timeout_msec| is the number of milliseconds to wait for a test before considering it failed.
+//   Ignored if 0.
 // |output_dir| is the output directory for all the tests' output. May be nullptr, in which case
 //   output will not be captured.
 // |output_file_basename| is the basename of the tests' output files. May be nullptr only if
@@ -151,8 +153,8 @@
 //
 // Returns false if any test binary failed, true otherwise.
 bool RunTests(const fbl::Vector<fbl::String>& test_paths, const fbl::Vector<fbl::String>& test_args,
-              int repeat, const char* output_dir, const fbl::StringPiece output_file_basename,
-              signed char verbosity, int* failed_count,
+              int repeat, uint64_t timeout_msec, const char* output_dir,
+              const fbl::StringPiece output_file_basename, signed char verbosity, int* failed_count,
               fbl::Vector<std::unique_ptr<Result>>* results);
 
 // Expands |dir_globs| and searches those directories for files.
diff --git a/zircon/system/ulib/runtests-utils/runtests-utils.cc b/zircon/system/ulib/runtests-utils/runtests-utils.cc
index 61effe0..7c3f420 100644
--- a/zircon/system/ulib/runtests-utils/runtests-utils.cc
+++ b/zircon/system/ulib/runtests-utils/runtests-utils.cc
@@ -321,8 +321,8 @@
 }
 
 bool RunTests(const fbl::Vector<fbl::String>& test_paths, const fbl::Vector<fbl::String>& test_args,
-              int repeat, const char* output_dir, const fbl::StringPiece output_file_basename,
-              signed char verbosity, int* failed_count,
+              int repeat, uint64_t timeout_msec, const char* output_dir,
+              const fbl::StringPiece output_file_basename, signed char verbosity, int* failed_count,
               fbl::Vector<std::unique_ptr<Result>>* results) {
   std::map<fbl::String, int> test_name_to_count;
   for (int i = 1; i <= repeat; ++i) {
@@ -392,8 +392,8 @@
           "RUNNING TEST: %s\n\n",
           output_test_name.c_str());
       fflush(stdout);
-      std::unique_ptr<Result> result =
-          RunTest(argv.data(), output_dir_for_test, output_filename, output_test_name.c_str(), 0);
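+      // Pass the per-test timeout through; this was previously hard-coded to 0 (no timeout).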
+      std::unique_ptr<Result> result = RunTest(argv.data(), output_dir_for_test, output_filename,
+                                               output_test_name.c_str(), timeout_msec);
       if (result->launch_status != SUCCESS) {
         *failed_count += 1;
       }
diff --git a/zircon/system/utest/runtests-utils/fuchsia-run-test.cc b/zircon/system/utest/runtests-utils/fuchsia-run-test.cc
index 6445163..7ac22f1 100644
--- a/zircon/system/utest/runtests-utils/fuchsia-run-test.cc
+++ b/zircon/system/utest/runtests-utils/fuchsia-run-test.cc
@@ -225,7 +225,7 @@
   const fbl::String output_dir = JoinPath(test_dir.path(), "output");
   const char output_file_base_name[] = "output.txt";
   ASSERT_EQ(0, MkDirAll(output_dir));
-  EXPECT_TRUE(RunTests({test_name}, {}, 1, output_dir.c_str(), output_file_base_name, verbosity,
+  EXPECT_TRUE(RunTests({test_name}, {}, 1, 0, output_dir.c_str(), output_file_base_name, verbosity,
                        &num_failed, &results));
   EXPECT_EQ(0, num_failed);
   EXPECT_EQ(1, results.size());
@@ -246,7 +246,7 @@
   const fbl::String output_dir = JoinPath(test_dir.path(), "output");
   const char output_file_base_name[] = "output.txt";
   ASSERT_EQ(0, MkDirAll(output_dir));
-  EXPECT_TRUE(RunTests({test_name, test_name, test_name}, {}, 1, output_dir.c_str(),
+  EXPECT_TRUE(RunTests({test_name, test_name, test_name}, {}, 1, 0, output_dir.c_str(),
                        output_file_base_name, verbosity, &num_failed, &results));
   EXPECT_EQ(0, num_failed);
   EXPECT_EQ(3, results.size());
diff --git a/zircon/system/utest/runtests-utils/runtests-utils-test.cc b/zircon/system/utest/runtests-utils/runtests-utils-test.cc
index df99fb4..bab66b3 100644
--- a/zircon/system/utest/runtests-utils/runtests-utils-test.cc
+++ b/zircon/system/utest/runtests-utils/runtests-utils-test.cc
@@ -506,7 +506,7 @@
   const fbl::String output_dir = JoinPath(test_dir.path(), "output");
   const char output_file_base_name[] = "output.txt";
   ASSERT_EQ(0, MkDirAll(output_dir));
-  EXPECT_TRUE(RunTests({succeed_file_name}, {}, 1, output_dir.c_str(), output_file_base_name,
+  EXPECT_TRUE(RunTests({succeed_file_name}, {}, 1, 0, output_dir.c_str(), output_file_base_name,
                        verbosity, &num_failed, &results));
   EXPECT_EQ(0, num_failed);
   EXPECT_EQ(1, results.size());
@@ -537,7 +537,7 @@
   const fbl::String output_dir = JoinPath(test_dir.path(), "output");
   const char output_file_base_name[] = "output.txt";
   ASSERT_EQ(0, MkDirAll(output_dir));
-  EXPECT_TRUE(RunTests({succeed_file_name}, args, 1, output_dir.c_str(), output_file_base_name,
+  EXPECT_TRUE(RunTests({succeed_file_name}, args, 1, 0, output_dir.c_str(), output_file_base_name,
                        verbosity, &num_failed, &results));
   EXPECT_EQ(0, num_failed);
   EXPECT_EQ(1, results.size());
@@ -567,8 +567,8 @@
   const fbl::String output_dir = JoinPath(test_dir.path(), "output");
   const char output_file_base_name[] = "output.txt";
   ASSERT_EQ(0, MkDirAll(output_dir));
-  EXPECT_TRUE(RunTests({does_not_exist_file_name}, {}, 1, output_dir.c_str(), output_file_base_name,
-                       verbosity, &num_failed, &results));
+  EXPECT_TRUE(RunTests({does_not_exist_file_name}, {}, 1, 0, output_dir.c_str(),
+                       output_file_base_name, verbosity, &num_failed, &results));
   EXPECT_EQ(1, num_failed);
   EXPECT_EQ(1, results.size());