[testing] Assume output_dir is set

The swarming output directory passed to `collect` is now always set
for testing swarming tasks, so we can stop checking whether it's set
before using it.

Change-Id: Ib92f9b1c231335c658067369826788c446c2eadd
diff --git a/recipe_modules/testing/api.py b/recipe_modules/testing/api.py
index d4a9040..629560e 100644
--- a/recipe_modules/testing/api.py
+++ b/recipe_modules/testing/api.py
@@ -61,24 +61,24 @@
       env_name,
       from_fuchsia,
       results_dir,
+      output_dir,
       outputs,
       tests,
       legacy_qemu,
       catapult_files,
       api,
-      output_dir=None,
       symbolizer_output=None,
       overwrite_summary=True,
   ):
     self._env_name = env_name
     self._from_fuchsia = from_fuchsia
     self._results_dir = results_dir
+    self._output_dir = output_dir
     self._outputs = outputs
     self._tests = tests
     self._legacy_qemu = legacy_qemu
     self._catapult_files = catapult_files
     self._api = api
-    self._output_dir = output_dir
     self._symbolizer_output = symbolizer_output
 
     if TEST_SUMMARY_JSON in outputs:
@@ -208,17 +208,16 @@
     """Upload select test results (e.g., coverage data) to a given GCS bucket."""
     assert gcs_bucket
     with self._api.step.nest('upload %s test results' % self._env_name):
-      if self.output_dir:
-        if self.summary:
-          # Save the summary JSON to the test shard output dir so it gets
-          # uploaded to GCS for easy access by e.g. Dachsiaboard.
-          summary_path = self.output_dir.join(TEST_SUMMARY_JSON)
-          assert not self._api.path.exists(summary_path), (
-              'test output files should not be named %s' % TEST_SUMMARY_JSON)
-          self._api.file.write_json('write %s' % TEST_SUMMARY_JSON,
-                                    summary_path, self.summary)
+      if self.summary:
+        # Save the summary JSON to the test shard output dir so it gets
+        # uploaded to GCS for easy access by e.g. Dachsiaboard.
+        summary_path = self.output_dir.join(TEST_SUMMARY_JSON)
+        assert not self._api.path.exists(summary_path), (
+            'test output files should not be named %s' % TEST_SUMMARY_JSON)
+        self._api.file.write_json('write %s' % TEST_SUMMARY_JSON, summary_path,
+                                  self.summary)
 
-        self._upload_outputs(gcs_bucket)
+      self._upload_outputs(gcs_bucket)
 
       if upload_to_catapult:
         for catapult_file in self._catapult_files:
@@ -246,14 +245,13 @@
       raise self._api.step.StepFailure('Test failure(s): ' +
                                        ', '.join(failed_tests))
 
+    # Check serial log for failure messages.
     # TODO(9936): Replace with running binary tool once created.
-    if self._output_dir:
-      # Check serial log for failure messages
-      fail_strings = ['DEVICE SUSPEND TIMED OUT', 'ASSERT FAILED']
-      log_path = self.output_dir.join(SERIAL_LOG_NAME)
-      self._api.path.mock_add_paths(log_path)
-      if self._api.path.exists(log_path):
-        self._check_log_for_failures(log_path, fail_strings)
+    fail_strings = ['DEVICE SUSPEND TIMED OUT', 'ASSERT FAILED']
+    log_path = self.output_dir.join(SERIAL_LOG_NAME)
+    self._api.path.mock_add_paths(log_path)
+    if self._api.path.exists(log_path):
+      self._check_log_for_failures(log_path, fail_strings)
 
   def _check_log_for_failures(self, log_path, fail_strings):
     """Checks for fail strings in log and fails accordingly."""
@@ -418,23 +416,22 @@
       Args:
         attempt (swarming_retry.Attempt): the attempt to check for logs in
       """
+      # Check serial log for failure messages.
       # TODO(9936): Replace with running binary tool once created.
-      if attempt.result.output_dir:
-        # Check serial log for failure messages
-        fail_strings = ['DEVICE SUSPEND TIMED OUT', 'ASSERT FAILED']
-        log_path = attempt.result.output_dir.join(SERIAL_LOG_NAME)
-        self._api.path.mock_add_paths(log_path)
-        if self._api.path.exists(log_path):
-          log_name = self._api.path.basename(log_path)
-          with self._api.step.nest('check log %s' % log_name) as presentation:
-            contents = self._api.file.read_text('read', log_path)
-            for fail_str in fail_strings:
-              if fail_str in contents:
-                presentation.logs[log_name] = contents.splitlines()
-                presentation.status = self._api.step.FAILURE
-                presentation.step_summary_text = 'found "%s"' % fail_str
-                attempt.failure_reason = ('found "%s" in %s' %
-                                          (fail_str, log_name))
+      fail_strings = ['DEVICE SUSPEND TIMED OUT', 'ASSERT FAILED']
+      log_path = attempt.result.output_dir.join(SERIAL_LOG_NAME)
+      self._api.path.mock_add_paths(log_path)
+      if self._api.path.exists(log_path):
+        log_name = self._api.path.basename(log_path)
+        with self._api.step.nest('check log %s' % log_name) as presentation:
+          contents = self._api.file.read_text('read', log_path)
+          for fail_str in fail_strings:
+            if fail_str in contents:
+              presentation.logs[log_name] = contents.splitlines()
+              presentation.status = self._api.step.FAILURE
+              presentation.step_summary_text = 'found "%s"' % fail_str
+              attempt.failure_reason = ('found "%s" in %s' %
+                                        (fail_str, log_name))
 
     def present_status(self, parent_step, attempt, **kwargs):
       """Present an Attempt while showing progress in launch/collect step.
@@ -1870,8 +1867,6 @@
     presentation.status = status
 
   def _find_catapult_files(self, directory):
-    if not directory:  # pragma: no cover
-      return []
     with self.m.step.nest('find catapult files'):
       files = self.m.file.glob_paths(
           'glob',