diff --git a/.editorconfig b/.editorconfig
index d7d9cae..027bdb1 100644
--- a/.editorconfig
+++ b/.editorconfig
@@ -11,3 +11,4 @@
 charset = utf-8
 indent_style = space
 indent_size = 4
+max_line_length = 88
diff --git a/.gitignore b/.gitignore
index 767654b..b521867 100644
--- a/.gitignore
+++ b/.gitignore
@@ -13,6 +13,7 @@
 *.egg-info/
 /build/
 /.venv
+/.mypy_cache
 
 #
 # Editors
@@ -20,6 +21,7 @@
 
 /.idea/
 /.vscode/
+*~
 
 #
 # antlion
@@ -35,6 +37,7 @@
 
 # Local development scripts
 /*.sh
+!/format.sh
 
 #
 # third_party
diff --git a/BUILD.gn b/BUILD.gn
index f2aab56..c1b63f5 100644
--- a/BUILD.gn
+++ b/BUILD.gn
@@ -9,28 +9,40 @@
 
 import("//build/python/python_library.gni")
 
+assert(is_host, "antlion only supported on the host toolchain")
+
 # Tests for full build validation
 group("e2e_tests") {
   testonly = true
-  public_deps = [ "src/antlion/tests:e2e_tests" ]
+  public_deps = [ "tests:e2e_tests" ]
 }
 
 # Subset of tests to validate builds in under 15 minutes.
 group("e2e_tests_quick") {
   testonly = true
-  public_deps = [ "src/antlion/tests:e2e_tests_quick" ]
+  public_deps = [ "tests:e2e_tests_quick" ]
 }
 
 # Tests for at-desk custom validation
 group("e2e_tests_manual") {
   testonly = true
-  public_deps = [ "src/antlion/tests:e2e_tests_manual" ]
+  public_deps = [ "tests:e2e_tests_manual" ]
 }
 
-# deprecated: prefer e2e_tests_quick
-group("smoke_tests") {
+# Tests to validate the netstack in under 15 minutes.
+group("e2e_tests_netstack_quick") {
   testonly = true
-  public_deps = [ ":e2e_tests_quick" ]
+  public_deps = [
+    "tests/dhcp:dhcpv4_duplicate_address_test",
+    "tests/dhcp:dhcpv4_interop_basic_test",
+    "tests/dhcp:dhcpv4_interop_combinatorial_options_test",
+    "tests/wlan/functional:beacon_loss_test",
+    "tests/wlan/performance:channel_sweep_test_quick",
+
+    # TODO(http://b/372467106): Uncomment once ToggleWlanInterfaceStressTest is
+    # updated to use current Fuchsia APIs for removing interfaces.
+    # "tests/netstack:toggle_wlan_interface_stress_test",
+  ]
 }
 
 # Unit tests only
@@ -40,15 +52,14 @@
 }
 
 python_library("antlion") {
-  source_root = "//third_party/antlion/src/antlion"
+  enable_mypy = false
+  source_root = "//third_party/antlion/packages/antlion"
+  testonly = true
   sources = [
     "__init__.py",
     "base_test.py",
-    "bin/__init__.py",
-    "bin/act.py",
     "capabilities/__init__.py",
     "capabilities/ssh.py",
-    "config_parser.py",
     "context.py",
     "controllers/__init__.py",
     "controllers/access_point.py",
@@ -61,8 +72,6 @@
     "controllers/android_lib/events.py",
     "controllers/android_lib/logcat.py",
     "controllers/android_lib/services.py",
-    "controllers/android_lib/tel/__init__.py",
-    "controllers/android_lib/tel/tel_utils.py",
     "controllers/ap_lib/__init__.py",
     "controllers/ap_lib/ap_get_interface.py",
     "controllers/ap_lib/ap_iwconfig.py",
@@ -81,6 +90,7 @@
     "controllers/ap_lib/radvd.py",
     "controllers/ap_lib/radvd_config.py",
     "controllers/ap_lib/radvd_constants.py",
+    "controllers/ap_lib/regulatory_channels.py",
     "controllers/ap_lib/third_party_ap_profiles/__init__.py",
     "controllers/ap_lib/third_party_ap_profiles/actiontec.py",
     "controllers/ap_lib/third_party_ap_profiles/asus.py",
@@ -102,26 +112,13 @@
     "controllers/fuchsia_device.py",
     "controllers/fuchsia_lib/__init__.py",
     "controllers/fuchsia_lib/base_lib.py",
-    "controllers/fuchsia_lib/device_lib.py",
-    "controllers/fuchsia_lib/ffx.py",
-    "controllers/fuchsia_lib/hardware_power_statecontrol_lib.py",
     "controllers/fuchsia_lib/lib_controllers/__init__.py",
-    "controllers/fuchsia_lib/lib_controllers/netstack_controller.py",
     "controllers/fuchsia_lib/lib_controllers/wlan_controller.py",
     "controllers/fuchsia_lib/lib_controllers/wlan_policy_controller.py",
-    "controllers/fuchsia_lib/location/__init__.py",
-    "controllers/fuchsia_lib/location/regulatory_region_lib.py",
-    "controllers/fuchsia_lib/logging_lib.py",
-    "controllers/fuchsia_lib/netstack/__init__.py",
-    "controllers/fuchsia_lib/netstack/netstack_lib.py",
     "controllers/fuchsia_lib/package_server.py",
     "controllers/fuchsia_lib/sl4f.py",
     "controllers/fuchsia_lib/ssh.py",
-    "controllers/fuchsia_lib/utils_lib.py",
-    "controllers/fuchsia_lib/wlan_ap_policy_lib.py",
     "controllers/fuchsia_lib/wlan_deprecated_configuration_lib.py",
-    "controllers/fuchsia_lib/wlan_lib.py",
-    "controllers/fuchsia_lib/wlan_policy_lib.py",
     "controllers/iperf_client.py",
     "controllers/iperf_server.py",
     "controllers/openwrt_ap.py",
@@ -131,6 +128,7 @@
     "controllers/openwrt_lib/openwrt_constants.py",
     "controllers/openwrt_lib/wireless_config.py",
     "controllers/openwrt_lib/wireless_settings_applier.py",
+    "controllers/packet_capture.py",
     "controllers/pdu.py",
     "controllers/pdu_lib/__init__.py",
     "controllers/pdu_lib/digital_loggers/__init__.py",
@@ -145,7 +143,6 @@
     "controllers/sl4a_lib/sl4a_manager.py",
     "controllers/sl4a_lib/sl4a_ports.py",
     "controllers/sl4a_lib/sl4a_session.py",
-    "controllers/sl4a_lib/sl4a_types.py",
     "controllers/sniffer.py",
     "controllers/sniffer_lib/__init__.py",
     "controllers/sniffer_lib/local/__init__.py",
@@ -154,22 +151,26 @@
     "controllers/sniffer_lib/local/tshark.py",
     "controllers/utils_lib/__init__.py",
     "controllers/utils_lib/commands/__init__.py",
+    "controllers/utils_lib/commands/command.py",
+    "controllers/utils_lib/commands/date.py",
     "controllers/utils_lib/commands/ip.py",
+    "controllers/utils_lib/commands/journalctl.py",
+    "controllers/utils_lib/commands/nmcli.py",
+    "controllers/utils_lib/commands/pgrep.py",
     "controllers/utils_lib/commands/route.py",
     "controllers/utils_lib/commands/shell.py",
-    "controllers/utils_lib/host_utils.py",
+    "controllers/utils_lib/commands/tcpdump.py",
     "controllers/utils_lib/ssh/__init__.py",
     "controllers/utils_lib/ssh/connection.py",
     "controllers/utils_lib/ssh/formatter.py",
     "controllers/utils_lib/ssh/settings.py",
-    "dict_object.py",
+    "decorators.py",
     "error.py",
     "event/__init__.py",
     "event/decorators.py",
     "event/event.py",
     "event/event_bus.py",
     "event/event_subscription.py",
-    "event/subscription_bundle.py",
     "event/subscription_handle.py",
     "keys.py",
     "libs/__init__.py",
@@ -188,13 +189,9 @@
     "libs/proc/__init__.py",
     "libs/proc/job.py",
     "libs/proc/process.py",
-    "libs/yaml_writer.py",
     "logger.py",
     "net.py",
-    "records.py",
-    "signals.py",
-    "test_decorators.py",
-    "test_runner.py",
+    "runner.py",
     "test_utils/__init__.py",
     "test_utils/abstract_devices/__init__.py",
     "test_utils/abstract_devices/wlan_device.py",
@@ -202,7 +199,6 @@
     "test_utils/dhcp/__init__.py",
     "test_utils/dhcp/base_test.py",
     "test_utils/fuchsia/__init__.py",
-    "test_utils/fuchsia/utils.py",
     "test_utils/fuchsia/wmm_test_cases.py",
     "test_utils/net/__init__.py",
     "test_utils/net/connectivity_const.py",
@@ -210,19 +206,16 @@
     "test_utils/wifi/__init__.py",
     "test_utils/wifi/base_test.py",
     "test_utils/wifi/wifi_constants.py",
-    "test_utils/wifi/wifi_performance_test_utils/__init__.py",
-    "test_utils/wifi/wifi_performance_test_utils/bokeh_figure.py",
-    "test_utils/wifi/wifi_performance_test_utils/brcm_utils.py",
-    "test_utils/wifi/wifi_performance_test_utils/ping_utils.py",
-    "test_utils/wifi/wifi_performance_test_utils/qcom_utils.py",
-    "test_utils/wifi/wifi_power_test_utils.py",
     "test_utils/wifi/wifi_test_utils.py",
-    "tracelogger.py",
+    "types.py",
     "utils.py",
+    "validation.py",
   ]
   library_deps = [
+    "third_party/github.com/jd/tenacity",
+    "//sdk/fidl/fuchsia.wlan.common:fuchsia.wlan.common_python",
+    "//src/testing/end_to_end/honeydew",
     "//third_party/mobly",
     "//third_party/pyyaml:yaml",
-    "third_party/github.com/jd/tenacity",
   ]
 }
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 248b51f..0c36022 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -10,7 +10,19 @@
 
 ## [Unreleased]
 
-## 0.3.0 - 2023-05-17
+[unreleased]: https://fuchsia.googlesource.com/antlion/+/refs/tags/v0.3.0..refs/heads/main
+
+### Removed
+
+- [BREAKING CHANGE] Support for Python 3.8, 3.9, and 3.10. The minimum supported
+version of Python is now 3.11. If running antlion as part of the Fuchsia tree,
+nothing is required; Python 3.11 is vendored with Fuchsia and will be found by
+GN. If running antlion out of tree, ensure your Python version is at least 3.11.
+- `WlanRvrTest` user params `debug_pre_traffic_cmd` and `debug_post_traffic_cmd`
+
+## [0.3.0] - 2023-05-17
+
+[0.3.0]: https://fuchsia.googlesource.com/antlion/+/refs/tags/v0.2.0..refs/tags/v0.3.0
 
 ### Deprecated
 
@@ -23,16 +35,16 @@
 
 ### Added
 
-- Presubmit testing in [CV][CV] (aka CQ). All tests specified with the
-`qemu_env` environment will run before every antlion CL is submitted.
-- Postsubmit testing in [CI][CI]. See [Milo][builders] for an exhaustive list of
-builders.
-- [EditorConfig](https://editorconfig.org) file for consistent coding styles.
+- Presubmit testing in [CV] (aka CQ). All tests specified with the `qemu_env`
+environment will run before every antlion CL is submitted.
+- Postsubmit testing in [CI]. See [Milo] for an exhaustive list of builders.
+- [EditorConfig] file for consistent coding styles.
 Installing an EditorConfig plugin for your editor is highly recommended.
 
 [CV]: https://chromium.googlesource.com/infra/luci/luci-go/+/refs/heads/main/cv/README.md
 [CI]: https://chromium.googlesource.com/chromium/src/+/master/docs/tour_of_luci_ui.md
-[builders]: https://luci-milo.appspot.com/ui/search?q=antlion
+[Milo]: https://luci-milo.appspot.com/ui/search?q=antlion
+[EditorConfig]: https://editorconfig.org
 
 ### Changed
 
@@ -53,7 +65,7 @@
 - Unused controllers and tests (full list)
 
 ### Fixed
-[unreleased]: https://fuchsia.googlesource.com/antlion/+/refs/tags/v0.2.0..refs/heads/main
+
 - Failure to stop session_manager using ffx in `WlanRebootTest` ([@patricklu],
 [bug](http://b/267330535))
 - Failure to parse 'test_name' in DHCP configuration file in `Dhcpv4InteropTest`
@@ -65,6 +77,8 @@
 
 ## [0.2.0] - 2023-01-03
 
+[0.2.0]: https://fuchsia.googlesource.com/antlion/+/refs/tags/v0.1.0..refs/tags/v0.2.0
+
 ### Added
 
 - Added snapshots before reboot and during test teardown in `WlanRebootTest`
@@ -77,12 +91,12 @@
 
 - All path config options in `FuchsiaDevice` expand the home directory (`~`) and
 environmental variables
-	- Used by `ssh_priv_key`, `authorized_file_loc`, and `ffx_binary_path` for
-	sensible defaults using `$FUCHSIA_DIR`
+  - Used by `ssh_priv_key`, `authorized_file_loc`, and `ffx_binary_path` for
+  sensible defaults using `$FUCHSIA_DIR`
 - Running tests works out of the box without specifying `--testpaths`
-	- Moved `tests` and `unit_tests` to the `antlion` package, enabling
-	straight-forward packaging of tests.
-	- Merged `antlion` and `antlion_contrib` packages
+  - Moved `tests` and `unit_tests` to the `antlion` package, enabling
+  straight-forward packaging of tests.
+  - Merged `antlion` and `antlion_contrib` packages
 - Converted several required dependencies to optional dependencies:
   - `bokeh` is only needed for producing HTML graphing. If this feature is
   desired, install antlion with the bokeh option: `pip install ".[bokeh]"`
@@ -102,19 +116,19 @@
 - Failure to acquire IPv6 address in `WlanRebootTest` ([bug](http://b/256009189))
 - Typo in `ChannelSweepTest` preventing use of iPerf ([@patricklu])
 - "Country code never updated" error affecting all Fuchsia ToT builds
-([@karlward], [bug](https://fxbug.dev/116500))
+([@karlward], [bug](https://fxbug.dev/42067674))
 - Parsing new stderr format from `ffx component destroy` ([@karlward],
-[bug](https://fxbug.dev/116544))
+[bug](https://fxbug.dev/42067722))
 - "Socket operation on non-socket" error during initialization of ffx on MacOS
-([@karlward], [bug](https://fxbug.dev/116626))
+([@karlward], [bug](https://fxbug.dev/42067812))
 - Python 3.8 support for IPv6 scope IDs ([bug](http://b/261746355))
 
-[0.2.0]: https://fuchsia.googlesource.com/antlion/+/refs/tags/v0.1.0..refs/tags/v0.2.0
-
 ## [0.1.0] - 2022-11-28
 
 Forked from ACTS with the following changes
 
+[0.1.0]: https://fuchsia.googlesource.com/antlion/+/refs/tags/v0.1.0
+
 ### Added
 
 - A modern approach to installation using `pyproject.toml` via `pip install .`
@@ -125,6 +139,8 @@
 - Package and import names from ACTS to antlion
 - Copyright notice from AOSP to Fuchsia Authors
 
+[src-layout]: https://setuptools.pypa.io/en/latest/userguide/package_discovery.html#src-layout
+
 ### Deprecated
 
 - Use of the `setup.py` script. This is only used to keep infrastructure
@@ -143,9 +159,6 @@
 - KeyError for 'mac_addr' in WlanDeprecatedConfigurationTest ([@sakuma],
 [bug](http://b/237709921))
 
-[0.1.0]: https://fuchsia.googlesource.com/antlion/+/refs/tags/v0.1.0
-[src-layout]: https://setuptools.pypa.io/en/latest/userguide/package_discovery.html#src-layout
-
 [@sakuma]: https://fuchsia-review.git.corp.google.com/q/owner:sakuma%2540google.com
 [@patricklu]: https://fuchsia-review.git.corp.google.com/q/owner:patricklu%2540google.com
 [@karlward]: https://fuchsia-review.git.corp.google.com/q/owner:karlward%2540google.com
diff --git a/MANIFEST.in b/MANIFEST.in
index a8ad1bb..a6caf7f 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -1,4 +1,4 @@
 include setup.py README.md
-recursive-include src/antlion *
+recursive-include packages/antlion *
 global-exclude .DS_Store
 global-exclude *.pyc
diff --git a/OWNERS b/OWNERS
index 1f83792..2f62318 100644
--- a/OWNERS
+++ b/OWNERS
@@ -1,2 +1,5 @@
-sbalana@google.com
 patricklu@google.com
+priyankach@google.com
+paulsethi@google.com
+jpavankumar@google.com
+guptaritu@google.com
diff --git a/README.md b/README.md
index 7d5950b..6c211f7 100644
--- a/README.md
+++ b/README.md
@@ -7,7 +7,7 @@
 
 [TOC]
 
-[Docs]: http://go/fxca
+[Docs]: http://go/antlion
 [Report Bug]: http://go/conn-test-bug
 [Request Feature]: http://b/issues/new?component=1182297&template=1680893
 
@@ -17,7 +17,7 @@
 enables antlion tests that do not require hardware-specific capabilities like
 WLAN. This is especially useful to verify if antlion builds and runs without
 syntax errors. If you require WLAN capabilities, see
-[below](#running-with-a-physical-device).
+[below](#running-with-a-local-physical-device).
 
 1. [Checkout Fuchsia](https://fuchsia.dev/fuchsia-src/get-started/get_fuchsia_source)
 
@@ -47,7 +47,7 @@
 5. Run an antlion test
 
    ```sh
-   fx test --e2e --output //third_party/antlion/src/antlion/tests/examples:sl4f_sanity_test
+   fx test --e2e --output //third_party/antlion/tests/examples:sl4f_sanity_test
    ```
 
 ## Running with a local physical device
@@ -55,22 +55,19 @@
 A physical device is required for most antlion tests, which rely on physical I/O
 such as WLAN and Bluetooth. Antlion is designed to make testing physical devices
 as easy, reliable, and reproducible as possible. The device will be discovered
-using mDNS, so make sure your host machine has a network connection to the
-device.
+using FFX or mDNS, so make sure your host machine has a network connection to
+the device.
 
 1. Configure and build Fuchsia for your target with the following extra
    arguments:
 
    ```sh
    fx set core.my-super-cool-product \
-      --with //src/testing/sl4f \
-      --with //src/sys/bin/start_sl4f \
-      --args='core_realm_shards += [ "//src/testing/sl4f:sl4f_core_shard" ]' \
       --with-host //third_party/antlion:e2e_tests
    fx build
    ```
 
-2. Flash your device with the new build
+2. Ensure your device is flashed with an appropriate build
 
 3. In a separate terminal, run a package server
 
@@ -81,17 +78,47 @@
 4. Run an antlion test
 
    ```sh
-   fx test --e2e --output //third_party/antlion/src/antlion/tests/functional:ping_stress_test
+   fx test --e2e --output //third_party/antlion/tests/functional:ping_stress_test
    ```
 
-> Local auxiliary devices are not yet support by `antlion-runner`, which is
-> responsible for generating Mobly configs. In the meantime, see the
-> section below for manually crafting Mobly configs to support auxiliary
-> devices.
+If you would like to include an AP in your test config:
+
+1. Run a test with an AP
+
+   ```sh
+   fx test --e2e --output //third_party/antlion/tests/functional:wlan_scan_test_without_wpa2 \
+      -- --ap-ip 192.168.1.50 --ap-ssh-port 22
+   ```
+
+If you would like to skip device discovery, or use further auxiliary devices,
+you can generate your own Mobly config.
+
+1. Write the config
+
+   ```sh
+   cat <<EOF > my-antlion-config.yaml
+   TestBeds:
+
+   - Name: antlion-runner
+     Controllers:
+        FuchsiaDevice:
+        - mdns_name: fuchsia-00e0-4c01-04df
+          ip: ::1
+          ssh_port: 8022
+   MoblyParams:
+      LogPath: logs
+   EOF
+   ```
+
+1. Run an antlion test
+
+   ```sh
+   fx test --e2e --output //third_party/antlion/tests/functional:ping_stress_test -- --config-override $PWD/my-antlion-config.yaml
+   ```
 
 ## Running without a Fuchsia checkout
 
-Requires Python 3.8+
+Requires Python 3.11+
 
 1. Clone the repo
 
@@ -135,7 +162,7 @@
 4. Run the sanity test
 
    ```sh
-   python src/antlion/tests/examples/Sl4fSanityTest.py -c simple-config.yaml
+   python tests/examples/Sl4fSanityTest.py -c simple-config.yaml
    ```
 
 ## Contributing
@@ -155,9 +182,6 @@
 - Install an [EditorConfig](https://editorconfig.org/) plugin for consistent
   whitespace
 
-- Install [Black](https://pypi.org/project/black/) our preferred code formatter.
-  Optionally, add the extension to your editor.
-
 - Complete the steps in '[Contribute source changes]' to gain authorization to
   upload CLs to Fuchsia's Gerrit.
 
@@ -166,12 +190,19 @@
 1. Create a branch (`git checkout -b feature/amazing-feature`)
 2. Make changes
 3. Document the changes in `CHANGELOG.md`
-4. Run your change through `Black` formatter
-5. Commit changes (`git add . && git commit -m 'Add some amazing feature'`)
-6. Upload CL (`git push origin HEAD:refs/for/main`)
+4. Auto-format changes (`./format.sh`)
+
+   > Note: antlion follows the [Black code style] (rather than the
+   > [Google Python Style Guide])
+
+5. Verify no typing errors (`mypy .`)
+6. Commit changes (`git add . && git commit -m 'Add some amazing feature'`)
+7. Upload CL (`git push origin HEAD:refs/for/main`)
 
 > A public bug tracker is not (yet) available.
 
+[Black code style]: https://black.readthedocs.io/en/stable/the_black_code_style/current_style.html
+[Google Python Style Guide]: https://google.github.io/styleguide/pyguide.html
 [Contribute source changes]: https://fuchsia.dev/fuchsia-src/development/source_code/contribute_changes#prerequisites
 
 ### Recommended git aliases
diff --git a/antlion_host_test.gni b/antlion_host_test.gni
index 96f7654..1d09f13 100644
--- a/antlion_host_test.gni
+++ b/antlion_host_test.gni
@@ -1,3 +1,8 @@
+# Copyright 2024 The Fuchsia Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/host.gni")
 import("//build/python/python_binary.gni")
 import("//build/rust/rustc_binary.gni")
 import("//build/testing/host_test.gni")
@@ -5,12 +10,18 @@
 
 # Declares a host-side antlion test.
 #
-# Example
+# Examples
 #
 # ```
-# antlion_host_test("Sl4fSanityTest") {
+# antlion_host_test("sl4f_sanity_test") {
 #   main_source = "Sl4fSanityTest.py"
 # }
+#
+# antlion_host_test("wlan_rvr_test_2g") {
+#   main_source = "WlanRvrTest.py"
+#   test_params = "rvr_settings.yaml"
+#   test_cases = [ "test_rvr_11n_2g_*" ]
+# }
 # ```
 #
 # Parameters
@@ -29,10 +40,15 @@
 #    to the test in the antlion config under the "test_params" key.
 #    Type: string
 #
-#  extra_args (optional)
-#    Additional arguments to pass to the test.
+#  test_cases (optional)
+#    List of test cases to run. Defaults to running all test cases.
 #    Type: list(string)
 #
+#  test_data_deps (optional)
+#    List of test data GN targets that are needed at runtime.
+#    Type: list(string)
+#    Default: empty list
+#
 #   deps
 #   environments
 #   visibility
@@ -47,12 +63,21 @@
   python_binary(_python_binary_target) {
     forward_variables_from(invoker,
                            [
+                             "enable_mypy",
                              "main_source",
                              "sources",
+                             "data_sources",
+                             "data_package_name",
                            ])
     output_name = _python_binary_name
-    main_callable = "test_runner.main" # Mobly-specific entry point.
+    main_callable = "test_runner.main"  # Mobly-specific entry point.
     deps = [ "//third_party/antlion" ]
+    if (defined(invoker.test_data_deps)) {
+      deps += invoker.test_data_deps
+    }
+    if (defined(invoker.libraries)) {
+      deps += invoker.libraries
+    }
     testonly = true
     visibility = [ ":*" ]
   }
@@ -82,7 +107,9 @@
   host_test_data(_host_test_data_ssh) {
     testonly = true
     visibility = [ ":*" ]
-    sources = [ "//prebuilt/third_party/openssh-portable/${host_os}-${host_cpu}/bin/ssh" ]
+    sources = [
+      "//prebuilt/third_party/openssh-portable/${host_os}-${host_cpu}/bin/ssh",
+    ]
     outputs = [ "${_test_dir}/ssh" ]
   }
 
@@ -106,9 +133,12 @@
   host_test_data(_host_test_data_ffx) {
     testonly = true
     visibility = [ ":*" ]
-    sources = [ get_label_info("//src/developer/ffx", "root_out_dir") + "/ffx" ]
+    sources = [ "${host_tools_dir}/ffx" ]
     outputs = [ "${_test_dir}/ffx" ]
-    deps = [ "//src/developer/ffx:ffx_bin($host_toolchain)" ]
+    deps = [
+      "//src/developer/ffx:host",
+      "//src/developer/ffx:suite_test_data",
+    ]
   }
 
   #
@@ -119,6 +149,9 @@
                            [
                              "environments",
                              "visibility",
+                             "isolated",
+                             "product_bundle",
+                             "timeout_secs",
                            ])
 
     binary_path = "${root_out_dir}/antlion-runner"
@@ -132,15 +165,24 @@
       rebase_path("${_test_dir}", root_build_dir),
       "--ffx-binary",
       rebase_path("${_test_dir}/ffx", root_build_dir),
+      "--ffx-subtools-search-path",
+      rebase_path(host_tools_dir, root_build_dir),
       "--ssh-binary",
       rebase_path("${_test_dir}/ssh", root_build_dir),
     ]
 
+    if (defined(invoker.test_cases)) {
+      args += invoker.test_cases
+    }
+
+    data_deps = [ "//src/developer/ffx:suite_test_data" ]
+
     deps = [
       ":${_host_test_data_ffx}",
       ":${_host_test_data_ssh}",
       ":${_host_test_data_target}",
       "//build/python:interpreter",
+      "//src/testing/end_to_end/honeydew",
       "//third_party/antlion/runner",
     ]
 
@@ -152,8 +194,8 @@
       deps += [ ":${_host_test_data_test_params}" ]
     }
 
-    if (defined(invoker.extra_args)) {
-      args += invoker.extra_args
+    if (defined(invoker.test_data_deps)) {
+      deps += invoker.test_data_deps
     }
   }
 }
diff --git a/environments.gni b/environments.gni
index 2bdfb53..1aee7c2 100644
--- a/environments.gni
+++ b/environments.gni
@@ -2,29 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-antlion_astro_env = {
-  dimensions = {
-    device_type = "Astro"
-    pool = "fuchsia.tests.connectivity"
-  }
-  tags = [ "antlion" ]
-}
-
-antlion_sherlock_env = {
-  dimensions = {
-    device_type = "Sherlock"
-    pool = "fuchsia.tests.connectivity"
-  }
-  tags = [ "antlion" ]
-}
-
-antlion_nelson_env = {
-  dimensions = {
-    device_type = "Nelson"
-    pool = "fuchsia.tests.connectivity"
-  }
-  tags = [ "antlion" ]
-}
+import("//build/testing/environments.gni")
 
 astro_ap_env = {
   dimensions = {
@@ -116,26 +94,100 @@
   tags = [ "antlion" ]
 }
 
+nuc11_ap_env = {
+  dimensions = {
+    access_points = "1"
+    device_type = "Intel NUC Kit NUC11TNHv5"
+    pool = "fuchsia.tests.connectivity"
+  }
+  tags = [ "antlion" ]
+}
+
+nuc11_ap_iperf_env = {
+  dimensions = {
+    access_points = "1"
+    device_type = "Intel NUC Kit NUC11TNHv5"
+    iperf_servers = "1"
+    pool = "fuchsia.tests.connectivity"
+  }
+  tags = [ "antlion" ]
+}
+
+nuc11_ap_iperf_attenuator_env = {
+  dimensions = {
+    access_points = "1"
+    attenuators = "1"
+    device_type = "Intel NUC Kit NUC11TNHv5"
+    iperf_servers = "1"
+    pool = "fuchsia.tests.connectivity"
+  }
+  tags = [ "antlion" ]
+}
+
+vim3_ap_env = {
+  dimensions = {
+    access_points = "1"
+    device_type = "Vim3"
+    pool = "fuchsia.tests.connectivity"
+  }
+  tags = [ "antlion" ]
+}
+
+vim3_ap_iperf_env = {
+  dimensions = {
+    access_points = "1"
+    device_type = "Vim3"
+    iperf_servers = "1"
+    pool = "fuchsia.tests.connectivity"
+  }
+  tags = [ "antlion" ]
+}
+
+vim3_ap_iperf_attenuator_env = {
+  dimensions = {
+    access_points = "1"
+    attenuators = "1"
+    device_type = "Vim3"
+    iperf_servers = "1"
+    pool = "fuchsia.tests.connectivity"
+  }
+  tags = [ "antlion" ]
+}
+
+# Display environments supported by antlion.
 display_envs = [
-  antlion_astro_env,
-  antlion_sherlock_env,
-  antlion_nelson_env,
+  astro_env,
+  sherlock_env,
+  nelson_env,
+  nuc11_env,
+  vim3_env,
+  wlan_astro_env,
+  wlan_sherlock_env,
+  wlan_nelson_env,
+  wlan_nuc11_env,
+  wlan_vim3_env,
 ]
 
 display_ap_envs = [
   astro_ap_env,
   sherlock_ap_env,
   nelson_ap_env,
+  nuc11_ap_env,
+  vim3_ap_env,
 ]
 
 display_ap_iperf_envs = [
   astro_ap_iperf_env,
   sherlock_ap_iperf_env,
   nelson_ap_iperf_env,
+  nuc11_ap_iperf_env,
+  vim3_ap_iperf_env,
 ]
 
 display_ap_iperf_attenuator_envs = [
   astro_ap_iperf_attenuator_env,
   sherlock_ap_iperf_attenuator_env,
   nelson_ap_iperf_attenuator_env,
+  nuc11_ap_iperf_attenuator_env,
+  vim3_ap_iperf_attenuator_env,
 ]
diff --git a/format.sh b/format.sh
new file mode 100755
index 0000000..8ede1f6
--- /dev/null
+++ b/format.sh
@@ -0,0 +1,97 @@
+#!/bin/bash
+
+# Get the directory of this script
+SCRIPT_DIR="$(dirname "$(readlink -f "${BASH_SOURCE[0]}")")"
+
+install_virtual_environment_doc() {
+    echo "Please install the virtual environment before running format.sh by running"
+    echo "the following commands:"
+    echo ""
+    echo "  cd $SCRIPT_DIR"
+    echo "  python3 -m venv .venv"
+    echo "  (source .venv/bin/activate && pip install -e \".[dev]\")"
+}
+
+if [ -f "$SCRIPT_DIR/.venv/bin/activate" ] ; then
+    source "$SCRIPT_DIR/.venv/bin/activate"
+else
+    echo ""
+    echo "====================="
+    echo "Error: Virtual environment not installed!"
+    echo "====================="
+    echo ""
+    install_virtual_environment_doc
+    echo ""
+    exit 1
+fi
+
+# Verify expected virtual environment binaries exist to prevent unintentionally running
+# different versions from outside the environment.
+#
+# Note: The virtual environment may exist without the binaries if dependencies weren't installed
+# (e.g., running `python3 -m venv .venv` without `pip install -e '.[dev]'`).
+find_venv_binary() {
+    find .venv/bin -name $1 | grep -q .
+}
+
+venv_binaries="autoflake black isort"
+all_binaries_found=true
+
+for binary in $venv_binaries; do
+    if ! find_venv_binary $binary; then
+        all_binaries_found=false
+        echo "Error: $binary not installed in virtual environment"
+    fi
+done
+
+if ! $all_binaries_found; then
+    echo ""
+    install_virtual_environment_doc
+    echo ""
+    exit 1
+fi
+
+# Detect trivial unused code.
+#
+# Automatic removal is possible, but is considered an unsafe operation. When a
+# change hasn't been committed, automatic removal could cause unintended irreversible
+# loss of in-progress code.
+#
+# Note: This cannot detect unused code between modules or packages. For complex unused
+# code detection, vulture should be used.
+autoflake \
+    --quiet \
+    --check-diff \
+    --remove-duplicate-keys \
+    --remove-unused-variables \
+    --remove-all-unused-imports \
+    --recursive .
+
+if [ $? -eq 0 ]; then
+    echo "No unused code found"
+else
+    echo ""
+    echo "====================="
+    echo "Unused code detected!"
+    echo "====================="
+    echo ""
+    echo "If these changes are trivial, consider running:"
+    echo "\"autoflake --in-place --remove-unused-variables --remove-all-unused-imports -r .\""
+    echo ""
+    read -p "Run this command to remove all unused code? [y/n] " -n 1 -r
+    echo ""
+    echo ""
+
+    if [[ $REPLY =~ ^[Yy]$ ]]; then
+        autoflake --in-place --remove-unused-variables --remove-all-unused-imports -r .
+    else
+        exit 1
+    fi
+fi
+
+# Sort imports to avoid bikeshedding.
+isort .
+
+# Format code; also to avoid bikeshedding.
+black .
+
diff --git a/src/antlion/__init__.py b/packages/antlion/__init__.py
similarity index 100%
rename from src/antlion/__init__.py
rename to packages/antlion/__init__.py
diff --git a/packages/antlion/base_test.py b/packages/antlion/base_test.py
new file mode 100755
index 0000000..35e4c24
--- /dev/null
+++ b/packages/antlion/base_test.py
@@ -0,0 +1,73 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import inspect
+import re
+from typing import Callable
+
+from mobly.base_test import BaseTestClass
+from mobly.base_test import Error as MoblyError
+
+
+class AntlionBaseTest(BaseTestClass):
+    # TODO(b/415313773): Remove this once wlanix tests are updated to use mobly's base_test.py
+    # instead of AntlionBaseTest class, as the missing functionality is now merged into Mobly.
+    def _get_test_methods(
+        self, test_names: list[str]
+    ) -> list[tuple[str, Callable[[], None]]]:
+        """Resolves test method names to bound test methods.
+
+        Args:
+            test_names: Test method names.
+
+        Returns:
+            List of tuples containing the test method name and the function implementing
+            its logic.
+
+        Raises:
+            MoblyError: test_names does not match any tests.
+        """
+
+        test_table: dict[str, Callable[[], None]] = {**self._generated_test_table}
+        for name, _ in inspect.getmembers(type(self), callable):
+            if name.startswith("test_"):
+                test_table[name] = getattr(self, name)
+
+        test_methods: list[tuple[str, Callable[[], None]]] = []
+        for test_name in test_names:
+            if test_name in test_table:
+                test_methods.append((test_name, test_table[test_name]))
+            else:
+                try:
+                    pattern = re.compile(test_name)
+                except Exception as e:
+                    raise MoblyError(
+                        f'"{test_name}" is not a valid regular expression'
+                    ) from e
+                for name in test_table:
+                    if pattern.fullmatch(name.strip()):
+                        test_methods.append((name, test_table[name]))
+
+        if len(test_methods) == 0:
+            all_patterns = '" or "'.join(test_names)
+            all_tests = "\n - ".join(test_table.keys())
+            raise MoblyError(
+                f"{self.TAG} does not declare any tests matching "
+                f'"{all_patterns}". Please verify the correctness of '
+                f"{self.TAG} test names: \n - {all_tests}"
+            )
+
+        return test_methods
diff --git a/src/antlion/capabilities/__init__.py b/packages/antlion/capabilities/__init__.py
similarity index 100%
rename from src/antlion/capabilities/__init__.py
rename to packages/antlion/capabilities/__init__.py
diff --git a/packages/antlion/capabilities/ssh.py b/packages/antlion/capabilities/ssh.py
new file mode 100644
index 0000000..1dddd55
--- /dev/null
+++ b/packages/antlion/capabilities/ssh.py
@@ -0,0 +1,456 @@
+#!/usr/bin/env python3
+#
+# Copyright 2023 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import annotations
+
+import logging
+import os
+import shlex
+import shutil
+import signal
+import subprocess
+import time
+from dataclasses import dataclass
+from typing import IO, Mapping
+
+from mobly import logger, signals
+
+from antlion.net import wait_for_port
+from antlion.runner import CalledProcessError, CalledProcessTransportError, Runner
+from antlion.types import Json
+from antlion.validation import MapValidator
+
# Default TCP port that sshd listens on. See sshd(8).
DEFAULT_SSH_PORT: int = 22
# Default time to wait for a remote SSH command to complete.
DEFAULT_SSH_TIMEOUT_SEC: float = 60.0
# Default time to wait for the SSH connection itself to be established.
DEFAULT_SSH_CONNECT_TIMEOUT_SEC: int = 90
# Default interval between SSH keep-alive probes; maps to the
# ServerAliveInterval option in ssh_config(5).
DEFAULT_SSH_SERVER_ALIVE_INTERVAL: int = 30
+
+
class SSHResult:
    """Result of an SSH command.

    Wraps a completed (or failed) subprocess, normalizing stdout/stderr
    access so callers can ask for either str or bytes regardless of which
    representation the underlying process object carried. Conversions are
    performed lazily and cached.
    """

    def __init__(
        self,
        process: (
            subprocess.CompletedProcess[bytes]
            | subprocess.CompletedProcess[str]
            | subprocess.CalledProcessError
        ),
    ) -> None:
        # Stash whichever representation the process carries; the other form
        # is derived on demand by the properties below.
        out = process.stdout
        if isinstance(out, bytes):
            self._stdout_bytes = out
        elif isinstance(out, str):
            self._stdout = out
        else:
            raise TypeError(
                "Expected process.stdout to be either bytes or str, "
                f"got {type(out)}"
            )

        err = process.stderr
        if isinstance(err, bytes):
            self._stderr_bytes = err
        elif isinstance(err, str):
            self._stderr = err
        else:
            raise TypeError(
                "Expected process.stderr to be either bytes or str, "
                f"got {type(err)}"
            )

        self._exit_status = process.returncode

    def __str__(self) -> str:
        if self.exit_status != 0:
            return (
                f'status {self.exit_status}, stdout: "{self.stdout}", '
                f'stderr: "{self.stderr}"'
            )
        return self.stdout

    @property
    def stdout(self) -> str:
        # Lazily decode from bytes on first access.
        try:
            return self._stdout
        except AttributeError:
            self._stdout = self._stdout_bytes.decode("utf-8", errors="replace")
            return self._stdout

    @property
    def stdout_bytes(self) -> bytes:
        # Lazily encode from str on first access.
        try:
            return self._stdout_bytes
        except AttributeError:
            self._stdout_bytes = self._stdout.encode()
            return self._stdout_bytes

    @property
    def stderr(self) -> str:
        # Lazily decode from bytes on first access.
        try:
            return self._stderr
        except AttributeError:
            self._stderr = self._stderr_bytes.decode("utf-8", errors="replace")
            return self._stderr

    @property
    def exit_status(self) -> int:
        return self._exit_status
+
+
class SSHError(signals.TestError):
    """A SSH command returned with a non-zero status code."""

    def __init__(
        self, command: list[str], result: CalledProcessError, elapsed_sec: float
    ):
        # Negative return codes from subprocess mean the child died from a
        # signal; translate that into a readable reason string.
        code = result.returncode
        if code >= 0:
            reason = f"unexpectedly returned {code}"
        else:
            try:
                reason = f"died with {signal.Signals(-code)}"
            except ValueError:
                # Not a signal number known on this platform.
                reason = f"died with unknown signal {-code}"

        stderr_text = result.stderr.decode("utf-8", errors="replace")
        stdout_text = result.stdout.decode("utf-8", errors="replace")
        super().__init__(
            f'SSH command "{" ".join(command)}" {reason} after {elapsed_sec:.2f}s\n'
            f"stderr: {stderr_text}\n"
            f"stdout: {stdout_text}\n"
        )
        # Keep the raw process error around for callers that inspect it.
        self.result = result
+
+
@dataclass
class SSHConfig:
    """SSH client config.

    Fields mirror standard ssh(1) flags and ssh_config(5) options, and are
    rendered into an argv by full_command().
    """

    # SSH flags. See ssh(1) for full details.
    user: str
    host_name: str
    identity_file: str

    ssh_binary: str = "ssh"
    config_file: str = "/dev/null"
    port: int = DEFAULT_SSH_PORT

    #
    # SSH options. See ssh_config(5) for full details.
    #
    connect_timeout: int = DEFAULT_SSH_CONNECT_TIMEOUT_SEC
    server_alive_interval: int = DEFAULT_SSH_SERVER_ALIVE_INTERVAL
    strict_host_key_checking: bool = False
    user_known_hosts_file: str = "/dev/null"
    log_level: str = "ERROR"

    # Force allocation of a pseudo-tty. This can be used to execute arbitrary
    # screen-based programs on a remote machine, which can be very useful, e.g.
    # when implementing menu services.
    force_tty: bool = False

    def full_command(self, command: list[str]) -> list[str]:
        """Generate the complete argv to execute command over SSH.

        Args:
            command: The command to run over SSH.

        Returns:
            Arguments composing the complete call to SSH.
        """
        return [
            self.ssh_binary,
            # SSH flags
            "-i",
            self.identity_file,
            "-F",
            self.config_file,
            "-p",
            str(self.port),
            # SSH configuration options
            "-o",
            f"ConnectTimeout={self.connect_timeout}",
            "-o",
            f"ServerAliveInterval={self.server_alive_interval}",
            "-o",
            f'StrictHostKeyChecking={"yes" if self.strict_host_key_checking else "no"}',
            "-o",
            f"UserKnownHostsFile={self.user_known_hosts_file}",
            "-o",
            f"LogLevel={self.log_level}",
            "-o",
            f'RequestTTY={"force" if self.force_tty else "auto"}',
            f"{self.user}@{self.host_name}",
        ] + command

    @staticmethod
    def from_config(config: Mapping[str, Json]) -> "SSHConfig":
        """Build an SSHConfig from a controller config mapping.

        Args:
            config: Mapping with required keys "user", "host", and
                "identity_file"; optional keys "ssh_binary_path",
                "ssh_config", "port", and "connect_timeout".

        Returns:
            A new SSHConfig.

        Raises:
            ValueError: if no ssh binary is configured and none is found in
                $PATH.
        """
        c = MapValidator(config)
        ssh_binary_path = c.get(str, "ssh_binary_path", None)
        if ssh_binary_path is None:
            # Fall back to whatever ssh binary is on the host's $PATH.
            found_path = shutil.which("ssh")
            if not isinstance(found_path, str):
                raise ValueError("Failed to find ssh in $PATH")
            ssh_binary_path = found_path

        return SSHConfig(
            user=c.get(str, "user"),
            host_name=c.get(str, "host"),
            identity_file=c.get(str, "identity_file"),
            ssh_binary=ssh_binary_path,
            config_file=c.get(str, "ssh_config", "/dev/null"),
            port=c.get(int, "port", DEFAULT_SSH_PORT),
            # NOTE(review): this default (30s) differs from the dataclass
            # default DEFAULT_SSH_CONNECT_TIMEOUT_SEC (90s); kept as-is to
            # preserve existing behavior — confirm which is intended.
            connect_timeout=c.get(int, "connect_timeout", 30),
        )
+
+
class SSHProvider(Runner):
    """Device-specific provider for SSH clients."""

    def __init__(self, config: SSHConfig) -> None:
        """
        Args:
            config: SSH client config

        Raises:
            TimeoutError: when sshd on the device never became reachable
        """
        logger_tag = f"ssh | {config.host_name}"
        if config.port != DEFAULT_SSH_PORT:
            logger_tag += f":{config.port}"

        # Escape IPv6 interface identifier if present.
        logger_tag = logger_tag.replace("%", "%%")

        self.log = logger.PrefixLoggerAdapter(
            logging.getLogger(),
            {
                logger.PrefixLoggerAdapter.EXTRA_KEY_LOG_PREFIX: f"[{logger_tag}]",
            },
        )

        self.config = config

        try:
            self.wait_until_reachable()
            self.log.info("sshd is reachable")
        except Exception as e:
            raise TimeoutError("sshd is unreachable") from e

    def wait_until_reachable(self) -> None:
        """Wait for the device to become reachable via SSH.

        Raises:
            TimeoutError: connect_timeout has expired without a successful SSH
                connection to the device
            CalledProcessTransportError: SSH is available on the device but
                connect_timeout has expired and SSH fails to run
            subprocess.TimeoutExpired: when the timeout expires while waiting
                for a child process
        """
        timeout_sec = self.config.connect_timeout
        timeout = time.time() + timeout_sec
        # First wait for the TCP port to accept connections at all.
        wait_for_port(self.config.host_name, self.config.port, timeout_sec=timeout_sec)

        while True:
            try:
                self._run(
                    ["echo"], stdin=None, timeout_sec=timeout_sec, log_output=True
                )
                return
            except CalledProcessTransportError as e:
                # Repeat if necessary; _run() can exit prematurely by receiving
                # SSH transport errors. These errors can be caused by sshd not
                # being fully initialized yet.
                if time.time() < timeout:
                    continue
                else:
                    raise e

    def wait_until_unreachable(
        self,
        interval_sec: int = 1,
        timeout_sec: int = DEFAULT_SSH_CONNECT_TIMEOUT_SEC,
    ) -> None:
        """Wait for the device to become unreachable via SSH.

        Args:
            interval_sec: Seconds to wait between unreachability attempts
            timeout_sec: Seconds to wait until raising TimeoutError

        Raises:
            TimeoutError: when timeout_sec has expired without an unsuccessful
                SSH connection to the device
        """
        timeout = time.time() + timeout_sec

        while True:
            try:
                wait_for_port(
                    self.config.host_name,
                    self.config.port,
                    timeout_sec=interval_sec,
                )
            except TimeoutError:
                # Port no longer accepts connections: device is unreachable.
                return

            # BUG FIX: this previously raised while still *within* the
            # deadline (`<`), so it aborted on the very first reachable probe
            # instead of polling until timeout_sec elapsed.
            if time.time() > timeout:
                raise TimeoutError(
                    f"Connection to {self.config.host_name} is still reachable "
                    f"after {timeout_sec}s"
                )

    def run(
        self,
        command: str | list[str],
        stdin: bytes | None = None,
        timeout_sec: float | None = DEFAULT_SSH_TIMEOUT_SEC,
        log_output: bool = True,
        connect_retries: int = 3,
    ) -> subprocess.CompletedProcess[bytes]:
        """Run a command on the device then exit.

        Args:
            command: String to send to the device.
            stdin: Standard input to command.
            timeout_sec: Seconds to wait for the command to complete.
            log_output: Whether to log the command's stdout and stderr.
            connect_retries: Amount of times to retry connect on fail.

        Raises:
            subprocess.CalledProcessError: when the process exits with a non-zero status
            subprocess.TimeoutExpired: when the timeout expires while waiting
                for a child process
            CalledProcessTransportError: when the underlying transport fails

        Returns:
            SSHResults from the executed command.
        """
        if isinstance(command, str):
            # Tokenize the command string the same way a POSIX shell would.
            s = shlex.shlex(command, posix=True, punctuation_chars=True)
            s.whitespace_split = True
            command = list(s)
        return self._run_with_retry(
            command, stdin, timeout_sec, log_output, connect_retries
        )

    def _run_with_retry(
        self,
        command: list[str],
        stdin: bytes | None,
        timeout_sec: float | None,
        log_output: bool,
        connect_retries: int,
    ) -> subprocess.CompletedProcess[bytes]:
        """Run a command, retrying on SSH transport errors.

        Args:
            command: Command and arguments to run on the device.
            stdin: Standard input to command.
            timeout_sec: Seconds to wait for the command to complete.
            log_output: Whether to log the command's stdout and stderr.
            connect_retries: Amount of times to retry connect on fail.

        Raises:
            CalledProcessTransportError: when the transport fails on all
                retries
            ValueError: when connect_retries is 0
        """
        err: Exception = ValueError("connect_retries cannot be 0")
        for _ in range(0, connect_retries):
            try:
                return self._run(command, stdin, timeout_sec, log_output)
            except CalledProcessTransportError as e:
                err = e
                self.log.warning("Connect failed: %s", e)
        # Re-raise the last transport error (or the ValueError sentinel if
        # the loop never ran).
        raise err

    def _run(
        self,
        command: list[str],
        stdin: bytes | None,
        timeout_sec: float | None,
        log_output: bool,
    ) -> subprocess.CompletedProcess[bytes]:
        """Run a command to completion, translating SSH transport failures.

        Args:
            command: Command and arguments to run on the device.
            stdin: Standard input to command.
            timeout_sec: Seconds to wait for the command to complete.
            log_output: Whether to log the command's stdout and stderr.

        Raises:
            CalledProcessError: when the process exits with a non-zero status
            subprocess.TimeoutExpired: when the timeout expires while waiting
                for a child process
            CalledProcessTransportError: when the underlying transport fails
        """
        start = time.perf_counter()
        with self.start(command) as process:
            try:
                stdout, stderr = process.communicate(stdin, timeout_sec)
            except subprocess.TimeoutExpired as e:
                process.kill()
                process.wait()
                raise e
            except:  # Including KeyboardInterrupt, communicate handled that.
                process.kill()
                # We don't call process.wait() as Popen.__exit__ does that for
                # us.
                raise

            elapsed = time.perf_counter() - start
            exit_code = process.poll()

            if log_output:
                self.log.debug(
                    "Command %s exited with %d after %.2fs\nstdout: %s\nstderr: %s",
                    " ".join(command),
                    exit_code,
                    elapsed,
                    stdout.decode("utf-8", errors="replace"),
                    stderr.decode("utf-8", errors="replace"),
                )
            else:
                self.log.debug(
                    "Command %s exited with %d after %.2fs",
                    " ".join(command),
                    exit_code,
                    elapsed,
                )

            if exit_code is None:
                # communicate() returned above, so the process should always
                # have terminated by this point.
                raise ValueError(
                    f'Expected process to be terminated: "{" ".join(command)}"'
                )

            if exit_code:
                err = CalledProcessError(
                    exit_code, process.args, output=stdout, stderr=stderr
                )

                # ssh exits with 255 for client/transport errors, as opposed
                # to the remote command's own exit status. See ssh(1).
                if err.returncode == 255:
                    reason = stderr.decode("utf-8", errors="replace")
                    if (
                        "Name or service not known" in reason
                        or "Host does not exist" in reason
                    ):
                        raise CalledProcessTransportError(
                            f"Hostname {self.config.host_name} cannot be resolved to an address"
                        ) from err
                    if "Connection timed out" in reason:
                        raise CalledProcessTransportError(
                            f"Failed to establish a connection to {self.config.host_name} within {timeout_sec}s"
                        ) from err
                    if "Connection refused" in reason:
                        raise CalledProcessTransportError(
                            f"Connection refused by {self.config.host_name}"
                        ) from err

                raise err

        return subprocess.CompletedProcess(process.args, exit_code, stdout, stderr)

    def run_async(self, command: str) -> subprocess.CompletedProcess[bytes]:
        """Run a command on the device without waiting for completion.

        Args:
            command: String to send to the device.

        Returns:
            A CompletedProcess whose stdout holds the PID of the local ssh
            client process.
        """
        s = shlex.shlex(command, posix=True, punctuation_chars=True)
        s.whitespace_split = True
        command_split = list(s)

        # NOTE(review): the Popen handle is not waited on here; callers are
        # responsible for the lifetime of the spawned ssh client.
        process = self.start(command_split)
        return subprocess.CompletedProcess(
            self.config.full_command(command_split),
            returncode=0,
            stdout=str(process.pid).encode("utf-8"),
            stderr=None,
        )

    def start(
        self,
        command: list[str],
        stdout: IO[bytes] | int = subprocess.PIPE,
        stdin: IO[bytes] | int = subprocess.PIPE,
    ) -> subprocess.Popen[bytes]:
        """Start a command on the device and return without waiting.

        Args:
            command: Command and arguments to run on the device.
            stdout: Destination for the process's standard output.
            stdin: Source of the process's standard input.

        Returns:
            The local ssh client process.
        """
        full_command = self.config.full_command(command)
        self.log.debug(
            f"Starting: {' '.join(command)}\nFull command: {' '.join(full_command)}"
        )
        # setpgrp places the child in its own process group — presumably so
        # signals aimed at the test runner don't hit the ssh client; confirm.
        return subprocess.Popen(
            full_command,
            stdin=stdin,
            stdout=stdout if stdout else subprocess.PIPE,
            stderr=subprocess.PIPE,
            preexec_fn=os.setpgrp,
        )
diff --git a/src/antlion/context.py b/packages/antlion/context.py
similarity index 90%
rename from src/antlion/context.py
rename to packages/antlion/context.py
index cfe9df8..3f2481f 100644
--- a/src/antlion/context.py
+++ b/packages/antlion/context.py
@@ -19,13 +19,14 @@
 import os
 
 from antlion.event import event_bus
-from antlion.event.event import Event
-from antlion.event.event import TestCaseBeginEvent
-from antlion.event.event import TestCaseEndEvent
-from antlion.event.event import TestCaseEvent
-from antlion.event.event import TestClassBeginEvent
-from antlion.event.event import TestClassEndEvent
-from antlion.event.event import TestClassEvent
+from antlion.event.event import (
+    Event,
+    TestCaseBeginEvent,
+    TestCaseEndEvent,
+    TestClassBeginEvent,
+    TestClassEndEvent,
+    TestClassEvent,
+)
 
 
 class ContextLevel(enum.IntEnum):
@@ -51,25 +52,6 @@
     return _contexts[min(depth, len(_contexts) - 1)]
 
 
-def get_context_for_event(event):
-    """Creates and returns a TestContext from the given event.
-    A TestClassContext is created for a TestClassEvent, and a TestCaseContext
-    is created for a TestCaseEvent.
-
-    Args:
-        event: An instance of TestCaseEvent or TestClassEvent.
-
-    Returns: An instance of TestContext corresponding to the event.
-
-    Raises: TypeError if event is neither a TestCaseEvent nor TestClassEvent
-    """
-    if isinstance(event, TestCaseEvent):
-        return _get_context_for_test_case_event(event)
-    if isinstance(event, TestClassEvent):
-        return _get_context_for_test_class_event(event)
-    raise TypeError("Unrecognized event type: %s %s", event, event.__class__)
-
-
 def _get_context_for_test_case_event(event):
     """Generate a TestCaseContext from the given TestCaseEvent."""
     return TestCaseContext(event.test_class, event.test_case)
@@ -339,7 +321,7 @@
 
     @property
     def identifier(self):
-        return "%s.%s" % (self.test_class_name, self.test_case_name)
+        return f"{self.test_class_name}.{self.test_case_name}"
 
     def _get_default_context_dir(self):
         """Gets the default output directory for this context.
diff --git a/src/antlion/controllers/OWNERS b/packages/antlion/controllers/OWNERS
similarity index 62%
rename from src/antlion/controllers/OWNERS
rename to packages/antlion/controllers/OWNERS
index ea76291..5e69e8c 100644
--- a/src/antlion/controllers/OWNERS
+++ b/packages/antlion/controllers/OWNERS
@@ -1,5 +1,5 @@
 per-file asus_axe11000_ap.py = martschneider@google.com
-per-file fuchsia_device.py = chcl@google.com, dhobsd@google.com, haydennix@google.com, jmbrenna@google.com, mnck@google.com, nickchee@google.com, sbalana@google.com, silberst@google.com, tturney@google.com
+per-file fuchsia_device.py = chcl@google.com, haydennix@google.com, jmbrenna@google.com, mnck@google.com, nickchee@google.com, sbalana@google.com, silberst@google.com, tturney@google.com
 per-file bluetooth_pts_device.py = tturney@google.com
 per-file cellular_simulator.py = iguarna@google.com, chaoyangf@google.com, codycaldwell@google.com, yixiang@google.com
 per-file openwrt_ap.py = jerrypcchen@google.com, martschneider@google.com, gmoturu@google.com, sishichen@google.com
diff --git a/packages/antlion/controllers/__init__.py b/packages/antlion/controllers/__init__.py
new file mode 100644
index 0000000..6d1ae5a
--- /dev/null
+++ b/packages/antlion/controllers/__init__.py
@@ -0,0 +1,47 @@
+#!/usr/bin/env python3
+#
+# Copyright 2024 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from . import (
+    access_point,
+    adb,
+    android_device,
+    attenuator,
+    fastboot,
+    fuchsia_device,
+    iperf_client,
+    iperf_server,
+    openwrt_ap,
+    packet_capture,
+    pdu,
+    sniffer,
+)
+
+# Reexport so static type checkers can find these modules when importing and
+# using antlion.controllers instead of "from antlion.controller import ..."
+__all__ = [
+    "access_point",
+    "adb",
+    "android_device",
+    "attenuator",
+    "fastboot",
+    "fuchsia_device",
+    "iperf_client",
+    "iperf_server",
+    "openwrt_ap",
+    "packet_capture",
+    "pdu",
+    "sniffer",
+]
diff --git a/src/antlion/controllers/access_point.py b/packages/antlion/controllers/access_point.py
similarity index 67%
rename from src/antlion/controllers/access_point.py
rename to packages/antlion/controllers/access_point.py
index 91a241d..36143a8 100755
--- a/src/antlion/controllers/access_point.py
+++ b/packages/antlion/controllers/access_point.py
@@ -14,13 +14,17 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+from __future__ import annotations
+
 import ipaddress
+import logging
+import os
 import time
-
 from dataclasses import dataclass
-from typing import Any, Dict, FrozenSet, List, Optional, Set, Tuple
+from typing import Any, FrozenSet
 
-from antlion import logger
+from mobly import logger
+
 from antlion import utils
 from antlion.capabilities.ssh import SSHConfig, SSHProvider
 from antlion.controllers.ap_lib import hostapd_constants
@@ -40,13 +44,15 @@
     BssTransitionManagementRequest,
 )
 from antlion.controllers.pdu import PduDevice, get_pdu_port_for_device
-from antlion.controllers.utils_lib.commands import ip
-from antlion.controllers.utils_lib.commands import route
-from antlion.controllers.utils_lib.ssh import connection
-from antlion.controllers.utils_lib.ssh import settings
-from antlion.libs.proc import job
+from antlion.controllers.utils_lib.commands import command, ip, journalctl, route
+from antlion.controllers.utils_lib.commands.date import LinuxDateCommand
+from antlion.controllers.utils_lib.commands.tcpdump import LinuxTcpdumpCommand
+from antlion.controllers.utils_lib.ssh import connection, settings
+from antlion.runner import CalledProcessError
+from antlion.types import ControllerConfig, Json
+from antlion.validation import MapValidator
 
-MOBLY_CONTROLLER_CONFIG_NAME = "AccessPoint"
+MOBLY_CONTROLLER_CONFIG_NAME: str = "AccessPoint"
 ACTS_CONTROLLER_REFERENCE_NAME = "access_points"
 
 
@@ -70,7 +76,45 @@
 BRIDGE_IP_LAST = "100"
 
 
-class AccessPoint(object):
def create(configs: list[ControllerConfig]) -> list[AccessPoint]:
    """Creates AccessPoint controllers from a json config.

    Args:
        configs: The json configs that represent this controller.

    Returns:
        A list of AccessPoints, one per config.
    """
    return [AccessPoint(c) for c in configs]
+
+
def destroy(objects: list[AccessPoint]) -> None:
    """Destroys a list of access points.

    Args:
        objects: The list of access points to destroy.
    """
    for ap in objects:
        ap.close()
+
+
def get_info(objects: list[AccessPoint]) -> list[Json]:
    """Get information on a list of access points.

    Args:
        objects: A list of AccessPoints.

    Returns:
        A list of the hostname of each access point.
    """
    return [ap.ssh_settings.hostname for ap in objects]
+
+
+class AccessPoint:
     """An access point controller.
 
     Attributes:
@@ -79,27 +123,28 @@
         dhcp_settings: The dhcp server settings being used.
     """
 
-    def __init__(self, configs: Dict[str, Any]) -> None:
+    def __init__(self, config: ControllerConfig) -> None:
         """
         Args:
             configs: configs for the access point from config file.
         """
-        self.ssh_settings = settings.from_config(configs["ssh_config"])
-        self.log = logger.create_logger(
-            lambda msg: f"[Access Point|{self.ssh_settings.hostname}] {msg}"
+        c = MapValidator(config)
+        self.ssh_settings = settings.from_config(c.get(dict, "ssh_config"))
+        self.log = logger.PrefixLoggerAdapter(
+            logging.getLogger(),
+            {
+                logger.PrefixLoggerAdapter.EXTRA_KEY_LOG_PREFIX: f"[Access Point|{self.ssh_settings.hostname}]",
+            },
         )
-        self.device_pdu_config = configs.get("PduDevice", None)
+        self.device_pdu_config = c.get(dict, "PduDevice", None)
         self.identifier = self.ssh_settings.hostname
 
-        if "ap_subnet" in configs:
-            self._AP_2G_SUBNET_STR: str = configs["ap_subnet"]["2g"]
-            self._AP_5G_SUBNET_STR: str = configs["ap_subnet"]["5g"]
-        else:
-            self._AP_2G_SUBNET_STR = _AP_2GHZ_SUBNET_STR_DEFAULT
-            self._AP_5G_SUBNET_STR = _AP_5GHZ_SUBNET_STR_DEFAULT
+        subnet = MapValidator(c.get(dict, "ap_subnet", {}))
+        self._AP_2G_SUBNET_STR = subnet.get(str, "2g", _AP_2GHZ_SUBNET_STR_DEFAULT)
+        self._AP_5G_SUBNET_STR = subnet.get(str, "5g", _AP_5GHZ_SUBNET_STR_DEFAULT)
 
-        self._AP_2G_SUBNET = Subnet(ipaddress.ip_network(self._AP_2G_SUBNET_STR))
-        self._AP_5G_SUBNET = Subnet(ipaddress.ip_network(self._AP_5G_SUBNET_STR))
+        self._AP_2G_SUBNET = Subnet(ipaddress.IPv4Network(self._AP_2G_SUBNET_STR))
+        self._AP_5G_SUBNET = Subnet(ipaddress.IPv4Network(self._AP_5G_SUBNET_STR))
 
         self.ssh = connection.SshConnection(self.ssh_settings)
 
@@ -116,21 +161,24 @@
         )
 
         # Singleton utilities for running various commands.
-        self._ip_cmd = ip.LinuxIpCommand(self.ssh)
-        self._route_cmd = route.LinuxRouteCommand(self.ssh)
+        self._ip_cmd = command.require(ip.LinuxIpCommand(self.ssh))
+        self._route_cmd = command.require(route.LinuxRouteCommand(self.ssh))
+        self._journalctl_cmd = command.require(
+            journalctl.LinuxJournalctlCommand(self.ssh)
+        )
 
         # A map from network interface name to _ApInstance objects representing
         # the hostapd instance running against the interface.
-        self._aps: Dict[str, _ApInstance] = dict()
-        self._dhcp: Optional[DhcpServer] = None
-        self._dhcp_bss: Dict[Any, Subnet] = dict()
-        self._radvd: Optional[Radvd] = None
+        self._aps: dict[str, _ApInstance] = dict()
+        self._dhcp: DhcpServer | None = None
+        self._dhcp_bss: dict[str, Subnet] = dict()
+        self._radvd: Radvd | None = None
         self.bridge = BridgeInterface(self)
         self.iwconfig = ApIwconfig(self)
 
         # Check to see if wan_interface is specified in acts_config for tests
         # isolated from the internet and set this override.
-        self.interfaces = ApInterfaces(self, configs.get("wan_interface"))
+        self.interfaces = ApInterfaces(self, c.get(str, "wan_interface", None))
 
         # Get needed interface names and initialize the unnecessary ones.
         self.wan = self.interfaces.get_wan_interface()
@@ -141,6 +189,13 @@
         self._initial_ap()
         self.setup_bridge = False
 
+        # Allow use of tcpdump
+        self.tcpdump = LinuxTcpdumpCommand(self.ssh_provider)
+
+        # Access points are not given internet access, so their system time needs to be
+        # manually set to be accurate.
+        LinuxDateCommand(self.ssh_provider).sync()
+
     def _initial_ap(self) -> None:
         """Initial AP interfaces.
 
@@ -153,11 +208,11 @@
         # process, otherwise test would fail.
         try:
             self.ssh.run("stop wpasupplicant")
-        except job.Error:
+        except CalledProcessError:
             self.log.info("No wpasupplicant running")
         try:
             self.ssh.run("stop hostapd")
-        except job.Error:
+        except CalledProcessError:
             self.log.info("No hostapd running")
         # Bring down all wireless interfaces
         for iface in self.wlan:
@@ -165,21 +220,20 @@
             self.ssh.run(WLAN_DOWN)
         # Bring down all bridge interfaces
         bridge_interfaces = self.interfaces.get_bridge_interface()
-        if bridge_interfaces:
-            for iface in bridge_interfaces:
-                BRIDGE_DOWN = f"ip link set {iface} down"
-                BRIDGE_DEL = f"brctl delbr {iface}"
-                self.ssh.run(BRIDGE_DOWN)
-                self.ssh.run(BRIDGE_DEL)
+        for iface in bridge_interfaces:
+            BRIDGE_DOWN = f"ip link set {iface} down"
+            BRIDGE_DEL = f"brctl delbr {iface}"
+            self.ssh.run(BRIDGE_DOWN)
+            self.ssh.run(BRIDGE_DEL)
 
     def start_ap(
         self,
         hostapd_config: HostapdConfig,
-        radvd_config: RadvdConfig = None,
+        radvd_config: RadvdConfig | None = None,
         setup_bridge: bool = False,
         is_nat_enabled: bool = True,
-        additional_parameters: Dict[str, Any] = None,
-    ) -> List[Any]:
+        additional_parameters: dict[str, Any] | None = None,
+    ) -> list[str]:
         """Starts as an ap using a set of configurations.
 
         This will start an ap on this host. To start an ap the controller
@@ -208,6 +262,9 @@
         Raises:
             Error: When the ap can't be brought up.
         """
+        if additional_parameters is None:
+            additional_parameters = {}
+
         if hostapd_config.frequency < 5000:
             interface = self.wlan_2g
             subnet = self._AP_2G_SUBNET
@@ -229,20 +286,35 @@
         # of the wireless interface needs to have enough space to mask out
         # up to 8 different mac addresses. So in for one interface the range is
         # hex 0-7 and for the other the range is hex 8-f.
-        interface_mac_orig = None
-        cmd = f"ip link show {interface}|grep ether|awk -F' ' '{{print $2}}'"
-        interface_mac_orig = self.ssh.run(cmd)
+        ip = self.ssh.run(["ip", "link", "show", interface])
+
+        # Example output:
+        # 5: wlan0: <BROADCAST,MULTICAST> mtu 1500 qdisc mq state DOWN mode DEFAULT group default qlen 1000
+        #     link/ether f4:f2:6d:aa:99:28 brd ff:ff:ff:ff:ff:ff
+
+        lines = ip.stdout.decode("utf-8").splitlines()
+        if len(lines) != 2:
+            raise RuntimeError(f"Expected 2 lines from ip link show, got {len(lines)}")
+        tokens = lines[1].split()
+        if len(tokens) != 4:
+            raise RuntimeError(
+                f"Expected 4 tokens from ip link show, got {len(tokens)}"
+            )
+        interface_mac_orig = tokens[1]
+
         if interface == self.wlan_5g:
-            hostapd_config.bssid = interface_mac_orig.stdout[:-1] + "0"
+            hostapd_config.bssid = f"{interface_mac_orig[:-1]}0"
             last_octet = 1
-        if interface == self.wlan_2g:
-            hostapd_config.bssid = interface_mac_orig.stdout[:-1] + "8"
+        elif interface == self.wlan_2g:
+            hostapd_config.bssid = f"{interface_mac_orig[:-1]}8"
             last_octet = 9
-        if interface in self._aps:
+        elif interface in self._aps:
             raise ValueError(
                 "No WiFi interface available for AP on "
                 f"channel {hostapd_config.channel}"
             )
+        else:
+            raise ValueError(f"Invalid WLAN interface: {interface}")
 
         apd = Hostapd(self.ssh, interface)
         new_instance = _ApInstance(hostapd=apd, subnet=subnet)
@@ -257,7 +329,7 @@
         # on the AP, but not for traffic handled by the Linux networking stack
         # such as ping.
         if radvd_config:
-            self._route_cmd.add_route(interface, "fe80::/64")
+            self._route_cmd.add_route(interface, ipaddress.IPv6Interface("fe80::/64"))
 
         self._dhcp_bss = dict()
         if hostapd_config.bss_lookup:
@@ -270,19 +342,18 @@
             # hostapd interfaces and not the DHCP servers for each
             # interface.
             counter = 1
-            for bss in hostapd_config.bss_lookup:
-                if interface_mac_orig:
-                    hostapd_config.bss_lookup[bss].bssid = (
-                        interface_mac_orig.stdout[:-1] + hex(last_octet)[-1:]
-                    )
-                self._route_cmd.clear_routes(net_interface=str(bss))
+            for iface in hostapd_config.bss_lookup:
+                hostapd_config.bss_lookup[iface].bssid = (
+                    interface_mac_orig[:-1] + hex(last_octet)[-1:]
+                )
+                self._route_cmd.clear_routes(net_interface=str(iface))
                 if interface is self.wlan_2g:
                     starting_ip_range = self._AP_2G_SUBNET_STR
                 else:
                     starting_ip_range = self._AP_5G_SUBNET_STR
                 a, b, c, d = starting_ip_range.split(".")
-                self._dhcp_bss[bss] = Subnet(
-                    ipaddress.ip_network(f"{a}.{b}.{int(c) + counter}.{d}")
+                self._dhcp_bss[iface] = Subnet(
+                    ipaddress.IPv4Network(f"{a}.{b}.{int(c) + counter}.{d}")
                 )
                 counter = counter + 1
                 last_octet = last_octet + 1
@@ -291,12 +362,15 @@
 
         # The DHCP serer requires interfaces to have ips and routes before
         # the server will come up.
-        interface_ip = ipaddress.ip_interface(
-            f"{subnet.router}/{subnet.network.netmask}"
+        interface_ip = ipaddress.IPv4Interface(
+            f"{subnet.router}/{subnet.network.prefixlen}"
         )
         if setup_bridge is True:
             bridge_interface_name = "eth_test"
-            self.create_bridge(bridge_interface_name, [interface, self.lan])
+            interfaces = [interface]
+            if self.lan:
+                interfaces.append(self.lan)
+            self.create_bridge(bridge_interface_name, interfaces)
             self._ip_cmd.set_ipv4_address(bridge_interface_name, interface_ip)
         else:
             self._ip_cmd.set_ipv4_address(interface, interface_ip)
@@ -305,11 +379,11 @@
             # hostapd and assigns the DHCP scopes that were defined but
             # not used during the hostapd loop above.  The k and v
             # variables represent the interface name, k, and dhcp info, v.
-            for k, v in self._dhcp_bss.items():
-                bss_interface_ip = ipaddress.ip_interface(
-                    f"{self._dhcp_bss[k].router}/{self._dhcp_bss[k].network.netmask}"
+            for iface, subnet in self._dhcp_bss.items():
+                bss_interface_ip = ipaddress.IPv4Interface(
+                    f"{subnet.router}/{subnet.network.prefixlen}"
                 )
-                self._ip_cmd.set_ipv4_address(str(k), bss_interface_ip)
+                self._ip_cmd.set_ipv4_address(iface, bss_interface_ip)
 
         # Restart the DHCP server with our updated list of subnets.
         configured_subnets = self.get_configured_subnets()
@@ -333,7 +407,7 @@
 
         return bss_interfaces
 
-    def get_configured_subnets(self) -> List[Subnet]:
+    def get_configured_subnets(self) -> list[Subnet]:
         """Get the list of configured subnets on the access point.
 
         This allows consumers of the access point objects create custom DHCP
@@ -357,16 +431,22 @@
         Raises:
             Error: Raised when a dhcp server error is found.
         """
-        self._dhcp.start(config=dhcp_conf)
+        if self._dhcp is not None:
+            self._dhcp.start(config=dhcp_conf)
 
     def stop_dhcp(self) -> None:
         """Stop DHCP for this AP object.
 
         This allows consumers of the access point objects to control DHCP.
         """
-        self._dhcp.stop()
+        if self._dhcp is not None:
+            self._dhcp.stop()
 
-    def get_dhcp_logs(self) -> Optional[str]:
+    def get_systemd_journal(self) -> str:
+        """Get systemd journal logs from this current boot."""
+        return self._journalctl_cmd.logs()
+
+    def get_dhcp_logs(self) -> str | None:
         """Get DHCP logs for this AP object.
 
         This allows consumers of the access point objects to validate DHCP
@@ -376,11 +456,11 @@
             A string of the dhcp server logs, or None is a DHCP server has not
             been started.
         """
-        if self._dhcp:
+        if self._dhcp is not None:
             return self._dhcp.get_logs()
         return None
 
-    def get_hostapd_logs(self) -> Dict[str, str]:
+    def get_hostapd_logs(self) -> dict[str, str]:
         """Get hostapd logs for all interfaces on AP object.
 
         This allows consumers of the access point objects to validate hostapd
@@ -388,12 +468,12 @@
 
         Returns: A dict with {interface: log} from hostapd instances.
         """
-        hostapd_logs = dict()
-        for identifier in self._aps:
-            hostapd_logs[identifier] = self._aps.get(identifier).hostapd.pull_logs()
+        hostapd_logs: dict[str, str] = dict()
+        for iface, ap in self._aps.items():
+            hostapd_logs[iface] = ap.hostapd.pull_logs()
         return hostapd_logs
 
-    def get_radvd_logs(self) -> Optional[str]:
+    def get_radvd_logs(self) -> str | None:
         """Get radvd logs for this AP object.
 
         This allows consumers of the access point objects to validate radvd
@@ -407,6 +487,49 @@
             return self._radvd.pull_logs()
         return None
 
+    def download_ap_logs(self, path: str) -> None:
+        """Download all available logs to path.
+
+        This convenience method gets all available logs (DHCP, hostapd,
+        radvd, and the systemd journal) and writes them to the given path.
+
+        Args:
+            path: Path to write logs to.
+        """
+        timestamp = logger.normalize_log_line_timestamp(
+            logger.epoch_to_log_line_timestamp(utils.get_current_epoch_time())
+        )
+
+        dhcp_log = self.get_dhcp_logs()
+        if dhcp_log:
+            dhcp_log_path = os.path.join(path, f"ap_dhcp_{timestamp}.log")
+            with open(dhcp_log_path, "a") as f:
+                f.write(dhcp_log)
+            self.log.debug(f"Wrote DHCP logs to {dhcp_log_path}")
+
+        hostapd_logs = self.get_hostapd_logs()
+        for interface in hostapd_logs:
+            hostapd_log_path = os.path.join(
+                path,
+                f"ap_hostapd_{interface}_{timestamp}.log",
+            )
+            with open(hostapd_log_path, "a") as f:
+                f.write(hostapd_logs[interface])
+            self.log.debug(f"Wrote hostapd logs to {hostapd_log_path}")
+
+        radvd_log = self.get_radvd_logs()
+        if radvd_log:
+            radvd_log_path = os.path.join(path, f"ap_radvd_{timestamp}.log")
+            with open(radvd_log_path, "a") as f:
+                f.write(radvd_log)
+            self.log.debug(f"Wrote radvd logs to {radvd_log_path}")
+
+        systemd_journal = self.get_systemd_journal()
+        systemd_journal_path = os.path.join(path, f"ap_systemd_{timestamp}.log")
+        with open(systemd_journal_path, "a") as f:
+            f.write(systemd_journal)
+        self.log.debug(f"Wrote systemd journal to {systemd_journal_path}")
+
     def enable_forwarding(self) -> None:
         """Enable IPv4 and IPv6 forwarding on the AP.
 
@@ -443,7 +566,7 @@
         """
         self.ssh.run("iptables -t nat -F")
 
-    def create_bridge(self, bridge_name: str, interfaces: List[str]) -> None:
+    def create_bridge(self, bridge_name: str, interfaces: list[str]) -> None:
         """Create the specified bridge and bridge the specified interfaces.
 
         Args:
@@ -475,39 +598,54 @@
 
         # If the bridge exists, we'll get an exit_status of 0, indicating
         # success, so we can continue and remove the bridge.
-        if result.exit_status == 0:
+        if result.returncode == 0:
             self.ssh.run(f"ip link set {bridge_name} down")
             self.ssh.run(f"brctl delbr {bridge_name}")
 
-    def get_bssid_from_ssid(self, ssid: str, band: str) -> Optional[str]:
+    def get_bssid_from_ssid(self, ssid: str, band: hostapd_constants.BandType) -> str:
         """Gets the BSSID from a provided SSID
 
         Args:
             ssid: An SSID string.
             band: 2G or 5G Wifi band.
-        Returns: The BSSID if on the AP or None if SSID could not be found.
+
+        Returns:
+            The BSSID of the AP hosting the given SSID on the given band.
+
+        Raises:
+            RuntimeError: when interface, ssid, or addr cannot be found.
         """
-        if band == hostapd_constants.BAND_2G:
-            interfaces = [self.wlan_2g, ssid]
-        else:
-            interfaces = [self.wlan_5g, ssid]
+        match band:
+            case hostapd_constants.BandType.BAND_2G:
+                interface = self.wlan_2g
+            case hostapd_constants.BandType.BAND_5G:
+                interface = self.wlan_5g
 
         # Get the interface name associated with the given ssid.
-        for interface in interfaces:
-            iw_output = self.ssh.run(
-                f"iw dev {interface} info|grep ssid|awk -F' ' '{{print $2}}'"
-            )
-            if "command failed: No such device" in iw_output.stderr:
-                continue
-            else:
-                # If the configured ssid is equal to the given ssid, we found
-                # the right interface.
-                if iw_output.stdout == ssid:
-                    iw_output = self.ssh.run(
-                        f"iw dev {interface} info|grep addr|awk -F' ' '{{print $2}}'"
-                    )
-                    return iw_output.stdout
-        return None
+        iw = self.ssh.run(["iw", "dev", interface, "info"])
+        if b"command failed: No such device" in iw.stderr:
+            raise RuntimeError(f'iw dev did not contain interface "{interface}"')
+
+        iw_out = iw.stdout.decode("utf-8")
+        iw_lines = iw_out.splitlines()
+
+        for line in iw_lines:
+            if "ssid" in line and ssid in line:
+                # Found the right interface.
+                for line in iw_lines:
+                    if "addr" in line:
+                        tokens = line.split()
+                        if len(tokens) != 2:
+                            raise RuntimeError(
+                                f"Expected iw dev info addr to have 2 tokens, got {tokens}"
+                            )
+                        return tokens[1]
+
+                raise RuntimeError(
+                    f"iw dev info contained ssid but not addr: \n{iw_out}"
+                )
+
+        raise RuntimeError(f'iw dev did not contain ssid "{ssid}"')
 
     def stop_ap(self, identifier: str) -> None:
         """Stops a running ap on this controller.
@@ -516,10 +654,9 @@
             identifier: The identify of the ap that should be taken down.
         """
 
-        if identifier not in list(self._aps.keys()):
-            raise ValueError(f"Invalid identifier {identifier} given")
-
         instance = self._aps.get(identifier)
+        if instance is None:
+            raise ValueError(f"Invalid identifier {identifier} given")
 
         if self._radvd:
             self._radvd.stop()
@@ -533,12 +670,11 @@
 
         del self._aps[identifier]
         bridge_interfaces = self.interfaces.get_bridge_interface()
-        if bridge_interfaces:
-            for iface in bridge_interfaces:
-                BRIDGE_DOWN = f"ip link set {iface} down"
-                BRIDGE_DEL = f"brctl delbr {iface}"
-                self.ssh.run(BRIDGE_DOWN)
-                self.ssh.run(BRIDGE_DEL)
+        for iface in bridge_interfaces:
+            BRIDGE_DOWN = f"ip link set {iface} down"
+            BRIDGE_DEL = f"brctl delbr {iface}"
+            self.ssh.run(BRIDGE_DOWN)
+            self.ssh.run(BRIDGE_DEL)
 
     def stop_all_aps(self) -> None:
         """Stops all running aps on this device."""
@@ -557,7 +693,7 @@
             self.stop_all_aps()
         self.ssh.close()
 
-    def generate_bridge_configs(self, channel: int) -> Tuple[str, Optional[str], str]:
+    def generate_bridge_configs(self, channel: int) -> tuple[str, str | None, str]:
         """Generate a list of configs for a bridge between LAN and WLAN.
 
         Args:
@@ -588,8 +724,8 @@
         interval: int = 1000,
         timeout: int = 1000,
         size: int = 56,
-        additional_ping_params: Optional[Any] = None,
-    ) -> Dict[str, Any]:
+        additional_ping_params: str = "",
+    ) -> utils.PingResult:
         """Pings from AP to dest_ip, returns dict of ping stats (see utils.ping)"""
         return utils.ping(
             self.ssh,
@@ -601,43 +737,15 @@
             additional_ping_params=additional_ping_params,
         )
 
-    def can_ping(
-        self,
-        dest_ip: str,
-        count: int = 1,
-        interval: int = 1000,
-        timeout: int = 1000,
-        size: int = 56,
-        additional_ping_params: Optional[Any] = None,
-    ) -> bool:
-        """Returns whether ap can ping dest_ip (see utils.can_ping)"""
-        return utils.can_ping(
-            self.ssh,
-            dest_ip,
-            count=count,
-            interval=interval,
-            timeout=timeout,
-            size=size,
-            additional_ping_params=additional_ping_params,
-        )
-
     def hard_power_cycle(
         self,
-        pdus: List[PduDevice],
-        hostapd_configs: Optional[List[HostapdConfig]] = None,
+        pdus: list[PduDevice],
     ) -> None:
         """Kills, then restores power to AccessPoint, verifying it goes down and
         comes back online cleanly.
 
         Args:
             pdus: PDUs in the testbed
-            hostapd_configs: Hostapd settings. If present, these networks will
-                be spun up after the AP has rebooted. This list can either
-                contain HostapdConfig objects, or dictionaries with the start_ap
-                params
-                    (i.e  { 'hostapd_config': <HostapdConfig>,
-                            'setup_bridge': <bool>,
-                            'additional_parameters': <dict> } ).
         Raise:
             Error, if no PduDevice is provided in AccessPoint config.
             ConnectionError, if AccessPoint fails to go offline or come back.
@@ -645,14 +753,13 @@
         if not self.device_pdu_config:
             raise Error("No PduDevice provided in AccessPoint config.")
 
-        if hostapd_configs is None:
-            hostapd_configs = []
+        self._journalctl_cmd.save_and_reset()
 
-        self.log.info(f"Power cycling")
+        self.log.info("Power cycling")
         ap_pdu, ap_pdu_port = get_pdu_port_for_device(self.device_pdu_config, pdus)
 
-        self.log.info(f"Killing power")
-        ap_pdu.off(str(ap_pdu_port))
+        self.log.info("Killing power")
+        ap_pdu.off(ap_pdu_port)
 
         self.log.info("Verifying AccessPoint is unreachable.")
         self.ssh_provider.wait_until_unreachable()
@@ -660,8 +767,8 @@
 
         self._aps.clear()
 
-        self.log.info(f"Restoring power")
-        ap_pdu.on(str(ap_pdu_port))
+        self.log.info("Restoring power")
+        ap_pdu.on(ap_pdu_port)
 
         self.log.info("Waiting for AccessPoint to become available via SSH.")
         self.ssh_provider.wait_until_reachable()
@@ -672,68 +779,71 @@
         self._initial_ap()
         self.log.info("Power cycled successfully")
 
-        for settings in hostapd_configs:
-            if type(settings) == HostapdConfig:
-                config = settings
-                setup_bridge = False
-                additional_parameters = None
-
-            elif type(settings) == dict:
-                config = settings["hostapd_config"]
-                setup_bridge = settings.get("setup_bridge", False)
-                additional_parameters = settings.get("additional_parameters", None)
-            else:
-                raise TypeError(
-                    "Items in hostapd_configs list must either be "
-                    "HostapdConfig objects or dictionaries."
-                )
-
-            self.log.info(f"Restarting network {config.ssid}")
-            self.start_ap(
-                config,
-                setup_bridge=setup_bridge,
-                additional_parameters=additional_parameters,
-            )
-
-    def channel_switch(self, identifier: str, channel_num: int) -> None:
+    def channel_switch(
+        self, identifier: str, channel_num: int, csa_beacon_count: int = 10
+    ) -> None:
         """Switch to a different channel on the given AP."""
-        if identifier not in list(self._aps.keys()):
-            raise ValueError(f"Invalid identifier {identifier} given")
         instance = self._aps.get(identifier)
+        if instance is None:
+            raise ValueError(f"Invalid identifier {identifier} given")
         self.log.info(f"channel switch to channel {channel_num}")
-        instance.hostapd.channel_switch(channel_num)
+        instance.hostapd.channel_switch(channel_num, csa_beacon_count)
 
     def get_current_channel(self, identifier: str) -> int:
         """Find the current channel on the given AP."""
-        if identifier not in list(self._aps.keys()):
-            raise ValueError(f"Invalid identifier {identifier} given")
         instance = self._aps.get(identifier)
+        if instance is None:
+            raise ValueError(f"Invalid identifier {identifier} given")
         return instance.hostapd.get_current_channel()
 
-    def get_stas(self, identifier: str) -> Set[str]:
+    def get_stas(self, identifier: str) -> set[str]:
         """Return MAC addresses of all associated STAs on the given AP."""
-        if identifier not in list(self._aps.keys()):
-            raise ValueError(f"Invalid identifier {identifier} given")
         instance = self._aps.get(identifier)
+        if instance is None:
+            raise ValueError(f"Invalid identifier {identifier} given")
         return instance.hostapd.get_stas()
 
+    def sta_authenticated(self, identifier: str, sta_mac: str) -> bool:
+        """Is STA authenticated?"""
+        instance = self._aps.get(identifier)
+        if instance is None:
+            raise ValueError(f"Invalid identifier {identifier} given")
+        return instance.hostapd.sta_authenticated(sta_mac)
+
+    def sta_associated(self, identifier: str, sta_mac: str) -> bool:
+        """Is STA associated?"""
+        instance = self._aps.get(identifier)
+        if instance is None:
+            raise ValueError(f"Invalid identifier {identifier} given")
+        return instance.hostapd.sta_associated(sta_mac)
+
+    def sta_authorized(self, identifier: str, sta_mac: str) -> bool:
+        """Is STA authorized (802.1X controlled port open)?"""
+        instance = self._aps.get(identifier)
+        if instance is None:
+            raise ValueError(f"Invalid identifier {identifier} given")
+        return instance.hostapd.sta_authorized(sta_mac)
+
     def get_sta_extended_capabilities(
         self, identifier: str, sta_mac: str
     ) -> ExtendedCapabilities:
         """Get extended capabilities for the given STA, as seen by the AP."""
-        if identifier not in list(self._aps.keys()):
-            raise ValueError(f"Invalid identifier {identifier} given")
         instance = self._aps.get(identifier)
+        if instance is None:
+            raise ValueError(f"Invalid identifier {identifier} given")
         return instance.hostapd.get_sta_extended_capabilities(sta_mac)
 
     def send_bss_transition_management_req(
-        self, identifier: str, sta_mac: str, request: BssTransitionManagementRequest
-    ) -> job.Result:
+        self,
+        identifier: str,
+        sta_mac: str,
+        request: BssTransitionManagementRequest,
+    ) -> None:
         """Send a BSS Transition Management request to an associated STA."""
-        if identifier not in list(self._aps.keys()):
-            raise ValueError("Invalid identifier {identifier} given")
         instance = self._aps.get(identifier)
-        return instance.hostapd.send_bss_transition_management_req(sta_mac, request)
+        if instance is None:
+            raise ValueError(f"Invalid identifier {identifier} given")
+        instance.hostapd.send_bss_transition_management_req(sta_mac, request)
 
 
 def setup_ap(
@@ -741,26 +851,25 @@
     profile_name: str,
     channel: int,
     ssid: str,
-    mode: Optional[str] = None,
-    preamble: Optional[bool] = None,
-    beacon_interval: Optional[int] = None,
-    dtim_period: Optional[int] = None,
-    frag_threshold: Optional[int] = None,
-    rts_threshold: Optional[int] = None,
-    force_wmm: Optional[bool] = None,
-    hidden: Optional[bool] = False,
-    security: Optional[Security] = None,
-    pmf_support: Optional[int] = None,
-    additional_ap_parameters: Optional[Dict[str, Any]] = None,
-    password: Optional[str] = None,
-    n_capabilities: Optional[List[Any]] = None,
-    ac_capabilities: Optional[List[Any]] = None,
-    vht_bandwidth: Optional[int] = None,
+    mode: str | None = None,
+    preamble: bool | None = None,
+    beacon_interval: int | None = None,
+    dtim_period: int | None = None,
+    frag_threshold: int | None = None,
+    rts_threshold: int | None = None,
+    force_wmm: bool | None = None,
+    hidden: bool | None = False,
+    security: Security | None = None,
+    pmf_support: int | None = None,
+    additional_ap_parameters: dict[str, Any] | None = None,
+    n_capabilities: list[Any] | None = None,
+    ac_capabilities: list[Any] | None = None,
+    vht_bandwidth: int | None = None,
     wnm_features: FrozenSet[hostapd_constants.WnmFeature] = frozenset(),
     setup_bridge: bool = False,
     is_ipv6_enabled: bool = False,
     is_nat_enabled: bool = True,
-):
+) -> list[str]:
     """Creates a hostapd profile and runs it on an ap. This is a convenience
     function that allows us to start an ap with a single function, without first
     creating a hostapd config.
@@ -779,7 +888,6 @@
         security: What security to enable.
         pmf_support: Whether pmf is not disabled, enabled, or required
         additional_ap_parameters: Additional parameters to send the AP.
-        password: Password to connect to WLAN if necessary.
         check_connectivity: Whether to check for internet connectivity.
         wnm_features: WNM features to enable on the AP.
         setup_bridge: Whether to bridge the LAN interface WLAN interface.
@@ -796,6 +904,9 @@
     Raises:
         Error: When the ap can't be brought up.
     """
+    if additional_ap_parameters is None:
+        additional_ap_parameters = {}
+
     ap = create_ap_preset(
         profile_name=profile_name,
         iface_wlan_2g=access_point.wlan_2g,
@@ -825,41 +936,3 @@
         is_nat_enabled=is_nat_enabled,
         additional_parameters=additional_ap_parameters,
     )
-
-
-def create(configs: Any) -> List[AccessPoint]:
-    """Creates ap controllers from a json config.
-
-    Creates an ap controller from either a list, or a single
-    element. The element can either be just the hostname or a dictionary
-    containing the hostname and username of the ap to connect to over ssh.
-
-    Args:
-        The json configs that represent this controller.
-
-    Returns:
-        A new AccessPoint.
-    """
-    return [AccessPoint(c) for c in configs]
-
-
-def destroy(aps: List[AccessPoint]) -> None:
-    """Destroys a list of access points.
-
-    Args:
-        aps: The list of access points to destroy.
-    """
-    for ap in aps:
-        ap.close()
-
-
-def get_info(aps: List[AccessPoint]) -> List[str]:
-    """Get information on a list of access points.
-
-    Args:
-        aps: A list of AccessPoints.
-
-    Returns:
-        A list of all aps hostname.
-    """
-    return [ap.ssh_settings.hostname for ap in aps]
diff --git a/src/antlion/controllers/adb.py b/packages/antlion/controllers/adb.py
similarity index 93%
rename from src/antlion/controllers/adb.py
rename to packages/antlion/controllers/adb.py
index 5c3848d..61597ff 100644
--- a/src/antlion/controllers/adb.py
+++ b/packages/antlion/controllers/adb.py
@@ -19,8 +19,7 @@
 import shlex
 import shutil
 
-from antlion.controllers.adb_lib.error import AdbCommandError
-from antlion.controllers.adb_lib.error import AdbError
+from antlion.controllers.adb_lib.error import AdbCommandError, AdbError
 from antlion.libs.proc import job
 
 DEFAULT_ADB_TIMEOUT = 60
@@ -78,7 +77,7 @@
         adb_path = shutil.which("adb")
         adb_cmd = [shlex.quote(adb_path)]
         if serial:
-            adb_cmd.append("-s %s" % serial)
+            adb_cmd.append(f"-s {serial}")
         if ssh_connection is not None:
             # Kill all existing adb processes on the remote host (if any)
             # Note that if there are none, then pkill exits with non-zero status
@@ -97,7 +96,7 @@
             self._server_local_port = local_port
 
         if self._server_local_port:
-            adb_cmd.append("-P %d" % local_port)
+            adb_cmd.append(f"-P {local_port}")
         self.adb_str = " ".join(adb_cmd)
         self._ssh_connection = ssh_connection
 
@@ -159,7 +158,7 @@
         """
         if isinstance(cmd, list):
             cmd = " ".join(cmd)
-        result = job.run(cmd, ignore_status=True, timeout=timeout)
+        result = job.run(cmd, ignore_status=True, timeout_sec=timeout)
         ret, out, err = result.exit_status, result.stdout, result.stderr
 
         if any(
@@ -181,7 +180,7 @@
         return out
 
     def _exec_adb_cmd(self, name, arg_str, **kwargs):
-        return self._exec_cmd(" ".join((self.adb_str, name, arg_str)), **kwargs)
+        return self._exec_cmd(f"{self.adb_str} {name} {arg_str}", **kwargs)
 
     def _exec_cmd_nb(self, cmd, **kwargs):
         """Executes adb commands in a new shell, non blocking.
@@ -193,7 +192,7 @@
         return job.run_async(cmd, **kwargs)
 
     def _exec_adb_cmd_nb(self, name, arg_str, **kwargs):
-        return self._exec_cmd_nb(" ".join((self.adb_str, name, arg_str)), **kwargs)
+        return self._exec_cmd_nb(f"{self.adb_str} {name} {arg_str}", **kwargs)
 
     def tcp_forward(self, host_port, device_port):
         """Starts tcp forwarding from localhost to this android device.
@@ -214,9 +213,7 @@
             host_port = self._ssh_connection.create_ssh_tunnel(
                 remote_port, local_port=host_port
             )
-        output = self.forward(
-            "tcp:%d tcp:%d" % (host_port, device_port), ignore_status=True
-        )
+        output = self.forward(f"tcp:{host_port} tcp:{device_port}", ignore_status=True)
         # If hinted_port is 0, the output will be the selected port.
         # Otherwise, there will be no output upon successfully
         # forwarding the hinted port.
@@ -243,7 +240,7 @@
                 return
             # The actual port we need to disable via adb is on the remote host.
             host_port = remote_port
-        self.forward("--remove tcp:%d" % host_port)
+        self.forward(f"--remove tcp:{host_port}")
 
     def getprop(self, prop_name):
         """Get a property of the device.
@@ -257,7 +254,7 @@
             A string that is the value of the property, or None if the property
             doesn't exist.
         """
-        return self.shell("getprop %s" % prop_name)
+        return self.shell(f"getprop {prop_name}")
 
     # TODO: This should be abstracted out into an object like the other shell
     # command.
diff --git a/src/antlion/controllers/adb_lib/__init__.py b/packages/antlion/controllers/adb_lib/__init__.py
similarity index 100%
rename from src/antlion/controllers/adb_lib/__init__.py
rename to packages/antlion/controllers/adb_lib/__init__.py
diff --git a/src/antlion/controllers/adb_lib/error.py b/packages/antlion/controllers/adb_lib/error.py
similarity index 100%
rename from src/antlion/controllers/adb_lib/error.py
rename to packages/antlion/controllers/adb_lib/error.py
diff --git a/src/antlion/controllers/android_device.py b/packages/antlion/controllers/android_device.py
similarity index 89%
rename from src/antlion/controllers/android_device.py
rename to packages/antlion/controllers/android_device.py
index 0eb0969..b993ff2 100755
--- a/src/antlion/controllers/android_device.py
+++ b/packages/antlion/controllers/android_device.py
@@ -14,6 +14,8 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+from __future__ import annotations
+
 import collections
 import logging
 import math
@@ -24,24 +26,22 @@
 import time
 from datetime import datetime
 
-from antlion import context
-from antlion import logger as acts_logger
-from antlion import tracelogger
-from antlion import utils
-from antlion.controllers import adb
+from mobly import logger
+
+from antlion import context, utils
+from antlion.controllers import adb, fastboot
 from antlion.controllers.adb_lib.error import AdbError
-from antlion.controllers import fastboot
 from antlion.controllers.android_lib import errors
 from antlion.controllers.android_lib import events as android_events
-from antlion.controllers.android_lib import logcat
-from antlion.controllers.android_lib import services
+from antlion.controllers.android_lib import logcat, services
 from antlion.controllers.sl4a_lib import sl4a_manager
-from antlion.controllers.utils_lib.ssh import connection
-from antlion.controllers.utils_lib.ssh import settings
+from antlion.controllers.utils_lib.ssh import connection, settings
 from antlion.event import event_bus
 from antlion.libs.proc import job
+from antlion.runner import Runner
+from antlion.types import ControllerConfig, Json
 
-MOBLY_CONTROLLER_CONFIG_NAME = "AndroidDevice"
+MOBLY_CONTROLLER_CONFIG_NAME: str = "AndroidDevice"
 ACTS_CONTROLLER_REFERENCE_NAME = "android_devices"
 
 ANDROID_DEVICE_PICK_ALL_TOKEN = "*"
@@ -84,7 +84,7 @@
 RELEASE_ID_REGEXES = [re.compile(r"\w+\.\d+\.\d+"), re.compile(r"N\w+")]
 
 
-def create(configs):
+def create(configs: list[ControllerConfig]) -> list[AndroidDevice]:
     """Creates AndroidDevice controller objects.
 
     Args:
@@ -96,8 +96,6 @@
     """
     if not configs:
         raise errors.AndroidDeviceConfigError(ANDROID_DEVICE_EMPTY_CONFIG_MSG)
-    elif configs == ANDROID_DEVICE_PICK_ALL_TOKEN:
-        ads = get_all_instances()
     elif not isinstance(configs, list):
         raise errors.AndroidDeviceConfigError(ANDROID_DEVICE_NOT_LIST_CONFIG_MSG)
     elif isinstance(configs[0], str):
@@ -107,7 +105,7 @@
         # Configs is a list of dicts.
         ads = get_instances_with_configs(configs)
 
-    ads[0].log.info('The primary device under test is "%s".' % ads[0].serial)
+    ads[0].log.info(f'The primary device under test is "{ads[0].serial}".')
 
     for ad in ads:
         if not ad.is_connected():
@@ -124,20 +122,20 @@
     return ads
 
 
-def destroy(ads):
+def destroy(objects: list[AndroidDevice]) -> None:
     """Cleans up AndroidDevice objects.
 
     Args:
         ads: A list of AndroidDevice objects.
     """
-    for ad in ads:
+    for ad in objects:
         try:
             ad.clean_up()
         except:
             ad.log.exception("Failed to clean up properly.")
 
 
-def get_info(ads):
+def get_info(objects: list[AndroidDevice]) -> list[Json]:
     """Get information on a list of AndroidDevice objects.
 
     Args:
@@ -146,8 +144,8 @@
     Returns:
         A list of dict, each representing info for an AndroidDevice objects.
     """
-    device_info = []
-    for ad in ads:
+    device_info: list[Json] = []
+    for ad in objects:
         info = {"serial": ad.serial, "model": ad.model}
         info.update(ad.build_info)
         device_info.append(info)
@@ -210,7 +208,7 @@
     return _parse_device_list(out, "fastboot")
 
 
-def get_instances(serials):
+def get_instances(serials) -> list[AndroidDevice]:
     """Create AndroidDevice instances from a list of serials.
 
     Args:
@@ -219,7 +217,7 @@
     Returns:
         A list of AndroidDevice objects.
     """
-    results = []
+    results: list[AndroidDevice] = []
     for s in serials:
         results.append(AndroidDevice(s))
     return results
@@ -243,7 +241,7 @@
             serial = c.pop("serial")
         except KeyError:
             raise errors.AndroidDeviceConfigError(
-                "Required value 'serial' is missing in AndroidDevice config %s." % c
+                f"Required value 'serial' is missing in AndroidDevice config {c}."
             )
         client_port = 0
         if ANDROID_DEVICE_SL4A_CLIENT_PORT_KEY in c:
@@ -289,7 +287,7 @@
     return results
 
 
-def get_all_instances(include_fastboot=False):
+def get_all_instances(include_fastboot: bool = False) -> list[AndroidDevice]:
     """Create AndroidDevice instances for all attached android devices.
 
     Args:
@@ -355,13 +353,13 @@
     filtered = filter_devices(ads, _get_device_filter)
     if not filtered:
         raise ValueError(
-            "Could not find a target device that matches condition: %s." % kwargs
+            f"Could not find a target device that matches condition: {kwargs}."
         )
     elif len(filtered) == 1:
         return filtered[0]
     else:
         serials = [ad.serial for ad in filtered]
-        raise ValueError("More than one device matched: %s" % serials)
+        raise ValueError(f"More than one device matched: {serials}")
 
 
 def take_bug_reports(ads, test_name, begin_time):
@@ -412,23 +410,21 @@
 
     def __init__(
         self,
-        serial="",
-        ssh_connection=None,
-        client_port=0,
-        forwarded_port=0,
-        server_port=None,
+        serial: str = "",
+        ssh_connection: Runner | None = None,
+        client_port: int = 0,
+        forwarded_port: int = 0,
+        server_port: int | None = None,
     ):
         self.serial = serial
         # logging.log_path only exists when this is used in an ACTS test run.
         log_path_base = getattr(logging, "log_path", "/tmp/logs")
-        self.log_dir = "AndroidDevice%s" % serial
+        self.log_dir = f"AndroidDevice{serial}"
         self.log_path = os.path.join(log_path_base, self.log_dir)
         self.client_port = client_port
         self.forwarded_port = forwarded_port
         self.server_port = server_port
-        self.log = tracelogger.TraceLogger(
-            AndroidDeviceLoggerAdapter(logging.getLogger(), {"serial": serial})
-        )
+        self.log = AndroidDeviceLoggerAdapter(logging.getLogger(), {"serial": serial})
         self._event_dispatchers = {}
         self._services = []
         self.register_service(services.AdbLogcatService(self))
@@ -694,7 +690,7 @@
             # skip_sl4a value can be reset from config file
             if hasattr(self, k) and k != "skip_sl4a":
                 raise errors.AndroidDeviceError(
-                    "Attempting to set existing attribute %s on %s" % (k, self.serial),
+                    f"Attempting to set existing attribute {k} on {self.serial}",
                     serial=self.serial,
                 )
             setattr(self, k, v)
@@ -710,7 +706,7 @@
 
         for attempt in range(ADB_ROOT_RETRY_COUNT):
             try:
-                self.log.debug("Enabling ADB root mode: attempt %d." % attempt)
+                self.log.debug(f"Enabling ADB root mode: attempt {attempt}.")
                 self.adb.root()
             except AdbError:
                 if attempt == ADB_ROOT_RETRY_COUNT:
@@ -774,7 +770,7 @@
         for cmd in ("ps -A", "ps"):
             try:
                 out = self.adb.shell(
-                    '%s | grep "S %s"' % (cmd, package_name), ignore_status=True
+                    f'{cmd} | grep "S {package_name}"', ignore_status=True
                 )
                 if package_name not in out:
                     continue
@@ -813,8 +809,8 @@
         return self._sl4a_manager.sessions[droid.uid].get_event_dispatcher()
 
     def _is_timestamp_in_range(self, target, log_begin_time, log_end_time):
-        low = acts_logger.logline_timestamp_comparator(log_begin_time, target) <= 0
-        high = acts_logger.logline_timestamp_comparator(log_end_time, target) >= 0
+        low = logger.logline_timestamp_comparator(log_begin_time, target) <= 0
+        high = logger.logline_timestamp_comparator(log_end_time, target) >= 0
         return low and high
 
     def cat_adb_log(self, tag, begin_time, end_time=None, dest_path="AdbLogExcerpts"):
@@ -827,26 +823,26 @@
             end_time: Epoch time of the ending of the time period, default None
             dest_path: Destination path of the excerpt file.
         """
-        log_begin_time = acts_logger.epoch_to_log_line_timestamp(begin_time)
+        log_begin_time = logger.epoch_to_log_line_timestamp(begin_time)
         if end_time is None:
-            log_end_time = acts_logger.get_log_line_timestamp()
+            log_end_time = logger.get_log_line_timestamp()
         else:
-            log_end_time = acts_logger.epoch_to_log_line_timestamp(end_time)
+            log_end_time = logger.epoch_to_log_line_timestamp(end_time)
         self.log.debug("Extracting adb log from logcat.")
         logcat_path = os.path.join(
-            self.device_log_path, "adblog_%s_debug.txt" % self.serial
+            self.device_log_path, f"adblog_{self.serial}_debug.txt"
         )
         if not os.path.exists(logcat_path):
-            self.log.warning("Logcat file %s does not exist." % logcat_path)
+            self.log.warning(f"Logcat file {logcat_path} does not exist.")
             return
         adb_excerpt_dir = os.path.join(self.log_path, dest_path)
         os.makedirs(adb_excerpt_dir, exist_ok=True)
         out_name = "%s,%s.txt" % (
-            acts_logger.normalize_log_line_timestamp(log_begin_time),
+            logger.sanitize_filename(log_begin_time),
             self.serial,
         )
         tag_len = utils.MAX_FILENAME_LEN - len(out_name)
-        out_name = "%s,%s" % (tag[:tag_len], out_name)
+        out_name = f"{tag[:tag_len]},{out_name}"
         adb_excerpt_path = os.path.join(adb_excerpt_dir, out_name)
         with open(adb_excerpt_path, "w", encoding="utf-8") as out:
             in_file = logcat_path
@@ -859,8 +855,8 @@
                             break
                     except:
                         continue
-                    line_time = line[: acts_logger.log_line_timestamp_len]
-                    if not acts_logger.is_valid_logline_timestamp(line_time):
+                    line_time = line[: logger.log_line_timestamp_len]
+                    if not logger.is_valid_logline_timestamp(line_time):
                         continue
                     if self._is_timestamp_in_range(
                         line_time, log_begin_time, log_end_time
@@ -902,23 +898,21 @@
         """
         if not logcat_path:
             logcat_path = os.path.join(
-                self.device_log_path, "adblog_%s_debug.txt" % self.serial
+                self.device_log_path, f"adblog_{self.serial}_debug.txt"
             )
         if not os.path.exists(logcat_path):
-            self.log.warning("Logcat file %s does not exist." % logcat_path)
+            self.log.warning(f"Logcat file {logcat_path} does not exist.")
             return
-        output = job.run(
-            "grep '%s' %s" % (matching_string, logcat_path), ignore_status=True
-        )
+        output = job.run(f"grep '{matching_string}' {logcat_path}", ignore_status=True)
         if not output.stdout or output.exit_status != 0:
             return []
         if begin_time:
             if not isinstance(begin_time, datetime):
-                log_begin_time = acts_logger.epoch_to_log_line_timestamp(begin_time)
+                log_begin_time = logger.epoch_to_log_line_timestamp(begin_time)
                 begin_time = datetime.strptime(log_begin_time, "%Y-%m-%d %H:%M:%S.%f")
         if end_time:
             if not isinstance(end_time, datetime):
-                log_end_time = acts_logger.epoch_to_log_line_timestamp(end_time)
+                log_end_time = logger.epoch_to_log_line_timestamp(end_time)
                 end_time = datetime.strptime(log_end_time, "%Y-%m-%d %H:%M:%S.%f")
         result = []
         logs = re.findall(r"(\S+\s\S+)(.*)", output.stdout)
@@ -976,7 +970,7 @@
         """Stops the adb logcat collection subprocess."""
         if not self.is_adb_logcat_on:
             self.log.warning(
-                "Android device %s does not have an ongoing adb logcat " % self.serial
+                f"Android device {self.serial} does not have an ongoing adb logcat "
             )
             return
         # Set the last timestamp to the current timestamp. This may cause
@@ -995,7 +989,7 @@
         Linux UID for the apk.
         """
         output = self.adb.shell(
-            "dumpsys package %s | grep userId=" % apk_name, ignore_status=True
+            f"dumpsys package {apk_name} | grep userId=", ignore_status=True
         )
         result = re.search(r"userId=(\d+)", output)
         if result:
@@ -1014,7 +1008,7 @@
         """
         try:
             output = self.adb.shell(
-                "dumpsys package %s | grep versionName" % package_name
+                f"dumpsys package {package_name} | grep versionName"
             )
             pattern = re.compile(r"versionName=(.+)", re.I)
             result = pattern.findall(output)
@@ -1040,7 +1034,7 @@
         try:
             return bool(
                 self.adb.shell(
-                    '(pm list packages | grep -w "package:%s") || true' % package_name
+                    f'(pm list packages | grep -w "package:{package_name}") || true'
                 )
             )
 
@@ -1067,7 +1061,7 @@
         for cmd in ("ps -A", "ps"):
             try:
                 out = self.adb.shell(
-                    '%s | grep "S %s"' % (cmd, package_name), ignore_status=True
+                    f'{cmd} | grep "S {package_name}"', ignore_status=True
                 )
                 if package_name in out:
                     self.log.info("apk %s is running", package_name)
@@ -1096,7 +1090,7 @@
         True if package is installed. False otherwise.
         """
         try:
-            self.adb.shell("am force-stop %s" % package_name, ignore_status=True)
+            self.adb.shell(f"am force-stop {package_name}", ignore_status=True)
         except Exception as e:
             self.log.warning("Fail to stop package %s: %s", package_name, e)
 
@@ -1121,11 +1115,9 @@
         br_path = self.device_log_path
         os.makedirs(br_path, exist_ok=True)
         epoch = begin_time if begin_time else utils.get_current_epoch_time()
-        time_stamp = acts_logger.normalize_log_line_timestamp(
-            acts_logger.epoch_to_log_line_timestamp(epoch)
-        )
-        out_name = "AndroidDevice%s_%s" % (self.serial, time_stamp)
-        out_name = "%s.zip" % out_name if new_br else "%s.txt" % out_name
+        time_stamp = logger.sanitize_filename(logger.epoch_to_log_line_timestamp(epoch))
+        out_name = f"AndroidDevice{self.serial}_{time_stamp}"
+        out_name = f"{out_name}.zip" if new_br else f"{out_name}.txt"
         full_out_path = os.path.join(br_path, out_name)
         # in case device restarted, wait for adb interface to return
         self.wait_for_boot_completion()
@@ -1137,15 +1129,13 @@
             out = self.adb.shell("bugreportz", timeout=BUG_REPORT_TIMEOUT)
             if not out.startswith("OK"):
                 raise errors.AndroidDeviceError(
-                    "Failed to take bugreport on %s: %s" % (self.serial, out),
+                    f"Failed to take bugreport on {self.serial}: {out}",
                     serial=self.serial,
                 )
             br_out_path = out.split(":")[1].strip().split()[0]
-            self.adb.pull("%s %s" % (br_out_path, full_out_path))
+            self.adb.pull(f"{br_out_path} {full_out_path}")
         else:
-            self.adb.bugreport(
-                " > {}".format(full_out_path), timeout=BUG_REPORT_TIMEOUT
-            )
+            self.adb.bugreport(f" > {full_out_path}", timeout=BUG_REPORT_TIMEOUT)
         if test_name:
             self.log.info("Bugreport for %s taken at %s.", test_name, full_out_path)
         else:
@@ -1156,15 +1146,15 @@
         self, directory, begin_time=None, skip_files=[], match_string=None
     ):
         """Get files names with provided directory."""
-        cmd = "find %s -type f" % directory
+        cmd = f"find {directory} -type f"
         if begin_time:
             current_time = utils.get_current_epoch_time()
             seconds = int(math.ceil((current_time - begin_time) / 1000.0))
-            cmd = "%s -mtime -%ss" % (cmd, seconds)
+            cmd = f"{cmd} -mtime -{seconds}s"
         if match_string:
-            cmd = "%s -iname %s" % (cmd, match_string)
+            cmd = f"{cmd} -iname {match_string}"
         for skip_file in skip_files:
-            cmd = "%s ! -iname %s" % (cmd, skip_file)
+            cmd = f"{cmd} ! -iname {skip_file}"
         out = self.adb.shell(cmd, ignore_status=True)
         if (
             not out
@@ -1190,7 +1180,7 @@
         Args:
             file_path: The path of the file to check for.
         """
-        cmd = "(test -f %s && echo yes) || echo no" % file_path
+        cmd = f"(test -f {file_path} && echo yes) || echo no"
         result = self.adb.shell(cmd)
         if result == "yes":
             return True
@@ -1213,8 +1203,8 @@
         if not host_path:
             host_path = self.log_path
         for device_path in device_paths:
-            self.log.info("Pull from device: %s -> %s" % (device_path, host_path))
-            self.adb.pull("%s %s" % (device_path, host_path), timeout=PULL_TIMEOUT)
+            self.log.info(f"Pull from device: {device_path} -> {host_path}")
+            self.adb.pull(f"{device_path} {host_path}", timeout=PULL_TIMEOUT)
 
     def check_crash_report(
         self, test_name=None, begin_time=None, log_crash_report=False
@@ -1223,7 +1213,7 @@
         crash_reports = []
         for crash_path in CRASH_REPORT_PATHS:
             try:
-                cmd = "cd %s" % crash_path
+                cmd = f"cd {crash_path}"
                 self.adb.shell(cmd)
             except Exception as e:
                 self.log.debug("received exception %s", e)
@@ -1235,14 +1225,14 @@
                 tombstones = crashes[:]
                 for tombstone in tombstones:
                     if self.adb.shell(
-                        'cat %s | grep "crash_dump failed to dump process"' % tombstone
+                        f'cat {tombstone} | grep "crash_dump failed to dump process"'
                     ):
                         crashes.remove(tombstone)
             if crashes:
                 crash_reports.extend(crashes)
         if crash_reports and log_crash_report:
             crash_log_path = os.path.join(
-                self.device_log_path, "Crashes_%s" % self.serial
+                self.device_log_path, f"Crashes_{self.serial}"
             )
             os.makedirs(crash_log_path, exist_ok=True)
             self.pull_files(crash_reports, crash_log_path)
@@ -1257,31 +1247,28 @@
             log_path, begin_time=begin_time, match_string="*.qmdl"
         )
         if qxdm_logs:
-            qxdm_log_path = os.path.join(self.device_log_path, "QXDM_%s" % self.serial)
+            qxdm_log_path = os.path.join(self.device_log_path, f"QXDM_{self.serial}")
             os.makedirs(qxdm_log_path, exist_ok=True)
 
             self.log.info("Pull QXDM Log %s to %s", qxdm_logs, qxdm_log_path)
             self.pull_files(qxdm_logs, qxdm_log_path)
 
             self.adb.pull(
-                "/firmware/image/qdsp6m.qdb %s" % qxdm_log_path,
+                f"/firmware/image/qdsp6m.qdb {qxdm_log_path}",
                 timeout=PULL_TIMEOUT,
                 ignore_status=True,
             )
             # Zip Folder
-            utils.zip_directory("%s.zip" % qxdm_log_path, qxdm_log_path)
+            utils.zip_directory(f"{qxdm_log_path}.zip", qxdm_log_path)
             shutil.rmtree(qxdm_log_path)
         else:
-            self.log.error("Didn't find QXDM logs in %s." % log_path)
+            self.log.error(f"Didn't find QXDM logs in {log_path}.")
         if "Verizon" in self.adb.getprop("gsm.sim.operator.alpha"):
-            omadm_log_path = os.path.join(
-                self.device_log_path, "OMADM_%s" % self.serial
-            )
+            omadm_log_path = os.path.join(self.device_log_path, f"OMADM_{self.serial}")
             os.makedirs(omadm_log_path, exist_ok=True)
             self.log.info("Pull OMADM Log")
             self.adb.pull(
-                "/data/data/com.android.omadm.service/files/dm/log/ %s"
-                % omadm_log_path,
+                f"/data/data/com.android.omadm.service/files/dm/log/ {omadm_log_path}",
                 timeout=PULL_TIMEOUT,
                 ignore_status=True,
             )
@@ -1300,21 +1287,18 @@
                 path, begin_time=begin_time, match_string="*.sdm*"
             )
         if sdm_logs:
-            sdm_log_path = os.path.join(self.device_log_path, "SDM_%s" % self.serial)
+            sdm_log_path = os.path.join(self.device_log_path, f"SDM_{self.serial}")
             os.makedirs(sdm_log_path, exist_ok=True)
             self.log.info("Pull SDM Log %s to %s", sdm_logs, sdm_log_path)
             self.pull_files(sdm_logs, sdm_log_path)
         else:
-            self.log.error("Didn't find SDM logs in %s." % log_paths)
+            self.log.error(f"Didn't find SDM logs in {log_paths}.")
         if "Verizon" in self.adb.getprop("gsm.sim.operator.alpha"):
-            omadm_log_path = os.path.join(
-                self.device_log_path, "OMADM_%s" % self.serial
-            )
+            omadm_log_path = os.path.join(self.device_log_path, f"OMADM_{self.serial}")
             os.makedirs(omadm_log_path, exist_ok=True)
             self.log.info("Pull OMADM Log")
             self.adb.pull(
-                "/data/data/com.android.omadm.service/files/dm/log/ %s"
-                % omadm_log_path,
+                f"/data/data/com.android.omadm.service/files/dm/log/ {omadm_log_path}",
                 timeout=PULL_TIMEOUT,
                 ignore_status=True,
             )
@@ -1361,9 +1345,9 @@
             log_file_path: The complete file path to log the results.
 
         """
-        cmd = "iperf3 -c {} {}".format(server_host, extra_args)
+        cmd = f"iperf3 -c {server_host} {extra_args}"
         if log_file_path:
-            cmd += " --logfile {} &".format(log_file_path)
+            cmd += f" --logfile {log_file_path} &"
         self.adb.shell_nb(cmd)
 
     def run_iperf_client(self, server_host, extra_args="", timeout=IPERF_TIMEOUT):
@@ -1381,9 +1365,7 @@
             status: true if iperf client start successfully.
             results: results have data flow information
         """
-        out = self.adb.shell(
-            "iperf3 -c {} {}".format(server_host, extra_args), timeout=timeout
-        )
+        out = self.adb.shell(f"iperf3 -c {server_host} {extra_args}", timeout=timeout)
         clean_out = out.split("\n")
         if "error" in clean_out[0].lower():
             return False, clean_out
@@ -1401,7 +1383,7 @@
             status: true if iperf server started successfully.
             results: results have output of command
         """
-        out = self.adb.shell("iperf3 -s {}".format(extra_args))
+        out = self.adb.shell(f"iperf3 -s {extra_args}")
         clean_out = out.split("\n")
         if "error" in clean_out[0].lower():
             return False, clean_out
@@ -1431,7 +1413,7 @@
                 pass
             time.sleep(5)
         raise errors.AndroidDeviceError(
-            "Device %s booting process timed out." % self.serial, serial=self.serial
+            f"Device {self.serial} booting process timed out.", serial=self.serial
         )
 
     def reboot(
@@ -1503,11 +1485,11 @@
     def get_ipv4_address(self, interface="wlan0", timeout=5):
         for timer in range(0, timeout):
             try:
-                ip_string = self.adb.shell("ifconfig %s|grep inet" % interface)
+                ip_string = self.adb.shell(f"ifconfig {interface}|grep inet")
                 break
             except adb.AdbError as e:
                 if timer + 1 == timeout:
-                    self.log.warning("Unable to find IP address for %s." % interface)
+                    self.log.warning(f"Unable to find IP address for {interface}.")
                     return None
                 else:
                     time.sleep(1)
@@ -1545,7 +1527,7 @@
             return None
 
     def send_keycode(self, keycode):
-        self.adb.shell("input keyevent KEYCODE_%s" % keycode)
+        self.adb.shell(f"input keyevent KEYCODE_{keycode}")
 
     def get_my_current_focus_window(self):
         """Get the current focus window on screen"""
@@ -1667,7 +1649,7 @@
             self.send_keycode("SLEEP")
 
     def send_keycode_number_pad(self, number):
-        self.send_keycode("NUMPAD_%s" % number)
+        self.send_keycode(f"NUMPAD_{number}")
 
     def unlock_screen(self, password=None):
         self.log.info("Unlocking with %s", password or "swipe up")
@@ -1691,12 +1673,12 @@
             name: additional information of screenshot on the file name.
         """
         if name:
-            file_name = "%s_%s" % (DEFAULT_SCREENSHOT_PATH, name)
-        file_name = "%s_%s.png" % (file_name, utils.get_current_epoch_time())
+            file_name = f"{DEFAULT_SCREENSHOT_PATH}_{name}"
+        file_name = f"{file_name}_{utils.get_current_epoch_time()}.png"
         self.ensure_screen_on()
         self.log.info("Log screenshot to %s", file_name)
         try:
-            self.adb.shell("screencap -p %s" % file_name)
+            self.adb.shell(f"screencap -p {file_name}")
         except:
             self.log.error("Fail to log screenshot to %s", file_name)
 
@@ -1712,7 +1694,7 @@
                 "am start -a com.android.setupwizard.EXIT", ignore_status=True
             )
             self.adb.shell(
-                "pm disable %s" % self.get_setupwizard_package_name(),
+                f"pm disable {self.get_setupwizard_package_name()}",
                 ignore_status=True,
             )
         # Wait up to 5 seconds for user_setup_complete to be updated
@@ -1744,8 +1726,8 @@
         )
         wizard_package = package.split("=")[1]
         activity = package.split("=")[0].split("/")[-2]
-        self.log.info("%s/.%sActivity" % (wizard_package, activity))
-        return "%s/.%sActivity" % (wizard_package, activity)
+        self.log.info(f"{wizard_package}/.{activity}Activity")
+        return f"{wizard_package}/.{activity}Activity"
 
     def push_system_file(self, src_file_path, dst_file_path, push_timeout=300):
         """Pushes a file onto the read-only file system.
@@ -1765,7 +1747,7 @@
             self.ensure_verity_disabled()
             self.adb.remount()
             out = self.adb.push(
-                "%s %s" % (src_file_path, dst_file_path), timeout=push_timeout
+                f"{src_file_path} {dst_file_path}", timeout=push_timeout
             )
             if "error" in out:
                 self.log.error(
@@ -1819,5 +1801,5 @@
 
 class AndroidDeviceLoggerAdapter(logging.LoggerAdapter):
     def process(self, msg, kwargs):
-        msg = "[AndroidDevice|%s] %s" % (self.extra["serial"], msg)
+        msg = f"[AndroidDevice|{self.extra['serial']}] {msg}"
         return (msg, kwargs)
diff --git a/src/antlion/controllers/android_lib/__init__.py b/packages/antlion/controllers/android_lib/__init__.py
similarity index 100%
rename from src/antlion/controllers/android_lib/__init__.py
rename to packages/antlion/controllers/android_lib/__init__.py
diff --git a/src/antlion/controllers/android_lib/errors.py b/packages/antlion/controllers/android_lib/errors.py
similarity index 100%
rename from src/antlion/controllers/android_lib/errors.py
rename to packages/antlion/controllers/android_lib/errors.py
diff --git a/src/antlion/controllers/android_lib/events.py b/packages/antlion/controllers/android_lib/events.py
similarity index 100%
rename from src/antlion/controllers/android_lib/events.py
rename to packages/antlion/controllers/android_lib/events.py
diff --git a/src/antlion/controllers/android_lib/logcat.py b/packages/antlion/controllers/android_lib/logcat.py
similarity index 92%
rename from src/antlion/controllers/android_lib/logcat.py
rename to packages/antlion/controllers/android_lib/logcat.py
index 0a5e8f7..4aab7d0 100644
--- a/src/antlion/controllers/android_lib/logcat.py
+++ b/packages/antlion/controllers/android_lib/logcat.py
@@ -17,9 +17,9 @@
 import logging
 import re
 
-from antlion.libs.proc.process import Process
 from antlion.libs.logging import log_stream
 from antlion.libs.logging.log_stream import LogStyles
+from antlion.libs.proc.process import Process
 
 TIMESTAMP_REGEX = r"((?:\d+-)?\d+-\d+ \d+:\d+:\d+.\d+)"
 
@@ -73,11 +73,7 @@
         begin_at = '"%s"' % (timestamp_tracker.last_timestamp or 1)
         additional_params = extra_params or ""
 
-        return "adb -s %s logcat -T %s -v year %s" % (
-            serial,
-            begin_at,
-            additional_params,
-        )
+        return f"adb -s {serial} logcat -T {begin_at} -v year {additional_params}"
 
     return on_retry
 
@@ -94,12 +90,12 @@
         A acts.libs.proc.process.Process object.
     """
     logger = log_stream.create_logger(
-        "adblog_%s" % serial,
+        f"adblog_{serial}",
         log_name=serial,
         subcontext=logcat_dir,
         log_styles=(LogStyles.LOG_DEBUG | LogStyles.TESTCASE_LOG),
     )
-    process = Process("adb -s %s logcat -T 1 -v year %s" % (serial, extra_params))
+    process = Process(f"adb -s {serial} logcat -T 1 -v year {extra_params}")
     timestamp_tracker = TimestampTracker()
     process.set_on_output_callback(_log_line_func(logger, timestamp_tracker))
     process.set_on_terminate_callback(
diff --git a/src/antlion/controllers/android_lib/services.py b/packages/antlion/controllers/android_lib/services.py
similarity index 100%
rename from src/antlion/controllers/android_lib/services.py
rename to packages/antlion/controllers/android_lib/services.py
diff --git a/src/antlion/controllers/ap_lib/__init__.py b/packages/antlion/controllers/ap_lib/__init__.py
similarity index 100%
rename from src/antlion/controllers/ap_lib/__init__.py
rename to packages/antlion/controllers/ap_lib/__init__.py
diff --git a/src/antlion/controllers/ap_lib/ap_get_interface.py b/packages/antlion/controllers/ap_lib/ap_get_interface.py
similarity index 80%
rename from src/antlion/controllers/ap_lib/ap_get_interface.py
rename to packages/antlion/controllers/ap_lib/ap_get_interface.py
index 74a6d2c..9028ded 100644
--- a/src/antlion/controllers/ap_lib/ap_get_interface.py
+++ b/packages/antlion/controllers/ap_lib/ap_get_interface.py
@@ -15,10 +15,9 @@
 # limitations under the License.
 
 import logging
+from typing import TYPE_CHECKING
 
-from typing import List, Optional, Tuple, TYPE_CHECKING
-
-from antlion.libs.proc import job
+from antlion.runner import CalledProcessError
 
 if TYPE_CHECKING:
     from antlion.controllers.access_point import AccessPoint
@@ -36,7 +35,7 @@
     """Class to get network interface information for the device."""
 
     def __init__(
-        self, ap: "AccessPoint", wan_interface_override: Optional[str] = None
+        self, ap: "AccessPoint", wan_interface_override: str | None = None
     ) -> None:
         """Initialize the ApInterface class.
 
@@ -47,32 +46,33 @@
         self.ssh = ap.ssh
         self.wan_interface_override = wan_interface_override
 
-    def get_all_interface(self) -> List[str]:
+    def get_all_interface(self) -> list[str]:
         """Get all network interfaces on the device.
 
         Returns:
             interfaces_all: list of all the network interfaces on device
         """
         output = self.ssh.run(GET_ALL_INTERFACE)
-        interfaces_all = output.stdout.split("\n")
+        interfaces_all = output.stdout.decode("utf-8").split("\n")
 
         return interfaces_all
 
-    def get_virtual_interface(self) -> List[str]:
+    def get_virtual_interface(self) -> list[str]:
         """Get all virtual interfaces on the device.
 
         Returns:
             interfaces_virtual: list of all the virtual interfaces on device
         """
         output = self.ssh.run(GET_VIRTUAL_INTERFACE)
-        interfaces_virtual = output.stdout.split("\n")
+        interfaces_virtual = output.stdout.decode("utf-8").split("\n")
 
         return interfaces_virtual
 
-    def get_physical_interface(self) -> List[str]:
+    def get_physical_interface(self) -> list[str]:
         """Get all the physical interfaces of the device.
 
         Get all physical interfaces such as eth ports and wlan ports
+
         Returns:
             interfaces_phy: list of all the physical interfaces
         """
@@ -82,26 +82,29 @@
 
         return interfaces_phy
 
-    def get_bridge_interface(self) -> Optional[List[str]]:
+    def get_bridge_interface(self) -> list[str]:
         """Get all the bridge interfaces of the device.
 
         Returns:
             interfaces_bridge: the list of bridge interfaces, return None if
                 bridge utility is not available on the device
+
+        Raises:
+            ApInterfaceError: Failing to run brctl
         """
-        interfaces_bridge = []
         try:
             output = self.ssh.run(BRCTL_SHOW)
-            lines = output.stdout.split("\n")
-            for line in lines:
-                interfaces_bridge.append(line.split("\t")[0])
-            interfaces_bridge.pop(0)
-            return [x for x in interfaces_bridge if x != ""]
-        except job.Error:
-            logging.info("No brctl utility is available")
-            return None
+        except CalledProcessError as e:
+            raise ApInterfacesError(f'failed to execute "{BRCTL_SHOW}"') from e
 
-    def get_wlan_interface(self) -> Tuple[str, str]:
+        lines = output.stdout.decode("utf-8").split("\n")
+        interfaces_bridge = []
+        for line in lines:
+            interfaces_bridge.append(line.split("\t")[0])
+        interfaces_bridge.pop(0)
+        return [x for x in interfaces_bridge if x != ""]
+
+    def get_wlan_interface(self) -> tuple[str, str]:
         """Get all WLAN interfaces and specify 2.4 GHz and 5 GHz interfaces.
 
         Returns:
@@ -114,9 +117,9 @@
         interfaces_phy = self.get_physical_interface()
         for iface in interfaces_phy:
             output = self.ssh.run(f"iwlist {iface} freq")
-            if "Channel 06" in output.stdout and "Channel 36" not in output.stdout:
+            if b"Channel 06" in output.stdout and b"Channel 36" not in output.stdout:
                 wlan_2g = iface
-            elif "Channel 36" in output.stdout and "Channel 06" not in output.stdout:
+            elif b"Channel 36" in output.stdout and b"Channel 06" not in output.stdout:
                 wlan_5g = iface
 
         if wlan_2g is None or wlan_5g is None:
@@ -149,12 +152,12 @@
             return wan
 
         output = self.ssh.run("ifconfig")
-        interfaces_all = output.stdout.split("\n")
+        interfaces_all = output.stdout.decode("utf-8").split("\n")
         logging.info(f"IFCONFIG output = {interfaces_all}")
 
         raise ApInterfacesError("No WAN interface available")
 
-    def get_lan_interface(self) -> Optional[str]:
+    def get_lan_interface(self) -> str | None:
         """Get the LAN interface connecting to local devices.
 
         Returns:
@@ -169,7 +172,7 @@
         interfaces_eth.remove(interface_wan)
         for iface in interfaces_eth:
             output = self.ssh.run(f"ifconfig {iface}")
-            if "RUNNING" in output.stdout:
+            if b"RUNNING" in output.stdout:
                 lan = iface
                 break
         return lan
@@ -185,5 +188,5 @@
         try:
             self.ssh.run(f"ping -c 3 -I {iface} 8.8.8.8")
             return 1
-        except job.Error:
+        except CalledProcessError:
             return 0
diff --git a/src/antlion/controllers/ap_lib/ap_iwconfig.py b/packages/antlion/controllers/ap_lib/ap_iwconfig.py
similarity index 88%
rename from src/antlion/controllers/ap_lib/ap_iwconfig.py
rename to packages/antlion/controllers/ap_lib/ap_iwconfig.py
index 225a397..d5b4556 100644
--- a/src/antlion/controllers/ap_lib/ap_iwconfig.py
+++ b/packages/antlion/controllers/ap_lib/ap_iwconfig.py
@@ -14,9 +14,8 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from typing import Optional, TYPE_CHECKING
-
-from antlion.libs.proc.job import Result
+import subprocess
+from typing import TYPE_CHECKING
 
 if TYPE_CHECKING:
     from antlion.controllers.access_point import AccessPoint
@@ -40,8 +39,8 @@
         self.ssh = ap.ssh
 
     def ap_iwconfig(
-        self, interface: str, arguments: Optional[str] = None
-    ) -> Optional[Result]:
+        self, interface: str, arguments: str | None = None
+    ) -> subprocess.CompletedProcess[bytes]:
         """Configure the wireless interface using iwconfig.
 
         Returns:
diff --git a/src/antlion/controllers/ap_lib/bridge_interface.py b/packages/antlion/controllers/ap_lib/bridge_interface.py
similarity index 75%
rename from src/antlion/controllers/ap_lib/bridge_interface.py
rename to packages/antlion/controllers/ap_lib/bridge_interface.py
index ee4733e..383d289 100644
--- a/src/antlion/controllers/ap_lib/bridge_interface.py
+++ b/packages/antlion/controllers/ap_lib/bridge_interface.py
@@ -16,13 +16,14 @@
 
 import logging
 import time
-from antlion.libs.proc import job
+
+from antlion.runner import CalledProcessError
 
 _BRCTL = "brctl"
 BRIDGE_NAME = "br-lan"
-CREATE_BRIDGE = "%s addbr %s" % (_BRCTL, BRIDGE_NAME)
-DELETE_BRIDGE = "%s delbr %s" % (_BRCTL, BRIDGE_NAME)
-BRING_DOWN_BRIDGE = "ifconfig %s down" % BRIDGE_NAME
+CREATE_BRIDGE = f"{_BRCTL} addbr {BRIDGE_NAME}"
+DELETE_BRIDGE = f"{_BRCTL} delbr {BRIDGE_NAME}"
+BRING_DOWN_BRIDGE = f"ifconfig {BRIDGE_NAME} down"
 
 
 class BridgeInterfaceConfigs(object):
@@ -64,35 +65,29 @@
         # Create the bridge
         try:
             self.ssh.run(CREATE_BRIDGE)
-        except job.Error:
+        except CalledProcessError:
             logging.warning(
-                "Bridge interface {} already exists, no action needed".format(
-                    BRIDGE_NAME
-                )
+                f"Bridge interface {BRIDGE_NAME} already exists, no action needed"
             )
 
         # Enable 4addr mode on for the wlan interface
-        ENABLE_4ADDR = "iw dev %s set 4addr on" % (brconfigs.iface_wlan)
+        ENABLE_4ADDR = f"iw dev {brconfigs.iface_wlan} set 4addr on"
         try:
             self.ssh.run(ENABLE_4ADDR)
-        except job.Error:
-            logging.warning(
-                "4addr is already enabled on {}".format(brconfigs.iface_wlan)
-            )
+        except CalledProcessError:
+            logging.warning(f"4addr is already enabled on {brconfigs.iface_wlan}")
 
         # Add both LAN and WLAN interfaces to the bridge interface
         for interface in [brconfigs.iface_lan, brconfigs.iface_wlan]:
-            ADD_INTERFACE = "%s addif %s %s" % (_BRCTL, BRIDGE_NAME, interface)
+            ADD_INTERFACE = f"{_BRCTL} addif {BRIDGE_NAME} {interface}"
             try:
                 self.ssh.run(ADD_INTERFACE)
-            except job.Error:
-                logging.warning(
-                    "{} has already been added to {}".format(interface, BRIDGE_NAME)
-                )
+            except CalledProcessError:
+                logging.warning(f"{interface} has already been added to {BRIDGE_NAME}")
         time.sleep(5)
 
         # Set IP address on the bridge interface to bring it up
-        SET_BRIDGE_IP = "ifconfig %s %s" % (BRIDGE_NAME, brconfigs.bridge_ip)
+        SET_BRIDGE_IP = f"ifconfig {BRIDGE_NAME} {brconfigs.bridge_ip}"
         self.ssh.run(SET_BRIDGE_IP)
         time.sleep(2)
 
@@ -112,10 +107,10 @@
         self.ssh.run(DELETE_BRIDGE)
 
         # Bring down wlan interface and disable 4addr mode
-        BRING_DOWN_WLAN = "ifconfig %s down" % brconfigs.iface_wlan
+        BRING_DOWN_WLAN = f"ifconfig {brconfigs.iface_wlan} down"
         self.ssh.run(BRING_DOWN_WLAN)
         time.sleep(2)
-        DISABLE_4ADDR = "iw dev %s set 4addr off" % (brconfigs.iface_wlan)
+        DISABLE_4ADDR = f"iw dev {brconfigs.iface_wlan} set 4addr off"
         self.ssh.run(DISABLE_4ADDR)
         time.sleep(1)
         logging.info("Bridge interface is down")
diff --git a/src/antlion/controllers/ap_lib/dhcp_config.py b/packages/antlion/controllers/ap_lib/dhcp_config.py
similarity index 84%
rename from src/antlion/controllers/ap_lib/dhcp_config.py
rename to packages/antlion/controllers/ap_lib/dhcp_config.py
index a50b6d0..5fa8cf0 100644
--- a/src/antlion/controllers/ap_lib/dhcp_config.py
+++ b/packages/antlion/controllers/ap_lib/dhcp_config.py
@@ -13,6 +13,7 @@
 # limitations under the License.
 
 import copy
+from ipaddress import IPv4Address, IPv4Network
 
 _ROUTER_DNS = "8.8.8.8, 4.4.4.4"
 
@@ -32,13 +33,13 @@
 
     def __init__(
         self,
-        subnet,
-        start=None,
-        end=None,
-        router=None,
-        lease_time=None,
-        additional_parameters={},
-        additional_options={},
+        subnet: IPv4Network,
+        start: IPv4Address | None = None,
+        end: IPv4Address | None = None,
+        router: IPv4Address | None = None,
+        lease_time: int | None = None,
+        additional_parameters: dict[str, str] = {},
+        additional_options: dict[str, int | str] = {},
     ):
         """
         Args:
@@ -96,13 +97,12 @@
             # configuration. The improved logic that we can use is:
             #    a) erroring out if start and end encompass the whole network, and
             #    b) picking any address before self.start or after self.end.
-            self.router = None
             for host in self.network.hosts():
                 if host < self.start or host > self.end:
                     self.router = host
                     break
 
-            if not self.router:
+            if not hasattr(self, "router"):
                 raise ValueError("No useable host found.")
 
         self.lease_time = lease_time
@@ -157,9 +157,9 @@
         lines = []
 
         if self.default_lease_time:
-            lines.append("default-lease-time %d;" % self.default_lease_time)
+            lines.append(f"default-lease-time {self.default_lease_time};")
         if self.max_lease_time:
-            lines.append("max-lease-time %s;" % self.max_lease_time)
+            lines.append(f"max-lease-time {self.max_lease_time};")
 
         for subnet in self.subnets:
             address = subnet.network.network_address
@@ -173,31 +173,31 @@
 
             lines.append("subnet %s netmask %s {" % (address, mask))
             lines.append("\tpool {")
-            lines.append("\t\toption subnet-mask %s;" % mask)
-            lines.append("\t\toption routers %s;" % router)
-            lines.append("\t\trange %s %s;" % (start, end))
+            lines.append(f"\t\toption subnet-mask {mask};")
+            lines.append(f"\t\toption routers {router};")
+            lines.append(f"\t\trange {start} {end};")
             if lease_time:
-                lines.append("\t\tdefault-lease-time %d;" % lease_time)
-                lines.append("\t\tmax-lease-time %d;" % lease_time)
+                lines.append(f"\t\tdefault-lease-time {lease_time};")
+                lines.append(f"\t\tmax-lease-time {lease_time};")
             for param, value in additional_parameters.items():
-                lines.append("\t\t%s %s;" % (param, value))
+                lines.append(f"\t\t{param} {value};")
             for option, value in additional_options.items():
-                lines.append("\t\toption %s %s;" % (option, value))
+                lines.append(f"\t\toption {option} {value};")
             lines.append("\t}")
             lines.append("}")
 
         for mapping in self.static_mappings:
             identifier = mapping.identifier
             fixed_address = mapping.ipv4_address
-            host_fake_name = "host%s" % identifier.replace(":", "")
+            host_fake_name = f"host{identifier.replace(':', '')}"
             lease_time = mapping.lease_time
 
             lines.append("host %s {" % host_fake_name)
-            lines.append("\thardware ethernet %s;" % identifier)
-            lines.append("\tfixed-address %s;" % fixed_address)
+            lines.append(f"\thardware ethernet {identifier};")
+            lines.append(f"\tfixed-address {fixed_address};")
             if lease_time:
-                lines.append("\tdefault-lease-time %d;" % lease_time)
-                lines.append("\tmax-lease-time %d;" % lease_time)
+                lines.append(f"\tdefault-lease-time {lease_time};")
+                lines.append(f"\tmax-lease-time {lease_time};")
             lines.append("}")
 
         config_str = "\n".join(lines)
diff --git a/src/antlion/controllers/ap_lib/dhcp_server.py b/packages/antlion/controllers/ap_lib/dhcp_server.py
similarity index 80%
rename from src/antlion/controllers/ap_lib/dhcp_server.py
rename to packages/antlion/controllers/ap_lib/dhcp_server.py
index c52983b..dd3f608 100644
--- a/src/antlion/controllers/ap_lib/dhcp_server.py
+++ b/packages/antlion/controllers/ap_lib/dhcp_server.py
@@ -12,13 +12,15 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+import logging
 import time
 
+from mobly import logger
 from tenacity import retry, retry_if_exception_type, stop_after_attempt, wait_fixed
 
 from antlion.controllers.ap_lib.dhcp_config import DhcpConfig
 from antlion.controllers.utils_lib.commands import shell
-from antlion import logger
+from antlion.runner import Runner
 
 
 class Error(Exception):
@@ -40,7 +42,7 @@
 
     PROGRAM_FILE = "dhcpd"
 
-    def __init__(self, runner, interface, working_dir="/tmp"):
+    def __init__(self, runner: Runner, interface: str, working_dir: str = "/tmp"):
         """
         Args:
             runner: Object that has a run_async and run methods for running
@@ -48,16 +50,21 @@
             interface: string, The name of the interface to use.
             working_dir: The directory to work out of.
         """
-        self._log = logger.create_logger(lambda msg: f"[DHCP Server|{interface}] {msg}")
+        self._log = logger.PrefixLoggerAdapter(
+            logging.getLogger(),
+            {
+                logger.PrefixLoggerAdapter.EXTRA_KEY_LOG_PREFIX: f"[DHCP Server|{interface}]",
+            },
+        )
 
         self._runner = runner
         self._working_dir = working_dir
-        self._shell = shell.ShellCommand(runner, working_dir)
-        self._stdio_log_file = f"dhcpd_{interface}.log"
-        self._config_file = f"dhcpd_{interface}.conf"
-        self._lease_file = f"dhcpd_{interface}.leases"
-        self._pid_file = f"dhcpd_{interface}.pid"
-        self._identifier = f"{self.PROGRAM_FILE}.*{self._config_file}"
+        self._shell = shell.ShellCommand(runner)
+        self._stdio_log_file = f"{working_dir}/dhcpd_{interface}.log"
+        self._config_file = f"{working_dir}/dhcpd_{interface}.conf"
+        self._lease_file = f"{working_dir}/dhcpd_{interface}.leases"
+        self._pid_file = f"{working_dir}/dhcpd_{interface}.pid"
+        self._identifier: int | None = None
 
     # There is a slight timing issue where if the proc filesystem in Linux
     # doesn't get updated in time as when this is called, the NoInterfaceError
@@ -98,29 +105,32 @@
 
         base_command = f'cd "{self._working_dir}"; {dhcpd_command}'
         job_str = f'{base_command} > "{self._stdio_log_file}" 2>&1'
-        self._runner.run_async(job_str)
+        self._identifier = int(self._runner.run_async(job_str).stdout)
 
         try:
             self._wait_for_process(timeout=timeout_sec)
             self._wait_for_server(timeout=timeout_sec)
         except:
-            self._log.warn("Failed to start DHCP server.")
-            self._log.info("DHCP configuration:\n" + config.render_config_file() + "\n")
-            self._log.info("DHCP logs:\n" + self.get_logs() + "\n")
+            self._log.warning("Failed to start DHCP server.")
+            self._log.info(f"DHCP configuration:\n{config.render_config_file()}\n")
+            self._log.info(f"DHCP logs:\n{self.get_logs()}\n")
             self.stop()
             raise
 
-    def stop(self):
+    def stop(self) -> None:
         """Kills the daemon if it is running."""
-        if self.is_alive():
+        if self._identifier and self.is_alive():
             self._shell.kill(self._identifier)
+            self._identifier = None
 
-    def is_alive(self):
+    def is_alive(self) -> bool:
         """
         Returns:
             True if the daemon is running.
         """
-        return self._shell.is_alive(self._identifier)
+        if self._identifier:
+            return self._shell.is_alive(self._identifier)
+        return False
 
     def get_logs(self) -> str:
         """Pulls the log files from where dhcp server is running.
@@ -130,7 +140,7 @@
         """
         return self._shell.read_file(self._stdio_log_file)
 
-    def _wait_for_process(self, timeout=60):
+    def _wait_for_process(self, timeout: float = 60) -> None:
         """Waits for the process to come up.
 
         Waits until the dhcp server process is found running, or there is
@@ -146,7 +156,7 @@
 
         self._scan_for_errors(True)
 
-    def _wait_for_server(self, timeout=60):
+    def _wait_for_server(self, timeout: float = 60) -> None:
         """Waits for dhcp server to report that the server is up.
 
         Waits until dhcp server says the server has been brought up or an
@@ -164,7 +174,7 @@
 
             self._scan_for_errors(True)
 
-    def _scan_for_errors(self, should_be_up):
+    def _scan_for_errors(self, should_be_up: bool) -> None:
         """Scans the dhcp server log for any errors.
 
         Args:
@@ -195,7 +205,7 @@
         if should_be_up and is_dead:
             raise Error("Dhcp server failed to start.", self)
 
-    def _write_configs(self, config):
+    def _write_configs(self, config: DhcpConfig) -> None:
         """Writes the configs to the dhcp server config file."""
         self._shell.delete_file(self._config_file)
         config_str = config.render_config_file()
diff --git a/src/antlion/controllers/ap_lib/extended_capabilities.py b/packages/antlion/controllers/ap_lib/extended_capabilities.py
similarity index 97%
rename from src/antlion/controllers/ap_lib/extended_capabilities.py
rename to packages/antlion/controllers/ap_lib/extended_capabilities.py
index 82029cc..4570409 100644
--- a/src/antlion/controllers/ap_lib/extended_capabilities.py
+++ b/packages/antlion/controllers/ap_lib/extended_capabilities.py
@@ -15,7 +15,6 @@
 # limitations under the License.
 
 from enum import IntEnum, unique
-from typing import Tuple
 
 
 @unique
@@ -119,7 +118,7 @@
     # 88-n reserved
 
 
-def _offsets(ext_cap_offset: ExtendedCapability) -> Tuple[int, int]:
+def _offsets(ext_cap_offset: ExtendedCapability) -> tuple[int, int]:
     """For given capability, return the byte and bit offsets within the field.
 
     802.11 divides the extended capability field into bytes, as does the
@@ -166,7 +165,7 @@
             ExtendedCapability.MAX_NUMBER_OF_MSDUS_IN_A_MSDU,
         ]:
             raise NotImplementedError(
-                f"{ext_cap.name} not implemented yet by {__class__}"
+                f"{ext_cap.name} not implemented yet by {self.__class__}"
             )
         byte_offset, bit_offset = _offsets(ext_cap)
         if len(self._ext_cap) > byte_offset:
diff --git a/src/antlion/controllers/ap_lib/hostapd.py b/packages/antlion/controllers/ap_lib/hostapd.py
similarity index 66%
rename from src/antlion/controllers/ap_lib/hostapd.py
rename to packages/antlion/controllers/ap_lib/hostapd.py
index b3f780d..9226c6b 100644
--- a/src/antlion/controllers/ap_lib/hostapd.py
+++ b/packages/antlion/controllers/ap_lib/hostapd.py
@@ -17,16 +17,21 @@
 import logging
 import re
 import time
+from datetime import datetime, timezone
+from subprocess import CalledProcessError
+from typing import Any, Iterable
 
-from typing import Any, Dict, Optional, Set
+from tenacity import retry, retry_if_exception_type, stop_after_attempt
 
 from antlion.controllers.ap_lib import hostapd_constants
 from antlion.controllers.ap_lib.extended_capabilities import ExtendedCapabilities
+from antlion.controllers.ap_lib.hostapd_config import HostapdConfig
 from antlion.controllers.ap_lib.wireless_network_management import (
     BssTransitionManagementRequest,
 )
 from antlion.controllers.utils_lib.commands import shell
-from antlion.libs.proc.job import Result
+from antlion.logger import LogLevel
+from antlion.runner import Runner
 
 PROGRAM_FILE = "/usr/sbin/hostapd"
 CLI_PROGRAM_FILE = "/usr/bin/hostapd_cli"
@@ -36,6 +41,10 @@
     """An error caused by hostapd."""
 
 
+class InterfaceInitError(Error):
+    """Interface initialization failed during hostapd start."""
+
+
 class Hostapd(object):
     """Manages the hostapd program.
 
@@ -43,7 +52,9 @@
         config: The hostapd configuration that is being used.
     """
 
-    def __init__(self, runner: Any, interface: str, working_dir: str = "/tmp") -> None:
+    def __init__(
+        self, runner: Runner, interface: str, working_dir: str = "/tmp"
+    ) -> None:
         """
         Args:
             runner: Object that has run_async and run methods for executing
@@ -54,18 +65,21 @@
         self._runner = runner
         self._interface = interface
         self._working_dir = working_dir
-        self.config = None
-        self._shell = shell.ShellCommand(runner, working_dir)
-        self._log_file = f"hostapd-{self._interface}.log"
-        self._ctrl_file = f"hostapd-{self._interface}.ctrl"
-        self._config_file = f"hostapd-{self._interface}.conf"
+        self.config: HostapdConfig | None = None
+        self._shell = shell.ShellCommand(runner)
+        self._log_file = f"{working_dir}/hostapd-{self._interface}.log"
+        self._ctrl_file = f"{working_dir}/hostapd-{self._interface}.ctrl"
+        self._config_file = f"{working_dir}/hostapd-{self._interface}.conf"
         self._identifier = f"{PROGRAM_FILE}.*{self._config_file}"
 
+    @retry(
+        stop=stop_after_attempt(3), retry=retry_if_exception_type(InterfaceInitError)
+    )
     def start(
         self,
-        config: Any,
+        config: HostapdConfig,
         timeout: int = 60,
-        additional_parameters: Optional[Dict[str, Any]] = None,
+        additional_parameters: dict[str, Any] | None = None,
     ) -> None:
         """Starts hostapd
 
@@ -86,15 +100,17 @@
             it's impossible to wait on. If you need to check if configs are ok
             then periodic checks to is_running and logs should be used.
         """
-        if self.is_alive():
-            self.stop()
+        if additional_parameters is None:
+            additional_parameters = {}
+
+        self.stop()
 
         self.config = config
 
         self._shell.delete_file(self._ctrl_file)
         self._shell.delete_file(self._log_file)
         self._shell.delete_file(self._config_file)
-        self._write_configs(additional_parameters=additional_parameters)
+        self._write_configs(additional_parameters)
 
         hostapd_command = f'{PROGRAM_FILE} -dd -t "{self._config_file}"'
         base_command = f'cd "{self._working_dir}"; {hostapd_command}'
@@ -113,18 +129,23 @@
         if self.is_alive():
             self._shell.kill(self._identifier)
 
-    def channel_switch(self, channel_num: int) -> None:
+    def channel_switch(self, channel_num: int, csa_beacon_count: int) -> None:
         """Switches to the given channel.
 
+        Args:
+            channel_num: Channel to switch to.
+            csa_beacon_count: Number of channel switch announcement beacons to
+                send.
+
         Returns:
             acts.libs.proc.job.Result containing the results of the command.
+
         Raises: See _run_hostapd_cli_cmd
         """
         try:
             channel_freq = hostapd_constants.FREQUENCY_MAP[channel_num]
         except KeyError:
             raise ValueError(f"Invalid channel number {channel_num}")
-        csa_beacon_count = 10
         channel_switch_cmd = f"chan_switch {csa_beacon_count} {channel_freq}"
         self._run_hostapd_cli_cmd(channel_switch_cmd)
 
@@ -135,7 +156,7 @@
         """
         status_cmd = "status"
         result = self._run_hostapd_cli_cmd(status_cmd)
-        match = re.search(r"^channel=(\d+)$", result.stdout, re.MULTILINE)
+        match = re.search(r"^channel=(\d+)$", result, re.MULTILINE)
         if not match:
             raise Error("Current channel could not be determined")
         try:
@@ -144,36 +165,26 @@
             raise Error("Internal error: current channel could not be parsed")
         return channel
 
-    def _list_sta(self) -> Result:
-        """List all associated STA MAC addresses.
-
-        Returns:
-            acts.libs.proc.job.Result containing the results of the command.
-        Raises: See _run_hostapd_cli_cmd
-        """
-        list_sta_cmd = "list_sta"
-        return self._run_hostapd_cli_cmd(list_sta_cmd)
-
-    def get_stas(self) -> Set[str]:
+    def get_stas(self) -> set[str]:
         """Return MAC addresses of all associated STAs."""
-        list_sta_result = self._list_sta()
+        list_sta_result = self._run_hostapd_cli_cmd("list_sta")
         stas = set()
-        for line in list_sta_result.stdout.splitlines():
+        for line in list_sta_result.splitlines():
             # Each line must be a valid MAC address. Capture it.
             m = re.match(r"((?:[0-9A-Fa-f]{2}:){5}[0-9A-Fa-f]{2})", line)
             if m:
                 stas.add(m.group(1))
         return stas
 
-    def _sta(self, sta_mac: str) -> Result:
+    def _sta(self, sta_mac: str) -> str:
         """Return hostapd's detailed info about an associated STA.
 
         Returns:
-            acts.libs.proc.job.Result containing the results of the command.
+            Results of the command.
+
         Raises: See _run_hostapd_cli_cmd
         """
-        sta_cmd = "sta {}".format(sta_mac)
-        return self._run_hostapd_cli_cmd(sta_cmd)
+        return self._run_hostapd_cli_cmd(f"sta {sta_mac}")
 
     def get_sta_extended_capabilities(self, sta_mac: str) -> ExtendedCapabilities:
         """Get extended capabilities for the given STA, as seen by the AP.
@@ -189,7 +200,7 @@
         # hostapd ext_capab field is a hex encoded string representation of the
         # 802.11 extended capabilities structure, each byte represented by two
         # chars (each byte having format %02x).
-        m = re.search(r"ext_capab=([0-9A-Faf]+)", sta_result.stdout, re.MULTILINE)
+        m = re.search(r"ext_capab=([0-9A-Faf]+)", sta_result, re.MULTILINE)
         if not m:
             raise Error("Failed to get ext_capab from STA details")
         raw_ext_capab = m.group(1)
@@ -198,9 +209,51 @@
         except ValueError:
             raise Error(f"ext_capab contains invalid hex string repr {raw_ext_capab}")
 
+    def sta_authenticated(self, sta_mac: str) -> bool:
+        """Is the given STA authenticated?
+
+        Args:
+            sta_mac: MAC address of the STA in question.
+        Returns:
+            True if AP sees that the STA is authenticated, False otherwise.
+        Raises:
+            Error if authenticated status for the STA cannot be obtained.
+        """
+        sta_result = self._sta(sta_mac)
+        m = re.search(r"flags=.*\[AUTH\]", sta_result, re.MULTILINE)
+        return bool(m)
+
+    def sta_associated(self, sta_mac: str) -> bool:
+        """Is the given STA associated?
+
+        Args:
+            sta_mac: MAC address of the STA in question.
+        Returns:
+            True if AP sees that the STA is associated, False otherwise.
+        Raises:
+            Error if associated status for the STA cannot be obtained.
+        """
+        sta_result = self._sta(sta_mac)
+        m = re.search(r"flags=.*\[ASSOC\]", sta_result, re.MULTILINE)
+        return bool(m)
+
+    def sta_authorized(self, sta_mac: str) -> bool:
+        """Is the given STA authorized (802.1X controlled port open)?
+
+        Args:
+            sta_mac: MAC address of the STA in question.
+        Returns:
+            True if AP sees that the STA is 802.1X authorized, False otherwise.
+        Raises:
+            Error if authorized status for the STA cannot be obtained.
+        """
+        sta_result = self._sta(sta_mac)
+        m = re.search(r"flags=.*\[AUTHORIZED\]", sta_result, re.MULTILINE)
+        return bool(m)
+
     def _bss_tm_req(
         self, client_mac: str, request: BssTransitionManagementRequest
-    ) -> Result:
+    ) -> None:
         """Send a hostapd BSS Transition Management request command to a STA.
 
         Args:
@@ -228,21 +281,22 @@
             bss_tm_req_cmd += f" valid_int={request.validity_interval}"
 
         # neighbor= can appear multiple times, so it requires special handling.
-        for neighbor in request.candidate_list:
-            bssid = neighbor.bssid
-            bssid_info = hex(neighbor.bssid_information)
-            op_class = neighbor.operating_class
-            chan_num = neighbor.channel_number
-            phy_type = int(neighbor.phy_type)
-            bss_tm_req_cmd += (
-                f" neighbor={bssid},{bssid_info},{op_class},{chan_num},{phy_type}"
-            )
+        if request.candidate_list is not None:
+            for neighbor in request.candidate_list:
+                bssid = neighbor.bssid
+                bssid_info = hex(neighbor.bssid_information)
+                op_class = neighbor.operating_class
+                chan_num = neighbor.channel_number
+                phy_type = int(neighbor.phy_type)
+                bss_tm_req_cmd += (
+                    f" neighbor={bssid},{bssid_info},{op_class},{chan_num},{phy_type}"
+                )
 
-        return self._run_hostapd_cli_cmd(bss_tm_req_cmd)
+        self._run_hostapd_cli_cmd(bss_tm_req_cmd)
 
     def send_bss_transition_management_req(
         self, sta_mac: str, request: BssTransitionManagementRequest
-    ) -> Result:
+    ) -> None:
         """Send a BSS Transition Management request to an associated STA.
 
         Args:
@@ -252,7 +306,7 @@
             acts.libs.proc.job.Result containing the results of the command.
         Raises: See _run_hostapd_cli_cmd
         """
-        return self._bss_tm_req(sta_mac, request)
+        self._bss_tm_req(sta_mac, request)
 
     def is_alive(self) -> bool:
         """
@@ -268,29 +322,50 @@
             A string of the hostapd logs.
         """
         # TODO: Auto pulling of logs when stop is called.
-        return self._shell.read_file(self._log_file)
+        with LogLevel(self._runner.log, logging.INFO):
+            log = self._shell.read_file(self._log_file)
 
-    def _run_hostapd_cli_cmd(self, cmd: str) -> Result:
+        # Convert epoch to human-readable times
+        result: list[str] = []
+        for line in log.splitlines():
+            try:
+                end = line.index(":")
+                epoch = float(line[:end])
+                timestamp = datetime.fromtimestamp(epoch, timezone.utc).strftime(
+                    "%m-%d %H:%M:%S.%f"
+                )
+                result.append(f"{timestamp} {line[end+1:]}")
+            except ValueError:  # Colon not found or float conversion failure
+                result.append(line)
+
+        return "\n".join(result)
+
+    def _run_hostapd_cli_cmd(self, cmd: str) -> str:
         """Run the given hostapd_cli command.
 
         Runs the command, waits for the output (up to default timeout), and
             returns the result.
 
         Returns:
-            acts.libs.proc.job.Result containing the results of the ssh command.
+            Results of the ssh command.
 
         Raises:
-            acts.lib.proc.job.TimeoutError: When the remote command took too
+            subprocess.TimeoutExpired: When the remote command took too
                 long to execute.
             antlion.controllers.utils_lib.ssh.connection.Error: When the ssh
                 connection failed to be created.
-            antlion.controllers.utils_lib.ssh.connection.CommandError: Ssh worked,
-                but the command had an error executing.
+            subprocess.CalledProcessError: Ssh worked, but the command had an
+                error executing.
         """
         hostapd_cli_job = (
             f"cd {self._working_dir}; " f"{CLI_PROGRAM_FILE} -p {self._ctrl_file} {cmd}"
         )
-        return self._runner.run(hostapd_cli_job)
+        proc = self._runner.run(hostapd_cli_job)
+        if proc.returncode:
+            raise CalledProcessError(
+                proc.returncode, hostapd_cli_job, proc.stdout, proc.stderr
+            )
+        return proc.stdout.decode("utf-8")
 
     def _wait_for_process(self, timeout: int = 60) -> None:
         """Waits for the process to come up.
@@ -333,7 +408,10 @@
                           is thrown.
 
         Raises:
-            Error: Raised when a hostapd error is found.
+            Error: when a hostapd error is found.
+            InterfaceInitError: when the interface fails to initialize. This is
+                a recoverable error that is usually caused by other processes
+                using this interface at the same time.
         """
         # Store this so that all other errors have priority.
         is_dead = not self.is_alive()
@@ -342,29 +420,27 @@
             "Interface initialization failed", self._log_file
         )
         if bad_config:
-            raise Error("Interface failed to start", self)
+            raise InterfaceInitError("Interface failed to initialize", self)
 
         bad_config = self._shell.search_file(
             f"Interface {self._interface} wasn't started", self._log_file
         )
         if bad_config:
-            raise Error("Interface failed to start", self)
+            raise Error("Interface wasn't started", self)
 
         if should_be_up and is_dead:
             raise Error("Hostapd failed to start", self)
 
-    def _write_configs(
-        self, additional_parameters: Optional[Dict[str, Any]] = None
-    ) -> None:
+    def _write_configs(self, additional_parameters: dict[str, Any]) -> None:
         """Writes the configs to the hostapd config file."""
         self._shell.delete_file(self._config_file)
 
         interface_configs = collections.OrderedDict()
         interface_configs["interface"] = self._interface
         interface_configs["ctrl_interface"] = self._ctrl_file
-        pairs = (f"{k}={v}" for k, v in interface_configs.items())
+        pairs: Iterable[str] = (f"{k}={v}" for k, v in interface_configs.items())
 
-        packaged_configs = self.config.package_configs()
+        packaged_configs = self.config.package_configs() if self.config else []
         if additional_parameters:
             packaged_configs.append(additional_parameters)
         for packaged_config in packaged_configs:
diff --git a/src/antlion/controllers/ap_lib/hostapd_ap_preset.py b/packages/antlion/controllers/ap_lib/hostapd_ap_preset.py
similarity index 90%
rename from src/antlion/controllers/ap_lib/hostapd_ap_preset.py
rename to packages/antlion/controllers/ap_lib/hostapd_ap_preset.py
index 3b694c0..d32e97e 100644
--- a/src/antlion/controllers/ap_lib/hostapd_ap_preset.py
+++ b/packages/antlion/controllers/ap_lib/hostapd_ap_preset.py
@@ -12,9 +12,10 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from typing import Any, FrozenSet, List, Optional
+from __future__ import annotations
 
-from antlion import utils
+from typing import Any, FrozenSet, TypeVar
+
 from antlion.controllers.ap_lib import hostapd_config, hostapd_constants, hostapd_utils
 from antlion.controllers.ap_lib.hostapd_security import Security
 from antlion.controllers.ap_lib.third_party_ap_profiles import (
@@ -27,8 +28,10 @@
     tplink,
 )
 
+T = TypeVar("T")
 
-def _get_or_default(var: Optional[Any], default_value: Any) -> Any:
+
+def _get_or_default(var: T | None, default_value: T) -> T:
     """Check variable and return non-null value.
 
     Args:
@@ -42,28 +45,29 @@
 
 
 def create_ap_preset(
+    iface_wlan_2g: str,
+    iface_wlan_5g: str,
     profile_name: str = "whirlwind",
-    iface_wlan_2g: Optional[str] = None,
-    iface_wlan_5g: Optional[str] = None,
-    channel: Optional[int] = None,
-    mode: Optional[str] = None,
-    frequency: Optional[int] = None,
-    security: Optional[Security] = None,
-    pmf_support: Optional[int] = None,
-    ssid: Optional[str] = None,
-    hidden: Optional[bool] = None,
-    dtim_period: Optional[int] = None,
-    frag_threshold: Optional[int] = None,
-    rts_threshold: Optional[int] = None,
-    force_wmm: Optional[bool] = None,
-    beacon_interval: Optional[int] = None,
-    short_preamble: Optional[bool] = None,
-    n_capabilities: Optional[List[Any]] = None,
-    ac_capabilities: Optional[List[Any]] = None,
-    vht_bandwidth: Optional[int] = None,
+    channel: int | None = None,
+    mode: str | None = None,
+    frequency: int | None = None,
+    security: Security | None = None,
+    pmf_support: int | None = None,
+    ssid: str | None = None,
+    hidden: bool | None = None,
+    dtim_period: int | None = None,
+    frag_threshold: int | None = None,
+    rts_threshold: int | None = None,
+    force_wmm: bool | None = None,
+    beacon_interval: int | None = None,
+    short_preamble: bool | None = None,
+    n_capabilities: list[Any] | None = None,
+    ac_capabilities: list[Any] | None = None,
+    vht_bandwidth: int | None = None,
     wnm_features: FrozenSet[hostapd_constants.WnmFeature] = frozenset(),
-    bss_settings: List[Any] = [],
-):
+    bss_settings: list[Any] = [],
+    ap_max_inactivity: int | None = None,
+) -> hostapd_config.HostapdConfig:
     """AP preset config generator.  This a wrapper for hostapd_config but
        but supplies the default settings for the preset that is selected.
 
@@ -74,12 +78,12 @@
     Args:
         profile_name: The name of the device want the preset for.
                       Options: whirlwind
-        channel: int, channel number.
-        dtim: int, DTIM value of the AP, default is 2.
-        frequency: int, frequency of channel.
+        channel: Channel number.
+        dtim_period: DTIM value of the AP, default is 2.
+        frequency: Frequency of channel.
         security: The security settings to use.
-        ssid: string, The name of the ssid to broadcast.
-        pmf_support: int, whether pmf is disabled, enabled, or required
+        ssid: The name of the ssid to broadcast.
+        pmf_support: Whether pmf is disabled, enabled, or required
         vht_bandwidth: VHT bandwidth for 11ac operation.
         bss_settings: The settings for all bss.
         iface_wlan_2g: the wlan 2g interface name of the AP.
@@ -94,20 +98,24 @@
         n_capabilities: 802.11n capabilities for for BSS to advertise.
         ac_capabilities: 802.11ac capabilities for for BSS to advertise.
         wnm_features: WNM features to enable on the AP.
+        ap_max_inactivity: See hostapd.conf's ap_max_inactivity setting.
 
     Returns: A hostapd_config object that can be used by the hostapd object.
     """
+    if security is None:
+        security = Security()
 
     # Verify interfaces
     hostapd_utils.verify_interface(iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST)
     hostapd_utils.verify_interface(iface_wlan_5g, hostapd_constants.INTERFACE_5G_LIST)
 
-    if channel:
+    if channel is not None:
         frequency = hostapd_config.get_frequency_for_channel(channel)
-    elif frequency:
+    elif frequency is not None:
         channel = hostapd_config.get_channel_for_frequency(frequency)
-    else:
-        raise ValueError("Specify either frequency or channel.")
+
+    if channel is None or frequency is None:
+        raise ValueError("Must specify channel or frequency")
 
     if profile_name == "whirlwind":
         # profile indicates phy mode is 11bgn for 2.4Ghz or 11acn for 5Ghz
@@ -149,6 +157,7 @@
                 rts_threshold=rts_threshold,
                 wnm_features=wnm_features,
                 bss_settings=bss_settings,
+                ap_max_inactivity=ap_max_inactivity,
             )
         else:
             interface = iface_wlan_5g
@@ -164,7 +173,7 @@
                 extended_channel = hostapd_constants.N_CAPABILITY_HT20
             # Define the n capability vector for 20 MHz and higher bandwidth
             if not vht_bandwidth:
-                pass
+                n_capabilities = _get_or_default(n_capabilities, [])
             elif vht_bandwidth >= 40:
                 n_capabilities = _get_or_default(
                     n_capabilities,
@@ -217,9 +226,11 @@
                 frequency=frequency,
                 frag_threshold=frag_threshold,
                 rts_threshold=rts_threshold,
+                wnm_features=wnm_features,
                 n_capabilities=n_capabilities,
                 ac_capabilities=ac_capabilities,
                 bss_settings=bss_settings,
+                ap_max_inactivity=ap_max_inactivity,
             )
     elif profile_name == "whirlwind_11ab_legacy":
         if frequency < 5000:
@@ -246,6 +257,7 @@
             ac_capabilities=[],
             vht_bandwidth=None,
             wnm_features=wnm_features,
+            ap_max_inactivity=ap_max_inactivity,
         )
     elif profile_name == "whirlwind_11ag_legacy":
         if frequency < 5000:
@@ -272,6 +284,7 @@
             ac_capabilities=[],
             vht_bandwidth=None,
             wnm_features=wnm_features,
+            ap_max_inactivity=ap_max_inactivity,
         )
     elif profile_name == "mistral":
         hidden = _get_or_default(hidden, False)
@@ -288,6 +301,12 @@
             "vendor_elements": "dd0cf4f5e80505ff0000ffffffff" "070a75732024041e95051e00"
         }
         default_configs = {"bridge": "br-lan", "iapp_interface": "br-lan"}
+        additional_params = (
+            vendor_elements
+            | default_configs
+            | hostapd_constants.ENABLE_RRM_BEACON_REPORT
+            | hostapd_constants.ENABLE_RRM_NEIGHBOR_REPORT
+        )
 
         if frequency < 5000:
             interface = iface_wlan_2g
@@ -304,12 +323,6 @@
                 ],
             )
 
-            additional_params = utils.merge_dicts(
-                vendor_elements,
-                hostapd_constants.ENABLE_RRM_BEACON_REPORT,
-                hostapd_constants.ENABLE_RRM_NEIGHBOR_REPORT,
-                default_configs,
-            )
             config = hostapd_config.HostapdConfig(
                 ssid=ssid,
                 hidden=hidden,
@@ -329,6 +342,7 @@
                 bss_settings=bss_settings,
                 additional_parameters=additional_params,
                 set_ap_defaults_profile=profile_name,
+                ap_max_inactivity=ap_max_inactivity,
             )
         else:
             interface = iface_wlan_5g
@@ -385,12 +399,6 @@
                 ],
             )
 
-            additional_params = utils.merge_dicts(
-                vendor_elements,
-                hostapd_constants.ENABLE_RRM_BEACON_REPORT,
-                hostapd_constants.ENABLE_RRM_NEIGHBOR_REPORT,
-                default_configs,
-            )
             config = hostapd_config.HostapdConfig(
                 ssid=ssid,
                 hidden=hidden,
@@ -412,6 +420,7 @@
                 bss_settings=bss_settings,
                 additional_parameters=additional_params,
                 set_ap_defaults_profile=profile_name,
+                ap_max_inactivity=ap_max_inactivity,
             )
     elif profile_name == "actiontec_pk5000":
         config = actiontec.actiontec_pk5000(
diff --git a/src/antlion/controllers/ap_lib/hostapd_bss_settings.py b/packages/antlion/controllers/ap_lib/hostapd_bss_settings.py
similarity index 62%
rename from src/antlion/controllers/ap_lib/hostapd_bss_settings.py
rename to packages/antlion/controllers/ap_lib/hostapd_bss_settings.py
index 56a5422..2f4d261 100644
--- a/src/antlion/controllers/ap_lib/hostapd_bss_settings.py
+++ b/packages/antlion/controllers/ap_lib/hostapd_bss_settings.py
@@ -14,6 +14,8 @@
 
 import collections
 
+from antlion.controllers.ap_lib.hostapd_security import Security
+
 
 class BssSettings(object):
     """Settings for a bss.
@@ -21,22 +23,30 @@
     Settings for a bss to allow multiple network on a single device.
 
     Attributes:
-        name: string, The name that this bss will go by.
-        ssid: string, The name of the ssid to brodcast.
-        hidden: bool, If true then the ssid will be hidden.
-        security: Security, The security settings to use.
+        name: The name that this bss will go by.
+        ssid: The name of the ssid to broadcast.
+        hidden: If true then the ssid will be hidden.
+        security: The security settings to use.
+        bssid: The bssid to use.
     """
 
-    def __init__(self, name, ssid, hidden=False, security=None, bssid=None):
+    def __init__(
+        self,
+        name: str,
+        ssid: str,
+        security: Security,
+        hidden: bool = False,
+        bssid: str | None = None,
+    ):
         self.name = name
         self.ssid = ssid
-        self.hidden = hidden
         self.security = security
+        self.hidden = hidden
         self.bssid = bssid
 
-    def generate_dict(self):
+    def generate_dict(self) -> dict[str, str | int]:
         """Returns: A dictionary of bss settings."""
-        settings = collections.OrderedDict()
+        settings: dict[str, str | int] = collections.OrderedDict()
         settings["bss"] = self.name
         if self.bssid:
             settings["bssid"] = self.bssid
@@ -44,9 +54,8 @@
             settings["ssid"] = self.ssid
             settings["ignore_broadcast_ssid"] = 1 if self.hidden else 0
 
-        if self.security:
-            security_settings = self.security.generate_dict()
-            for k, v in security_settings.items():
-                settings[k] = v
+        security_settings = self.security.generate_dict()
+        for k, v in security_settings.items():
+            settings[k] = v
 
         return settings
diff --git a/src/antlion/controllers/ap_lib/hostapd_config.py b/packages/antlion/controllers/ap_lib/hostapd_config.py
similarity index 76%
rename from src/antlion/controllers/ap_lib/hostapd_config.py
rename to packages/antlion/controllers/ap_lib/hostapd_config.py
index a886e04..a5cd392 100644
--- a/src/antlion/controllers/ap_lib/hostapd_config.py
+++ b/packages/antlion/controllers/ap_lib/hostapd_config.py
@@ -14,12 +14,14 @@
 
 import collections
 import logging
-from typing import FrozenSet
+from typing import Any, FrozenSet
 
 from antlion.controllers.ap_lib import hostapd_constants
+from antlion.controllers.ap_lib.hostapd_bss_settings import BssSettings
+from antlion.controllers.ap_lib.hostapd_security import Security, SecurityMode
 
 
-def ht40_plus_allowed(channel):
+def ht40_plus_allowed(channel: int):
     """Returns: True iff HT40+ is enabled for this configuration."""
     channel_supported = (
         channel
@@ -30,7 +32,7 @@
     return channel_supported
 
 
-def ht40_minus_allowed(channel):
+def ht40_minus_allowed(channel: int):
     """Returns: True iff HT40- is enabled for this configuration."""
     channel_supported = (
         channel
@@ -41,11 +43,11 @@
     return channel_supported
 
 
-def get_frequency_for_channel(channel):
+def get_frequency_for_channel(channel: int):
     """The frequency associated with a given channel number.
 
     Args:
-        value: int channel number.
+        channel: Channel number.
 
     Returns:
         int, frequency in MHz associated with the channel.
@@ -55,14 +57,14 @@
         if channel == channel_iter:
             return frequency
     else:
-        raise ValueError("Unknown channel value: %r." % channel)
+        raise ValueError(f"Unknown channel value: {channel!r}.")
 
 
-def get_channel_for_frequency(frequency):
+def get_channel_for_frequency(frequency: int):
     """The channel number associated with a given frequency.
 
     Args:
-        value: int frequency in MHz.
+        frequency: Frequency in MHz.
 
     Returns:
         int, frequency associated with the channel.
@@ -77,7 +79,211 @@
     All the settings for a router that are not part of an ssid.
     """
 
-    def _get_11ac_center_channel_from_channel(self, channel):
+    def __init__(
+        self,
+        interface: str | None = None,
+        mode: str | None = None,
+        channel: int | None = None,
+        frequency: int | None = None,
+        n_capabilities: list[Any] | None = None,
+        beacon_interval: int | None = None,
+        dtim_period: int | None = None,
+        frag_threshold: int | None = None,
+        rts_threshold: int | None = None,
+        short_preamble: bool | None = None,
+        ssid: str | None = None,
+        hidden: bool = False,
+        security: Security | None = None,
+        bssid: str | None = None,
+        force_wmm: bool | None = None,
+        pmf_support: int | None = None,
+        obss_interval: int | None = None,
+        vht_channel_width: Any | None = None,
+        vht_center_channel: int | None = None,
+        ac_capabilities: list[Any] | None = None,
+        beacon_footer: str = "",
+        spectrum_mgmt_required: bool | None = None,
+        scenario_name: str | None = None,
+        min_streams: int | None = None,
+        wnm_features: FrozenSet[hostapd_constants.WnmFeature] = frozenset(),
+        bss_settings: list[Any] | None = None,
+        additional_parameters: dict[str, Any] | None = None,
+        set_ap_defaults_profile: str = "whirlwind",
+        ap_max_inactivity: int | None = None,
+    ) -> None:
+        """Construct a HostapdConfig.
+
+        You may specify channel or frequency, but not both.  Both options
+        are checked for validity (i.e. you can't specify an invalid channel
+        or a frequency that will not be accepted).
+
+        Args:
+            interface: The name of the interface to use.
+            mode: MODE_11x defined above.
+            channel: Channel number.
+            frequency: Frequency of channel.
+            n_capabilities: List of N_CAPABILITY_x defined above.
+            beacon_interval: Beacon interval of AP.
+            dtim_period: Include a DTIM every |dtim_period| beacons.
+            frag_threshold: Maximum outgoing data frame size.
+            rts_threshold: Maximum packet size without requiring explicit
+                protection via rts/cts or cts to self.
+            short_preamble: Whether to use a short preamble.
+            ssid: string, The name of the ssid to broadcast.
+            hidden: Should the ssid be hidden.
+            security: The security settings to use.
+            bssid: A MAC address like string for the BSSID.
+            force_wmm: True if we should force WMM on, False if we should
+                force it off, None if we shouldn't force anything.
+            pmf_support: One of PMF_SUPPORT_* above.  Controls whether the
+                client supports/must support 802.11w. If None, defaults to
+                required with wpa3, else defaults to disabled.
+            obss_interval: Interval in seconds that client should be
+                required to do background scans for overlapping BSSes.
+            vht_channel_width: Object channel width
+            vht_center_channel: Center channel of segment 0.
+            ac_capabilities: List of AC_CAPABILITY_x defined above.
+            beacon_footer: IE data (not validated) to be
+                placed at the end of the beacon.
+            spectrum_mgmt_required: True if we require the DUT to support
+                spectrum management.
+            scenario_name: To be included in file names, instead
+                of the interface name.
+            min_streams: Number of spatial streams required.
+            wnm_features: WNM features to enable on the AP.
+            control_interface: The file name to use as the control interface.
+            bss_settings: The settings for all bss.
+            additional_parameters: A dictionary of additional parameters to add
+                to the hostapd config.
+            set_ap_defaults_profile: profile name to load defaults from
+            ap_max_inactivity: See hostapd.conf's ap_max_inactivity setting.
+        """
+        if n_capabilities is None:
+            n_capabilities = []
+        if ac_capabilities is None:
+            ac_capabilities = []
+        if bss_settings is None:
+            bss_settings = []
+        if additional_parameters is None:
+            additional_parameters = {}
+        if security is None:
+            security = Security()
+
+        self.set_ap_defaults_profile = set_ap_defaults_profile
+        self._interface = interface
+        if channel is not None and frequency is not None:
+            raise ValueError("Specify either frequency or channel " "but not both.")
+
+        unknown_caps = [
+            cap
+            for cap in n_capabilities
+            if cap not in hostapd_constants.N_CAPABILITIES_MAPPING
+        ]
+        if unknown_caps:
+            raise ValueError(f"Unknown capabilities: {unknown_caps!r}")
+
+        if channel:
+            self.channel = channel
+        elif frequency:
+            self.frequency = frequency
+        else:
+            raise ValueError("Specify either frequency or channel.")
+
+        self._n_capabilities = set(n_capabilities)
+        if force_wmm is not None:
+            self._wmm_enabled = force_wmm
+        elif self._n_capabilities:
+            self._wmm_enabled = True
+        if self._n_capabilities and mode is None:
+            mode = hostapd_constants.MODE_11N_PURE
+        self._mode = mode
+
+        if not self.supports_frequency(self.frequency):
+            raise ValueError(
+                "Configured a mode %s that does not support "
+                "frequency %d" % (self._mode, self.frequency)
+            )
+
+        self._beacon_interval = beacon_interval
+        self._dtim_period = dtim_period
+        self._frag_threshold = frag_threshold
+        self._rts_threshold = rts_threshold
+        self._short_preamble = short_preamble
+        self._ssid = ssid
+        self._hidden = hidden
+        self._security = security
+        self._bssid = bssid
+        # Default PMF Values
+        if pmf_support is None:
+            if self.security and self.security.security_mode is SecurityMode.WPA3:
+                # Set PMF required for WP3
+                self._pmf_support = hostapd_constants.PMF_SUPPORT_REQUIRED
+            elif self.security and self.security.security_mode.is_wpa3():
+                # Default PMF to enabled for WPA3 mixed modes (can be
+                # overwritten by explicitly provided value)
+                self._pmf_support = hostapd_constants.PMF_SUPPORT_ENABLED
+            else:
+                # Default PMF to disabled for all other modes (can be
+                # overwritten by explicitly provided value)
+                self._pmf_support = hostapd_constants.PMF_SUPPORT_DISABLED
+        elif pmf_support not in hostapd_constants.PMF_SUPPORT_VALUES:
+            raise ValueError(f"Invalid value for pmf_support: {pmf_support!r}")
+        elif (
+            pmf_support != hostapd_constants.PMF_SUPPORT_REQUIRED
+            and self.security
+            and self.security.security_mode is SecurityMode.WPA3
+        ):
+            raise ValueError("PMF support must be required with wpa3.")
+        else:
+            self._pmf_support = pmf_support
+        self._obss_interval = obss_interval
+        if self.is_11ac:
+            if str(vht_channel_width) == "40" or str(vht_channel_width) == "20":
+                self._vht_oper_chwidth = hostapd_constants.VHT_CHANNEL_WIDTH_40
+            elif str(vht_channel_width) == "80":
+                self._vht_oper_chwidth = hostapd_constants.VHT_CHANNEL_WIDTH_80
+            elif str(vht_channel_width) == "160":
+                self._vht_oper_chwidth = hostapd_constants.VHT_CHANNEL_WIDTH_160
+            elif str(vht_channel_width) == "80+80":
+                self._vht_oper_chwidth = hostapd_constants.VHT_CHANNEL_WIDTH_80_80
+            elif vht_channel_width is not None:
+                raise ValueError("Invalid channel width")
+            else:
+                logging.warning(
+                    "No channel bandwidth specified.  Using 80MHz for 11ac."
+                )
+                self._vht_oper_chwidth = 1
+            if vht_center_channel is not None:
+                self._vht_oper_centr_freq_seg0_idx = vht_center_channel
+            elif vht_channel_width == 20 and channel is not None:
+                self._vht_oper_centr_freq_seg0_idx = channel
+            elif vht_channel_width == 20 and frequency is not None:
+                self._vht_oper_centr_freq_seg0_idx = get_channel_for_frequency(
+                    frequency
+                )
+            else:
+                self._vht_oper_centr_freq_seg0_idx = (
+                    self._get_11ac_center_channel_from_channel(self.channel)
+                )
+            self._ac_capabilities = set(ac_capabilities)
+        self._beacon_footer = beacon_footer
+        self._spectrum_mgmt_required = spectrum_mgmt_required
+        self._scenario_name = scenario_name
+        self._min_streams = min_streams
+        self._wnm_features = wnm_features
+        self._additional_parameters = additional_parameters
+
+        self._bss_lookup: dict[str, BssSettings] = collections.OrderedDict()
+        for bss in bss_settings:
+            if bss.name in self._bss_lookup:
+                raise ValueError(
+                    "Cannot have multiple bss settings with the same name."
+                )
+            self._bss_lookup[bss.name] = bss
+
+        self._ap_max_inactivity = ap_max_inactivity
+
+    def _get_11ac_center_channel_from_channel(self, channel: int) -> int:
         """Returns the center channel of the selected channel band based
         on the channel and channel bandwidth provided.
         """
@@ -92,14 +298,10 @@
             lower_channel_bound, upper_channel_bound = channel_map
             if lower_channel_bound <= channel <= upper_channel_bound:
                 return lower_channel_bound + center_channel_delta
-        raise ValueError(
-            "Invalid channel for {channel_width}.".format(
-                channel_width=self._vht_oper_chwidth
-            )
-        )
+        raise ValueError(f"Invalid channel for {self._vht_oper_chwidth}.")
 
     @property
-    def _get_default_config(self):
+    def _get_default_config(self) -> dict[str, str | int | None]:
         """Returns: dict of default options for hostapd."""
         if self.set_ap_defaults_profile == "mistral":
             return collections.OrderedDict(
@@ -200,20 +402,20 @@
         self.frequency = get_frequency_for_channel(value)
 
     @property
-    def bssid(self):
+    def bssid(self) -> str | None:
         return self._bssid
 
     @bssid.setter
-    def bssid(self, value):
+    def bssid(self, value: str):
         self._bssid = value
 
     @property
-    def frequency(self):
-        """Returns: int, frequency for hostapd to listen on."""
+    def frequency(self) -> int:
+        """Returns: frequency for hostapd to listen on."""
         return self._frequency
 
     @frequency.setter
-    def frequency(self, value):
+    def frequency(self, value: int):
         """Sets the frequency for hostapd to listen on.
 
         Args:
@@ -221,21 +423,21 @@
 
         """
         if value not in hostapd_constants.CHANNEL_MAP:
-            raise ValueError("Tried to set an invalid frequency: %r." % value)
+            raise ValueError(f"Tried to set an invalid frequency: {value!r}.")
 
         self._frequency = value
 
     @property
-    def bss_lookup(self):
+    def bss_lookup(self) -> dict[str, BssSettings]:
         return self._bss_lookup
 
     @property
-    def ssid(self):
+    def ssid(self) -> str | None:
         """Returns: SsidSettings, The root Ssid settings being used."""
         return self._ssid
 
     @ssid.setter
-    def ssid(self, value):
+    def ssid(self, value: str):
         """Sets the ssid for the hostapd.
 
         Args:
@@ -250,30 +452,30 @@
         return self._hidden
 
     @hidden.setter
-    def hidden(self, value):
+    def hidden(self, value: bool):
         """Sets if this ssid is hidden.
 
         Args:
-            value: bool, If true the ssid will be hidden.
+            value: If true the ssid will be hidden.
         """
         self.hidden = value
 
     @property
-    def security(self):
+    def security(self) -> Security:
         """Returns: The security type being used."""
         return self._security
 
     @security.setter
-    def security(self, value):
+    def security(self, value: Security):
         """Sets the security options to use.
 
         Args:
-            value: Security, The type of security to use.
+            value: The type of security to use.
         """
         self._security = value
 
     @property
-    def ht_packet_capture_mode(self):
+    def ht_packet_capture_mode(self) -> str | None:
         """Get an appropriate packet capture HT parameter.
 
         When we go to configure a raw monitor we need to configure
@@ -299,26 +501,24 @@
         return "HT20"
 
     @property
-    def beacon_footer(self):
-        """Returns: bool _beacon_footer value."""
+    def beacon_footer(self) -> str:
         return self._beacon_footer
 
-    def beacon_footer(self, value):
+    @beacon_footer.setter
+    def beacon_footer(self, value: str):
         """Changes the beacon footer.
 
         Args:
-            value: bool, The beacon footer vlaue.
+            value: The beacon footer value.
         """
         self._beacon_footer = value
 
     @property
-    def scenario_name(self):
-        """Returns: string _scenario_name value, or None."""
+    def scenario_name(self) -> str | None:
         return self._scenario_name
 
     @property
-    def min_streams(self):
-        """Returns: int, _min_streams value, or None."""
+    def min_streams(self) -> int | None:
         return self._min_streams
 
     @property
@@ -329,219 +529,7 @@
     def wnm_features(self, value: FrozenSet[hostapd_constants.WnmFeature]):
         self._wnm_features = value
 
-    def __init__(
-        self,
-        interface=None,
-        mode=None,
-        channel=None,
-        frequency=None,
-        n_capabilities=[],
-        beacon_interval=None,
-        dtim_period=None,
-        frag_threshold=None,
-        rts_threshold=None,
-        short_preamble=None,
-        ssid=None,
-        hidden=False,
-        security=None,
-        bssid=None,
-        force_wmm=None,
-        pmf_support=None,
-        obss_interval=None,
-        vht_channel_width=None,
-        vht_center_channel=None,
-        ac_capabilities=[],
-        beacon_footer="",
-        spectrum_mgmt_required=None,
-        scenario_name=None,
-        min_streams=None,
-        wnm_features: FrozenSet[hostapd_constants.WnmFeature] = frozenset(),
-        bss_settings=[],
-        additional_parameters={},
-        set_ap_defaults_profile="whirlwind",
-    ):
-        """Construct a HostapdConfig.
-
-        You may specify channel or frequency, but not both.  Both options
-        are checked for validity (i.e. you can't specify an invalid channel
-        or a frequency that will not be accepted).
-
-        Args:
-            interface: string, The name of the interface to use.
-            mode: string, MODE_11x defined above.
-            channel: int, channel number.
-            frequency: int, frequency of channel.
-            n_capabilities: list of N_CAPABILITY_x defined above.
-            beacon_interval: int, beacon interval of AP.
-            dtim_period: int, include a DTIM every |dtim_period| beacons.
-            frag_threshold: int, maximum outgoing data frame size.
-            rts_threshold: int, maximum packet size without requiring explicit
-                protection via rts/cts or cts to self.
-            short_preamble: Whether to use a short preamble.
-            ssid: string, The name of the ssid to brodcast.
-            hidden: bool, Should the ssid be hidden.
-            security: Security, the secuirty settings to use.
-            bssid: string, a MAC address like string for the BSSID.
-            force_wmm: True if we should force WMM on, False if we should
-                force it off, None if we shouldn't force anything.
-            pmf_support: one of PMF_SUPPORT_* above.  Controls whether the
-                client supports/must support 802.11w. If None, defaults to
-                required with wpa3, else defaults to disabled.
-            obss_interval: int, interval in seconds that client should be
-                required to do background scans for overlapping BSSes.
-            vht_channel_width: object channel width
-            vht_center_channel: int, center channel of segment 0.
-            ac_capabilities: list of AC_CAPABILITY_x defined above.
-            beacon_footer: string, containing (unvalidated) IE data to be
-                placed at the end of the beacon.
-            spectrum_mgmt_required: True if we require the DUT to support
-                spectrum management.
-            scenario_name: string to be included in file names, instead
-                of the interface name.
-            min_streams: int, number of spatial streams required.
-            wnm_features: WNM features to enable on the AP.
-            control_interface: The file name to use as the control interface.
-            bss_settings: The settings for all bss.
-            additional_parameters: A dictionary of additional parameters to add
-                to the hostapd config.
-            set_ap_defaults_profile: profile name to load defaults from
-        """
-        self.set_ap_defaults_profile = set_ap_defaults_profile
-        self._interface = interface
-        if channel is not None and frequency is not None:
-            raise ValueError("Specify either frequency or channel " "but not both.")
-
-        self._wmm_enabled = False
-        unknown_caps = [
-            cap
-            for cap in n_capabilities
-            if cap not in hostapd_constants.N_CAPABILITIES_MAPPING
-        ]
-        if unknown_caps:
-            raise ValueError("Unknown capabilities: %r" % unknown_caps)
-
-        self._frequency = None
-        if channel:
-            self.channel = channel
-        elif frequency:
-            self.frequency = frequency
-        else:
-            raise ValueError("Specify either frequency or channel.")
-        """
-        if set_ap_defaults_model:
-            ap_default_config = hostapd_ap_default_configs.APDefaultConfig(
-                profile_name=set_ap_defaults_model, frequency=self.frequency)
-            force_wmm = ap_default_config.force_wmm
-            beacon_interval = ap_default_config.beacon_interval
-            dtim_period = ap_default_config.dtim_period
-            short_preamble = ap_default_config.short_preamble
-            self._interface = ap_default_config.interface
-            mode = ap_default_config.mode
-            if ap_default_config.n_capabilities:
-                n_capabilities = ap_default_config.n_capabilities
-            if ap_default_config.ac_capabilities:
-                ap_default_config = ap_default_config.ac_capabilities
-        """
-
-        self._n_capabilities = set(n_capabilities)
-        if self._n_capabilities:
-            self._wmm_enabled = True
-        if self._n_capabilities and mode is None:
-            mode = hostapd_constants.MODE_11N_PURE
-        self._mode = mode
-
-        if not self.supports_frequency(self.frequency):
-            raise ValueError(
-                "Configured a mode %s that does not support "
-                "frequency %d" % (self._mode, self.frequency)
-            )
-
-        self._beacon_interval = beacon_interval
-        self._dtim_period = dtim_period
-        self._frag_threshold = frag_threshold
-        self._rts_threshold = rts_threshold
-        self._short_preamble = short_preamble
-        self._ssid = ssid
-        self._hidden = hidden
-        self._security = security
-        self._bssid = bssid
-        if force_wmm is not None:
-            if force_wmm:
-                self._wmm_enabled = 1
-            else:
-                self._wmm_enabled = 0
-        # Default PMF Values
-        if pmf_support is None:
-            if (
-                self.security
-                and self.security.security_mode_string == hostapd_constants.WPA3_STRING
-            ):
-                # Set PMF required for WP3
-                self._pmf_support = hostapd_constants.PMF_SUPPORT_REQUIRED
-            elif (
-                self.security
-                and self.security.security_mode_string
-                in hostapd_constants.WPA3_MODE_STRINGS
-            ):
-                # Default PMF to enabled for WPA3 mixed modes (can be
-                # overwritten by explicitly provided value)
-                self._pmf_support = hostapd_constants.PMF_SUPPORT_ENABLED
-            else:
-                # Default PMD to disabled for all other modes (can be
-                # overwritten by explicitly provided value)
-                self._pmf_support = hostapd_constants.PMF_SUPPORT_DISABLED
-        elif pmf_support not in hostapd_constants.PMF_SUPPORT_VALUES:
-            raise ValueError("Invalid value for pmf_support: %r" % pmf_support)
-        elif (
-            pmf_support != hostapd_constants.PMF_SUPPORT_REQUIRED
-            and self.security
-            and self.security.security_mode_string == hostapd_constants.WPA3_STRING
-        ):
-            raise ValueError("PMF support must be required with wpa3.")
-        else:
-            self._pmf_support = pmf_support
-        self._obss_interval = obss_interval
-        if self.is_11ac:
-            if str(vht_channel_width) == "40" or str(vht_channel_width) == "20":
-                self._vht_oper_chwidth = hostapd_constants.VHT_CHANNEL_WIDTH_40
-            elif str(vht_channel_width) == "80":
-                self._vht_oper_chwidth = hostapd_constants.VHT_CHANNEL_WIDTH_80
-            elif str(vht_channel_width) == "160":
-                self._vht_oper_chwidth = hostapd_constants.VHT_CHANNEL_WIDTH_160
-            elif str(vht_channel_width) == "80+80":
-                self._vht_oper_chwidth = hostapd_constants.VHT_CHANNEL_WIDTH_80_80
-            elif vht_channel_width is not None:
-                raise ValueError("Invalid channel width")
-            else:
-                logging.warning(
-                    "No channel bandwidth specified.  Using 80MHz for 11ac."
-                )
-                self._vht_oper_chwidth = 1
-            if vht_center_channel is not None:
-                self._vht_oper_centr_freq_seg0_idx = vht_center_channel
-            elif vht_channel_width == 20:
-                self._vht_oper_centr_freq_seg0_idx = channel
-            else:
-                self._vht_oper_centr_freq_seg0_idx = (
-                    self._get_11ac_center_channel_from_channel(self.channel)
-                )
-            self._ac_capabilities = set(ac_capabilities)
-        self._beacon_footer = beacon_footer
-        self._spectrum_mgmt_required = spectrum_mgmt_required
-        self._scenario_name = scenario_name
-        self._min_streams = min_streams
-        self._wnm_features = wnm_features
-        self._additional_parameters = additional_parameters
-
-        self._bss_lookup = collections.OrderedDict()
-        for bss in bss_settings:
-            if bss.name in self._bss_lookup:
-                raise ValueError(
-                    "Cannot have multiple bss settings with the" " same name."
-                )
-            self._bss_lookup[bss.name] = bss
-
-    def __repr__(self):
+    def __repr__(self) -> str:
         return (
             "%s(mode=%r, channel=%r, frequency=%r, "
             "n_capabilities=%r, beacon_interval=%r, "
@@ -565,23 +553,23 @@
             )
         )
 
-    def supports_channel(self, value):
+    def supports_channel(self, value: int) -> bool:
         """Check whether channel is supported by the current hardware mode.
 
-        @param value: int channel to check.
+        @param value: channel to check.
         @return True iff the current mode supports the band of the channel.
 
         """
-        for freq, channel in hostapd_constants.CHANNEL_MAP.iteritems():
+        for freq, channel in hostapd_constants.CHANNEL_MAP.items():
             if channel == value:
                 return self.supports_frequency(freq)
 
         return False
 
-    def supports_frequency(self, frequency):
+    def supports_frequency(self, frequency: int) -> bool:
         """Check whether frequency is supported by the current hardware mode.
 
-        @param frequency: int frequency to check.
+        @param frequency: frequency to check.
         @return True iff the current mode supports the band of the frequency.
 
         """
@@ -624,7 +612,7 @@
 
         return True
 
-    def add_bss(self, bss):
+    def add_bss(self, bss: BssSettings) -> None:
         """Adds a new bss setting.
 
         Args:
@@ -635,11 +623,11 @@
 
         self._bss_lookup[bss.name] = bss
 
-    def remove_bss(self, bss_name):
+    def remove_bss(self, bss_name: str) -> None:
         """Removes a bss setting from the config."""
         del self._bss_lookup[bss_name]
 
-    def package_configs(self):
+    def package_configs(self) -> list[dict[str, str | int | None]]:
         """Package the configs.
 
         Returns:
@@ -667,7 +655,7 @@
             conf["vht_oper_centr_freq_seg0_idx"] = self._vht_oper_centr_freq_seg0_idx
             conf["vht_capab"] = self._hostapd_vht_capabilities
         if self._wmm_enabled is not None:
-            conf["wmm_enabled"] = self._wmm_enabled
+            conf["wmm_enabled"] = 1 if self._wmm_enabled else 0
         if self._require_ht:
             conf["require_ht"] = 1
         if self._require_vht:
@@ -695,18 +683,11 @@
             conf["ieee80211d"] = 1  # Required for local_pwr_constraint
             conf["local_pwr_constraint"] = 0  # No local constraint
             conf["spectrum_mgmt_required"] = 1  # Requires local_pwr_constraint
+        if self._ap_max_inactivity:
+            conf["ap_max_inactivity"] = self._ap_max_inactivity
 
-        if self._security:
-            for k, v in self._security.generate_dict().items():
-                conf[k] = v
-
-        all_conf = [conf]
-
-        for bss in self._bss_lookup.values():
-            bss_conf = collections.OrderedDict()
-            for k, v in (bss.generate_dict()).items():
-                bss_conf[k] = v
-            all_conf.append(bss_conf)
+        for k, v in self._security.generate_dict().items():
+            conf[k] = v
 
         for wnm_feature in self._wnm_features:
             if wnm_feature == hostapd_constants.WnmFeature.TIME_ADVERTISEMENT:
@@ -725,6 +706,8 @@
                     hostapd_constants.ENABLE_WNM_IPV6_NEIGHBOR_ADVERTISEMENT_MULTICAST_TO_UNICAST
                 )
 
+        all_conf = [conf] + [bss.generate_dict() for bss in self._bss_lookup.values()]
+
         if self._additional_parameters:
             all_conf.append(self._additional_parameters)
 
diff --git a/src/antlion/controllers/ap_lib/hostapd_constants.py b/packages/antlion/controllers/ap_lib/hostapd_constants.py
similarity index 94%
rename from src/antlion/controllers/ap_lib/hostapd_constants.py
rename to packages/antlion/controllers/ap_lib/hostapd_constants.py
index ae7ef85..51bb59e 100755
--- a/src/antlion/controllers/ap_lib/hostapd_constants.py
+++ b/packages/antlion/controllers/ap_lib/hostapd_constants.py
@@ -15,21 +15,40 @@
 # limitations under the License.
 
 import itertools
+from enum import Enum, StrEnum, auto, unique
+from typing import TypedDict
 
-from enum import Enum, auto, unique
-
+# TODO(http://b/286584981): Replace with BandType
 BAND_2G = "2g"
 BAND_5G = "5g"
+
+
+@unique
+class BandType(StrEnum):
+    BAND_2G = "2g"
+    BAND_5G = "5g"
+
+    def default_channel(self) -> int:
+        match self:
+            case BandType.BAND_2G:
+                return 6
+            case BandType.BAND_5G:
+                return 36
+
+
 CHANNEL_BANDWIDTH_20MHZ = 20
 CHANNEL_BANDWIDTH_40MHZ = 40
 CHANNEL_BANDWIDTH_80MHZ = 80
 CHANNEL_BANDWIDTH_160MHZ = 160
+
+# TODO(http://b/286584981): Replace with SecurityModeInt
 WEP = 0
 WPA1 = 1
 WPA2 = 2
 WPA3 = 2  # same as wpa2 and wpa2/wpa3, distinguished by wpa_key_mgmt
-MIXED = 3  # applies to wpa/wpa2, and wpa/wpa2/wpa3, distinquished by wpa_key_mgmt
+MIXED = 3  # applies to wpa/wpa2, and wpa/wpa2/wpa3, distinguished by wpa_key_mgmt
 ENT = 4  # get the correct constant
+
 MAX_WPA_PSK_LENGTH = 64
 MIN_WPA_PSK_LENGTH = 8
 MAX_WPA_PASSWORD_LENGTH = 63
@@ -38,6 +57,8 @@
 WPA2_DEFAULT_CIPER = "CCMP"
 WPA_GROUP_KEY_ROTATION_TIME = 600
 WPA_STRICT_REKEY_DEFAULT = True
+
+# TODO(http://b/286584981): Replace these with SecurityMode enum
 WEP_STRING = "wep"
 WPA_STRING = "wpa"
 WPA2_STRING = "wpa2"
@@ -46,10 +67,14 @@
 WPA2_WPA3_MIXED_STRING = "wpa2/wpa3"
 WPA_WPA2_WPA3_MIXED_STRING = "wpa/wpa2/wpa3"
 ENT_STRING = "ent"
+
+# TODO(http://b/286584981): Replace with KeyManagement
 ENT_KEY_MGMT = "WPA-EAP"
 WPA_PSK_KEY_MGMT = "WPA-PSK"
 SAE_KEY_MGMT = "SAE"
 DUAL_WPA_PSK_SAE_KEY_MGMT = "WPA-PSK SAE"
+
+# TODO(http://b/286584981): Replace with SecurityMode.security_mode_int
 SECURITY_STRING_TO_SECURITY_MODE_INT = {
     WPA_STRING: WPA1,
     WPA2_STRING: WPA2,
@@ -60,6 +85,8 @@
     WEP_STRING: WEP,
     ENT_STRING: ENT,
 }
+
+# TODO(http://b/286584981): Replace with SecurityMode.key_management
 SECURITY_STRING_TO_WPA_KEY_MGMT = {
     WPA_STRING: WPA_PSK_KEY_MGMT,
     WPA2_STRING: WPA_PSK_KEY_MGMT,
@@ -68,8 +95,8 @@
     WPA2_WPA3_MIXED_STRING: DUAL_WPA_PSK_SAE_KEY_MGMT,
     WPA_WPA2_WPA3_MIXED_STRING: DUAL_WPA_PSK_SAE_KEY_MGMT,
 }
-WPA3_MODE_STRINGS = {WPA3_STRING, WPA2_WPA3_MIXED_STRING, WPA_WPA2_WPA3_MIXED_STRING}
 
+# TODO(http://b/286584981): Replace with SecurityMode.fuchsia_security_type
 SECURITY_STRING_TO_DEFAULT_TARGET_SECURITY = {
     WEP_STRING: WEP_STRING,
     WPA_STRING: WPA_STRING,
@@ -91,8 +118,11 @@
 WEP_HEX_LENGTH = [10, 26, 32, 58]
 WEP_STR_LENGTH = [5, 13, 16]
 WEP_DEFAULT_STR_LENGTH = 13
+
+# TODO(http://b/286584981): Replace with BandType.default_channel()
 AP_DEFAULT_CHANNEL_2G = 6
 AP_DEFAULT_CHANNEL_5G = 36
+
 AP_DEFAULT_MAX_SSIDS_2G = 8
 AP_DEFAULT_MAX_SSIDS_5G = 8
 AP_SSID_LENGTH_2G = 8
@@ -360,10 +390,16 @@
 
 DRIVER_NAME = "nl80211"
 
+
+class VHTChannelWidth(TypedDict):
+    delta: int
+    channels: list[tuple[int, int]]
+
+
 CENTER_CHANNEL_MAP = {
-    VHT_CHANNEL_WIDTH_40: {
-        "delta": 2,
-        "channels": (
+    VHT_CHANNEL_WIDTH_40: VHTChannelWidth(
+        delta=2,
+        channels=[
             (36, 40),
             (44, 48),
             (52, 56),
@@ -376,20 +412,23 @@
             (140, 144),
             (149, 153),
             (157, 161),
-        ),
-    },
-    VHT_CHANNEL_WIDTH_80: {
-        "delta": 6,
-        "channels": (
+        ],
+    ),
+    VHT_CHANNEL_WIDTH_80: VHTChannelWidth(
+        delta=6,
+        channels=[
             (36, 48),
             (52, 64),
             (100, 112),
             (116, 128),
             (132, 144),
             (149, 161),
-        ),
-    },
-    VHT_CHANNEL_WIDTH_160: {"delta": 14, "channels": ((36, 64), (100, 128))},
+        ],
+    ),
+    VHT_CHANNEL_WIDTH_160: VHTChannelWidth(
+        delta=14,
+        channels=[(36, 64), (100, 128)],
+    ),
 }
 
 OFDM_DATA_RATES = {"supported_rates": "60 90 120 180 240 360 480 540"}
@@ -556,7 +595,10 @@
 ENABLE_RRM_NEIGHBOR_REPORT = {"rrm_neighbor_report": 1}
 
 # Wireless Network Management (AKA 802.11v) features.
-ENABLE_WNM_TIME_ADVERTISEMENT = {"time_advertisement": 2, "time_zone": "EST5"}
+ENABLE_WNM_TIME_ADVERTISEMENT: dict[str, int | str] = {
+    "time_advertisement": 2,
+    "time_zone": "EST5",
+}
 ENABLE_WNM_SLEEP_MODE = {"wnm_sleep_mode": 1}
 ENABLE_WNM_BSS_TRANSITION_MANAGEMENT = {"bss_transition": 1}
 ENABLE_WNM_PROXY_ARP = {"proxy_arp": 1}
@@ -885,7 +927,7 @@
     165: {20},
 }
 
-ALL_CHANNELS = {**ALL_CHANNELS_2G, **ALL_CHANNELS_5G}
+ALL_CHANNELS = ALL_CHANNELS_2G | ALL_CHANNELS_5G
 
 
 @unique
diff --git a/packages/antlion/controllers/ap_lib/hostapd_security.py b/packages/antlion/controllers/ap_lib/hostapd_security.py
new file mode 100644
index 0000000..3341598
--- /dev/null
+++ b/packages/antlion/controllers/ap_lib/hostapd_security.py
@@ -0,0 +1,457 @@
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import collections
+import string
+from enum import Enum, StrEnum, auto, unique
+
+from honeydew.affordances.connectivity.wlan.utils.types import SecurityProtocol
+
+from antlion.controllers.ap_lib import hostapd_constants
+
+
+class SecurityModeInt(int, Enum):
+    """Possible values for hostapd's "wpa" config option.
+
+    The int value is a bit field that can enable WPA and/or WPA2.
+
+    bit0 = enable WPA defined by IEEE 802.11i/D3.0
+    bit1 = enable RSN (WPA2) defined by IEEE 802.11i/RSN
+    bit2 = enable WAPI (rejected/withdrawn)
+    bit3 = enable OSEN (ENT)
+    """
+
+    WEP = 0
+    WPA1 = 1
+    WPA2 = 2
+    WPA3 = 2  # same as wpa2 and wpa2/wpa3; distinguished by wpa_key_mgmt
+    MIXED = 3  # applies to wpa/wpa2 and wpa/wpa2/wpa3; distinguished by wpa_key_mgmt
+    ENT = 8
+
+    def __str__(self):
+        return str(self.value)
+
+
+@unique
+class KeyManagement(StrEnum):
+    SAE = "SAE"
+    WPA_PSK = "WPA-PSK"
+    WPA_PSK_SAE = "WPA-PSK SAE"
+    ENT = "WPA-EAP"
+
+
+# TODO(http://b/286584981): This is currently only being used for OpenWRT.
+# Investigate whether we can replace KeyManagement with OpenWRTEncryptionMode.
+@unique
+class OpenWRTEncryptionMode(StrEnum):
+    """Combination of Wi-Fi encryption mode and ciphers.
+
+    Only used by OpenWRT.
+
+    Besides the encryption mode, the encryption option also specifies the group and peer
+    ciphers to use. To override the cipher, the value of encryption must be given in the
+    form "mode+cipher". This enum contains all possible combinations.
+
+    See https://openwrt.org/docs/guide-user/network/wifi/basic#encryption_modes.
+    """
+
+    NONE = "none"
+    """No authentication, no ciphers"""
+    SAE = "sae"
+    """WPA3 Personal (SAE) using CCMP cipher"""
+    SAE_MIXED = "sae-mixed"
+    """WPA2/WPA3 Personal (PSK/SAE) mixed mode using CCMP cipher"""
+    PSK2_TKIP_CCMP = "psk2+tkip+ccmp"
+    """WPA2 Personal (PSK) using TKIP and CCMP ciphers"""
+    PSK2_TKIP_AES = "psk2+tkip+aes"
+    """WPA2 Personal (PSK) using TKIP and AES ciphers"""
+    PSK2_TKIP = "psk2+tkip"
+    """WPA2 Personal (PSK) using TKIP cipher"""
+    PSK2_CCMP = "psk2+ccmp"
+    """WPA2 Personal (PSK) using CCMP cipher"""
+    PSK2_AES = "psk2+aes"
+    """WPA2 Personal (PSK) using AES cipher"""
+    PSK2 = "psk2"
+    """WPA2 Personal (PSK) using CCMP cipher"""
+    PSK_TKIP_CCMP = "psk+tkip+ccmp"
+    """WPA Personal (PSK) using TKIP and CCMP ciphers"""
+    PSK_TKIP_AES = "psk+tkip+aes"
+    """WPA Personal (PSK) using TKIP and AES ciphers"""
+    PSK_TKIP = "psk+tkip"
+    """WPA Personal (PSK) using TKIP cipher"""
+    PSK_CCMP = "psk+ccmp"
+    """WPA Personal (PSK) using CCMP cipher"""
+    PSK_AES = "psk+aes"
+    """WPA Personal (PSK) using AES cipher"""
+    PSK = "psk"
+    """WPA Personal (PSK) using CCMP cipher"""
+    PSK_MIXED_TKIP_CCMP = "psk-mixed+tkip+ccmp"
+    """WPA/WPA2 Personal (PSK) mixed mode using TKIP and CCMP ciphers"""
+    PSK_MIXED_TKIP_AES = "psk-mixed+tkip+aes"
+    """WPA/WPA2 Personal (PSK) mixed mode using TKIP and AES ciphers"""
+    PSK_MIXED_TKIP = "psk-mixed+tkip"
+    """WPA/WPA2 Personal (PSK) mixed mode using TKIP cipher"""
+    PSK_MIXED_CCMP = "psk-mixed+ccmp"
+    """WPA/WPA2 Personal (PSK) mixed mode using CCMP cipher"""
+    PSK_MIXED_AES = "psk-mixed+aes"
+    """WPA/WPA2 Personal (PSK) mixed mode using AES cipher"""
+    PSK_MIXED = "psk-mixed"
+    """WPA/WPA2 Personal (PSK) mixed mode using CCMP cipher"""
+    WEP = "wep"
+    """defaults to “open system” authentication aka wep+open using RC4 cipher"""
+    WEP_OPEN = "wep+open"
+    """“open system” authentication using RC4 cipher"""
+    WEP_SHARED = "wep+shared"
+    """“shared key” authentication using RC4 cipher"""
+    WPA3 = "wpa3"
+    """WPA3 Enterprise using CCMP cipher"""
+    WPA3_MIXED = "wpa3-mixed"
+    """WPA3/WPA2 Enterprise using CCMP cipher"""
+    WPA2_TKIP_CCMP = "wpa2+tkip+ccmp"
+    """WPA2 Enterprise using TKIP and CCMP ciphers"""
+    WPA2_TKIP_AES = "wpa2+tkip+aes"
+    """WPA2 Enterprise using TKIP and AES ciphers"""
+    WPA2_CCMP = "wpa2+ccmp"
+    """WPA2 Enterprise using CCMP cipher"""
+    WPA2_AES = "wpa2+aes"
+    """WPA2 Enterprise using AES cipher"""
+    WPA2 = "wpa2"
+    """WPA2 Enterprise using CCMP cipher"""
+    WPA2_TKIP = "wpa2+tkip"
+    """WPA2 Enterprise using TKIP cipher"""
+    WPA_TKIP_CCMP = "wpa+tkip+ccmp"
+    """WPA Enterprise using TKIP and CCMP ciphers"""
+    WPA_TKIP_AES = "wpa+tkip+aes"
+    """WPA Enterprise using TKIP and AES ciphers"""
+    WPA_CCMP = "wpa+ccmp"
+    """WPA Enterprise using CCMP cipher"""
+    WPA_AES = "wpa+aes"
+    """WPA Enterprise using AES cipher"""
+    WPA_TKIP = "wpa+tkip"
+    """WPA Enterprise using TKIP cipher"""
+    WPA = "wpa"
+    """WPA Enterprise using CCMP cipher"""
+    WPA_MIXED_TKIP_CCMP = "wpa-mixed+tkip+ccmp"
+    """WPA/WPA2 Enterprise mixed mode using TKIP and CCMP ciphers"""
+    WPA_MIXED_TKIP_AES = "wpa-mixed+tkip+aes"
+    """WPA/WPA2 Enterprise mixed mode using TKIP and AES ciphers"""
+    WPA_MIXED_TKIP = "wpa-mixed+tkip"
+    """WPA/WPA2 Enterprise mixed mode using TKIP cipher"""
+    WPA_MIXED_CCMP = "wpa-mixed+ccmp"
+    """WPA/WPA2 Enterprise mixed mode using CCMP cipher"""
+    WPA_MIXED_AES = "wpa-mixed+aes"
+    """WPA/WPA2 Enterprise mixed mode using AES cipher"""
+    WPA_MIXED = "wpa-mixed"
+    """WPA/WPA2 Enterprise mixed mode using CCMP cipher"""
+    OWE = "owe"
+    """Opportunistic Wireless Encryption (OWE) using CCMP cipher"""
+
+
+@unique
+class FuchsiaSecurityType(StrEnum):
+    """Fuchsia supported security types.
+
+    Defined by the fuchsia.wlan.policy.SecurityType FIDL.
+
+    https://cs.opensource.google/fuchsia/fuchsia/+/main:sdk/fidl/fuchsia.wlan.policy/types.fidl
+    """
+
+    NONE = "none"
+    WEP = "wep"
+    WPA = "wpa"
+    WPA2 = "wpa2"
+    WPA3 = "wpa3"
+
+
+@unique
+class SecurityMode(StrEnum):
+    OPEN = auto()
+    WEP = auto()
+    WPA = auto()
+    WPA2 = auto()
+    WPA_WPA2 = auto()
+    WPA3 = auto()
+    WPA2_WPA3 = auto()
+    WPA_WPA2_WPA3 = auto()
+    ENT = auto()
+
+    def security_mode_int(self) -> SecurityModeInt:
+        match self:
+            case SecurityMode.OPEN:
+                raise TypeError("Open security doesn't have a SecurityModeInt")
+            case SecurityMode.WEP:
+                return SecurityModeInt.WEP
+            case SecurityMode.WPA:
+                return SecurityModeInt.WPA1
+            case SecurityMode.WPA2:
+                return SecurityModeInt.WPA2
+            case SecurityMode.WPA_WPA2:
+                return SecurityModeInt.MIXED
+            case SecurityMode.WPA3:
+                return SecurityModeInt.WPA3
+            case SecurityMode.WPA2_WPA3:
+                return SecurityModeInt.WPA3
+            case SecurityMode.WPA_WPA2_WPA3:
+                return SecurityModeInt.MIXED
+            case SecurityMode.ENT:
+                return SecurityModeInt.ENT
+
+    def key_management(self) -> KeyManagement | None:
+        match self:
+            case SecurityMode.OPEN:
+                return None
+            case SecurityMode.WEP:
+                return None
+            case SecurityMode.WPA:
+                return KeyManagement.WPA_PSK
+            case SecurityMode.WPA2:
+                return KeyManagement.WPA_PSK
+            case SecurityMode.WPA_WPA2:
+                return KeyManagement.WPA_PSK
+            case SecurityMode.WPA3:
+                return KeyManagement.SAE
+            case SecurityMode.WPA2_WPA3:
+                return KeyManagement.WPA_PSK_SAE
+            case SecurityMode.WPA_WPA2_WPA3:
+                return KeyManagement.WPA_PSK_SAE
+            case SecurityMode.ENT:
+                return KeyManagement.ENT
+
+    def fuchsia_security_type(self) -> FuchsiaSecurityType:
+        match self:
+            case SecurityMode.OPEN:
+                return FuchsiaSecurityType.NONE
+            case SecurityMode.WEP:
+                return FuchsiaSecurityType.WEP
+            case SecurityMode.WPA:
+                return FuchsiaSecurityType.WPA
+            case SecurityMode.WPA2:
+                return FuchsiaSecurityType.WPA2
+            case SecurityMode.WPA_WPA2:
+                return FuchsiaSecurityType.WPA2
+            case SecurityMode.WPA3:
+                return FuchsiaSecurityType.WPA3
+            case SecurityMode.WPA2_WPA3:
+                return FuchsiaSecurityType.WPA3
+            case SecurityMode.WPA_WPA2_WPA3:
+                return FuchsiaSecurityType.WPA3
+            case SecurityMode.ENT:
+                raise NotImplementedError(
+                    f'Fuchsia has not implemented support for security mode "{self}"'
+                )
+
+    def is_wpa3(self) -> bool:
+        match self:
+            case SecurityMode.OPEN:
+                return False
+            case SecurityMode.WEP:
+                return False
+            case SecurityMode.WPA:
+                return False
+            case SecurityMode.WPA2:
+                return False
+            case SecurityMode.WPA_WPA2:
+                return False
+            case SecurityMode.WPA3:
+                return True
+            case SecurityMode.WPA2_WPA3:
+                return True
+            case SecurityMode.WPA_WPA2_WPA3:
+                return True
+            case SecurityMode.ENT:
+                return False
+        raise TypeError("Unknown security mode")
+
+    def protocol(self, enterprise: bool = False) -> SecurityProtocol:
+        match self:
+            case SecurityMode.OPEN:
+                return SecurityProtocol.OPEN
+            case SecurityMode.WEP:
+                return SecurityProtocol.WEP
+            case SecurityMode.WPA:
+                return SecurityProtocol.WPA1
+            case SecurityMode.WPA2:
+                return (
+                    SecurityProtocol.WPA2_ENTERPRISE
+                    if enterprise
+                    else SecurityProtocol.WPA2_PERSONAL
+                )
+            case SecurityMode.WPA_WPA2:
+                return (
+                    SecurityProtocol.WPA2_ENTERPRISE
+                    if enterprise
+                    else SecurityProtocol.WPA2_PERSONAL
+                )
+            case SecurityMode.WPA3:
+                return (
+                    SecurityProtocol.WPA3_ENTERPRISE
+                    if enterprise
+                    else SecurityProtocol.WPA3_PERSONAL
+                )
+            case SecurityMode.WPA2_WPA3:
+                return (
+                    SecurityProtocol.WPA3_ENTERPRISE
+                    if enterprise
+                    else SecurityProtocol.WPA3_PERSONAL
+                )
+            case SecurityMode.WPA_WPA2_WPA3:
+                return (
+                    SecurityProtocol.WPA3_ENTERPRISE
+                    if enterprise
+                    else SecurityProtocol.WPA3_PERSONAL
+                )
+            case SecurityMode.ENT:
+                raise NotImplementedError(
+                    f'Fuchsia has not implemented support for security mode "{self}"'
+                )
+
+
+class Security(object):
+    """The Security class for hostapd representing some of the security
+    settings that are allowed in hostapd.  If needed more can be added.
+    """
+
+    def __init__(
+        self,
+        security_mode: SecurityMode = SecurityMode.OPEN,
+        password: str | None = None,
+        wpa_cipher: str | None = hostapd_constants.WPA_DEFAULT_CIPHER,
+        wpa2_cipher: str | None = hostapd_constants.WPA2_DEFAULT_CIPER,
+        wpa_group_rekey: int = hostapd_constants.WPA_GROUP_KEY_ROTATION_TIME,
+        wpa_strict_rekey: bool = hostapd_constants.WPA_STRICT_REKEY_DEFAULT,
+        wep_default_key: int = hostapd_constants.WEP_DEFAULT_KEY,
+        radius_server_ip: str | None = None,
+        radius_server_port: int | None = None,
+        radius_server_secret: str | None = None,
+    ) -> None:
+        """Gather all of the security settings for WPA-PSK.  This could be
+           expanded later.
+
+        Args:
+            security_mode: Type of security mode.
+            password: The PSK or passphrase for the security mode.
+            wpa_cipher: The cipher to be used for wpa.
+                        Options: TKIP, CCMP, TKIP CCMP
+                        Default: TKIP
+            wpa2_cipher: The cipher to be used for wpa2.
+                         Options: TKIP, CCMP, TKIP CCMP
+                         Default: CCMP
+            wpa_group_rekey: How often to refresh the GTK regardless of network
+                             changes.
+                             Options: An integer in seconds, None
+                             Default: 600 seconds
+            wpa_strict_rekey: Whether to do a group key update when client
+                              leaves the network or not.
+                              Options: True, False
+                              Default: True
+            wep_default_key: The wep key number to use when transmitting.
+            radius_server_ip: Radius server IP for Enterprise auth.
+            radius_server_port: Radius server port for Enterprise auth.
+            radius_server_secret: Radius server secret for Enterprise auth.
+        """
+        self.security_mode = security_mode
+        self.wpa_cipher = wpa_cipher
+        self.wpa2_cipher = wpa2_cipher
+        self.wpa_group_rekey = wpa_group_rekey
+        self.wpa_strict_rekey = wpa_strict_rekey
+        self.wep_default_key = wep_default_key
+        self.radius_server_ip = radius_server_ip
+        self.radius_server_port = radius_server_port
+        self.radius_server_secret = radius_server_secret
+        if password:
+            if self.security_mode is SecurityMode.WEP:
+                if len(password) in hostapd_constants.WEP_STR_LENGTH:
+                    self.password = f'"{password}"'
+                elif len(password) in hostapd_constants.WEP_HEX_LENGTH and all(
+                    c in string.hexdigits for c in password
+                ):
+                    self.password = password
+                else:
+                    raise ValueError(
+                        "WEP key must be a hex string of %s characters"
+                        % hostapd_constants.WEP_HEX_LENGTH
+                    )
+            else:
+                if (
+                    len(password) < hostapd_constants.MIN_WPA_PSK_LENGTH
+                    or len(password) > hostapd_constants.MAX_WPA_PSK_LENGTH
+                ):
+                    raise ValueError(
+                        "Password must be a minimum of %s characters and a maximum of %s"
+                        % (
+                            hostapd_constants.MIN_WPA_PSK_LENGTH,
+                            hostapd_constants.MAX_WPA_PSK_LENGTH,
+                        )
+                    )
+                else:
+                    self.password = password
+        else:
+            self.password = None
+
+    def __str__(self) -> str:
+        return self.security_mode
+
+    def generate_dict(self) -> dict[str, str | int]:
+        """Returns: an ordered dictionary of settings"""
+        if self.security_mode is SecurityMode.OPEN:
+            return {}
+
+        settings: dict[str, str | int] = collections.OrderedDict()
+
+        if self.security_mode is SecurityMode.WEP:
+            settings["wep_default_key"] = self.wep_default_key
+            if self.password is not None:
+                settings[f"wep_key{self.wep_default_key}"] = self.password
+        elif self.security_mode == SecurityMode.ENT:
+            if self.radius_server_ip is not None:
+                settings["auth_server_addr"] = self.radius_server_ip
+            if self.radius_server_port is not None:
+                settings["auth_server_port"] = self.radius_server_port
+            if self.radius_server_secret is not None:
+                settings["auth_server_shared_secret"] = self.radius_server_secret
+            settings["wpa_key_mgmt"] = hostapd_constants.ENT_KEY_MGMT
+            settings["ieee8021x"] = hostapd_constants.IEEE8021X
+            settings["wpa"] = hostapd_constants.WPA2
+        else:
+            settings["wpa"] = self.security_mode.security_mode_int().value
+            if self.password:
+                if len(self.password) == hostapd_constants.MAX_WPA_PSK_LENGTH:
+                    settings["wpa_psk"] = self.password
+                else:
+                    settings["wpa_passphrase"] = self.password
+            # For wpa, wpa/wpa2, and wpa/wpa2/wpa3, add wpa_pairwise
+            if self.wpa_cipher and (
+                self.security_mode is SecurityMode.WPA
+                or self.security_mode is SecurityMode.WPA_WPA2
+                or self.security_mode is SecurityMode.WPA_WPA2_WPA3
+            ):
+                settings["wpa_pairwise"] = self.wpa_cipher
+            # For wpa/wpa2, wpa2, wpa2/wpa3, and wpa3, add rsn_pairwise
+            if self.wpa2_cipher and (
+                self.security_mode is SecurityMode.WPA_WPA2
+                or self.security_mode is SecurityMode.WPA2
+                or self.security_mode is SecurityMode.WPA2_WPA3
+                or self.security_mode is SecurityMode.WPA3
+            ):
+                settings["rsn_pairwise"] = self.wpa2_cipher
+            # Add wpa_key_mgmt based on security mode string
+            wpa_key_mgmt = self.security_mode.key_management()
+            if wpa_key_mgmt is not None:
+                settings["wpa_key_mgmt"] = str(wpa_key_mgmt)
+            if self.wpa_group_rekey:
+                settings["wpa_group_rekey"] = self.wpa_group_rekey
+            if self.wpa_strict_rekey:
+                settings["wpa_strict_rekey"] = hostapd_constants.WPA_STRICT_REKEY
+
+        return settings
diff --git a/packages/antlion/controllers/ap_lib/hostapd_utils.py b/packages/antlion/controllers/ap_lib/hostapd_utils.py
new file mode 100644
index 0000000..060777e
--- /dev/null
+++ b/packages/antlion/controllers/ap_lib/hostapd_utils.py
@@ -0,0 +1,97 @@
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+from antlion import utils
+from antlion.controllers.ap_lib import hostapd_constants
+from antlion.controllers.ap_lib.hostapd_security import Security, SecurityMode
+
+
+def generate_random_password(
+    security_mode: SecurityMode = SecurityMode.OPEN,
+    length: int | None = None,
+    hex: int | None = None,
+) -> str:
+    """Generates a random password. Defaults to an 8 character ASCII password.
+
+    Args:
+        security_mode: Used to determine if length should be WEP compatible
+            (useful for generated tests to simply pass in security mode)
+        length: Length of password to generate. Defaults to 8, unless
+            security_mode is WEP, then 13
+        hex: If True, generates a hex string, else ascii
+    """
+    if hex:
+        generator_func = utils.rand_hex_str
+    else:
+        generator_func = utils.rand_ascii_str
+
+    if length:
+        return generator_func(length)
+    if security_mode is SecurityMode.WEP:
+        return generator_func(hostapd_constants.WEP_DEFAULT_STR_LENGTH)
+    else:
+        return generator_func(hostapd_constants.MIN_WPA_PSK_LENGTH)
+
+
+def verify_interface(interface: str, valid_interfaces: list[str]) -> None:
+    """Raises error if interface is missing or invalid
+
+    Args:
+        interface: interface name
+        valid_interfaces: valid interface names
+    """
+    if interface not in valid_interfaces:
+        raise ValueError(f"Invalid interface name was passed: {interface}")
+
+
+def verify_security_mode(
+    security_profile: Security, valid_security_modes: list[SecurityMode]
+) -> None:
+    """Raises error if security mode is not in list of valid security modes.
+
+    Args:
+        security_profile: Security to verify
+        valid_security_modes: Valid security modes for a profile.
+    """
+    if security_profile.security_mode not in valid_security_modes:
+        raise ValueError(
+            f"Invalid Security Mode: {security_profile.security_mode}; "
+            f"Valid Security Modes for this profile: {valid_security_modes}"
+        )
+
+
+def verify_cipher(security_profile: Security, valid_ciphers: list[str]) -> None:
+    """Raise error if cipher is not in list of valid ciphers.
+
+    Args:
+        security_profile: Security profile to verify
+        valid_ciphers: A list of valid ciphers for security_profile.
+    """
+    if security_profile.security_mode is SecurityMode.OPEN:
+        raise ValueError("Security mode is open.")
+    elif security_profile.security_mode is SecurityMode.WPA:
+        if security_profile.wpa_cipher not in valid_ciphers:
+            raise ValueError(
+                f"Invalid WPA Cipher: {security_profile.wpa_cipher}. "
+                f"Valid WPA Ciphers for this profile: {valid_ciphers}"
+            )
+    elif security_profile.security_mode is SecurityMode.WPA2:
+        if security_profile.wpa2_cipher not in valid_ciphers:
+            raise ValueError(
+                f"Invalid WPA2 Cipher: {security_profile.wpa2_cipher}. "
+                f"Valid WPA2 Ciphers for this profile: {valid_ciphers}"
+            )
+    else:
+        raise ValueError(f"Invalid Security Mode: {security_profile.security_mode}")
diff --git a/src/antlion/controllers/ap_lib/radio_measurement.py b/packages/antlion/controllers/ap_lib/radio_measurement.py
similarity index 100%
rename from src/antlion/controllers/ap_lib/radio_measurement.py
rename to packages/antlion/controllers/ap_lib/radio_measurement.py
diff --git a/src/antlion/controllers/ap_lib/radvd.py b/packages/antlion/controllers/ap_lib/radvd.py
similarity index 76%
rename from src/antlion/controllers/ap_lib/radvd.py
rename to packages/antlion/controllers/ap_lib/radvd.py
index 216ad0e..b5d0a65 100644
--- a/src/antlion/controllers/ap_lib/radvd.py
+++ b/packages/antlion/controllers/ap_lib/radvd.py
@@ -17,15 +17,17 @@
 import tempfile
 import time
 
-from typing import Any, Optional
+from tenacity import retry, retry_if_exception_type, stop_after_delay
 
 from antlion.controllers.ap_lib.radvd_config import RadvdConfig
 from antlion.controllers.utils_lib.commands import shell
 from antlion.libs.proc import job
+from antlion.logger import LogLevel
+from antlion.runner import Runner
 
 
-class Error(Exception):
-    """An error caused by radvd."""
+class RadvdStartError(Exception):
+    """Radvd failed to start."""
 
 
 class Radvd(object):
@@ -41,10 +43,10 @@
 
     def __init__(
         self,
-        runner: Any,
+        runner: Runner,
         interface: str,
-        working_dir: Optional[str] = None,
-        radvd_binary: Optional[str] = None,
+        working_dir: str | None = None,
+        radvd_binary: str | None = None,
     ) -> None:
         """
         Args:
@@ -61,7 +63,7 @@
             radvd_binary = "radvd"
         else:
             logging.debug(f"Using radvd binary located at {radvd_binary}")
-        if working_dir is None and runner == job.run:
+        if working_dir is None and runner.run == job.run:
             working_dir = tempfile.gettempdir()
         else:
             working_dir = "/tmp"
@@ -69,21 +71,20 @@
         self._runner = runner
         self._interface = interface
         self._working_dir = working_dir
-        self.config: Optional[RadvdConfig] = None
-        self._shell = shell.ShellCommand(runner, working_dir)
+        self.config: RadvdConfig | None = None
+        self._shell = shell.ShellCommand(runner)
         self._log_file = f"{working_dir}/radvd-{self._interface}.log"
         self._config_file = f"{working_dir}/radvd-{self._interface}.conf"
         self._pid_file = f"{working_dir}/radvd-{self._interface}.pid"
         self._ps_identifier = f"{self._radvd_binary}.*{self._config_file}"
 
-    def start(self, config: RadvdConfig, timeout: int = 60) -> None:
+    def start(self, config: RadvdConfig) -> None:
         """Starts radvd
 
         Starts the radvd daemon and runs it in the background.
 
         Args:
             config: Configs to start the radvd with.
-            timeout: Time to wait for radvd  to come up.
 
         Returns:
             True if the daemon could be started. Note that the daemon can still
@@ -91,6 +92,9 @@
             of time to be produced, and because the daemon runs indefinitely
             it's impossible to wait on. If you need to check if configs are ok
             then periodic checks to is_running and logs should be used.
+
+        Raises:
+            RadvdStartError: when a radvd error is found or process is dead
         """
         if self.is_alive():
             self.stop()
@@ -101,18 +105,28 @@
         self._shell.delete_file(self._config_file)
         self._write_configs(self.config)
 
+        try:
+            self._launch()
+        except RadvdStartError:
+            self.stop()
+            raise
+
+    # TODO(http://b/372534563): Remove retries once the source of SIGINT is
+    # found and a fix is implemented.
+    @retry(stop=stop_after_delay(30), retry=retry_if_exception_type(RadvdStartError))
+    def _launch(self) -> None:
+        """Launch the radvd process with retries.
+
+        Raises:
+            RadvdStartError: when a radvd error is found or process is dead
+        """
         command = (
             f"{self._radvd_binary} -C {shlex.quote(self._config_file)} "
             f"-p {shlex.quote(self._pid_file)} -m logfile -d 5 "
             f'-l {self._log_file} > "{self._log_file}" 2>&1'
         )
         self._runner.run_async(command)
-
-        try:
-            self._wait_for_process(timeout=timeout)
-        except Error:
-            self.stop()
-            raise
+        self._wait_for_process(timeout=10)
 
     def stop(self):
         """Kills the daemon if it is running."""
@@ -132,7 +146,8 @@
             A string of the radvd logs.
         """
         # TODO: Auto pulling of logs when stop is called.
-        return self._shell.read_file(self._log_file)
+        with LogLevel(self._runner.log, logging.INFO):
+            return self._shell.read_file(self._log_file)
 
     def _wait_for_process(self, timeout: int = 60) -> None:
         """Waits for the process to come up.
@@ -141,7 +156,8 @@
         a timeout. If the program never comes up then the log file
         will be scanned for errors.
 
-        Raises: See _scan_for_errors
+        Raises:
+            RadvdStartError: when a radvd error is found or process is dead
         """
         start_time = time.time()
         while time.time() - start_time < timeout and not self.is_alive():
@@ -158,16 +174,16 @@
                           is thrown.
 
         Raises:
-            Error: Raised when a radvd error is found.
+            RadvdStartError: when a radvd error is found or process is dead
         """
         # Store this so that all other errors have priority.
         is_dead = not self.is_alive()
 
         exited_prematurely = self._shell.search_file("Exiting", self._log_file)
         if exited_prematurely:
-            raise Error("Radvd exited prematurely.", self)
+            raise RadvdStartError("Radvd exited prematurely.", self)
         if should_be_up and is_dead:
-            raise Error("Radvd failed to start", self)
+            raise RadvdStartError("Radvd failed to start", self)
 
     def _write_configs(self, config: RadvdConfig) -> None:
         """Writes the configs to the radvd config file.
@@ -179,37 +195,35 @@
         conf = config.package_configs()
         lines = ["interface %s {" % self._interface]
         for interface_option_key, interface_option in conf["interface_options"].items():
-            lines.append(
-                "\t%s %s;" % (str(interface_option_key), str(interface_option))
-            )
-        lines.append("\tprefix %s" % conf["prefix"])
+            lines.append(f"\t{str(interface_option_key)} {str(interface_option)};")
+        lines.append(f"\tprefix {conf['prefix']}")
         lines.append("\t{")
         for prefix_option in conf["prefix_options"].items():
-            lines.append("\t\t%s;" % " ".join(map(str, prefix_option)))
+            lines.append(f"\t\t{' '.join(map(str, prefix_option))};")
         lines.append("\t};")
         if conf["clients"]:
             lines.append("\tclients")
             lines.append("\t{")
             for client in conf["clients"]:
-                lines.append("\t\t%s;" % client)
+                lines.append(f"\t\t{client};")
             lines.append("\t};")
         if conf["route"]:
             lines.append("\troute %s {" % conf["route"])
             for route_option in conf["route_options"].items():
-                lines.append("\t\t%s;" % " ".join(map(str, route_option)))
+                lines.append(f"\t\t{' '.join(map(str, route_option))};")
             lines.append("\t};")
         if conf["rdnss"]:
             lines.append(
                 "\tRDNSS %s {" % " ".join([str(elem) for elem in conf["rdnss"]])
             )
             for rdnss_option in conf["rdnss_options"].items():
-                lines.append("\t\t%s;" % " ".join(map(str, rdnss_option)))
+                lines.append(f"\t\t{' '.join(map(str, rdnss_option))};")
             lines.append("\t};")
         lines.append("};")
         output_config = "\n".join(lines)
-        logging.info("Writing %s" % self._config_file)
+        logging.info(f"Writing {self._config_file}")
         logging.debug("******************Start*******************")
-        logging.debug("\n%s" % output_config)
+        logging.debug(f"\n{output_config}")
         logging.debug("*******************End********************")
 
         self._shell.write_file(self._config_file, output_config)
diff --git a/src/antlion/controllers/ap_lib/radvd_config.py b/packages/antlion/controllers/ap_lib/radvd_config.py
similarity index 90%
rename from src/antlion/controllers/ap_lib/radvd_config.py
rename to packages/antlion/controllers/ap_lib/radvd_config.py
index 647df82..d3d6d97 100644
--- a/src/antlion/controllers/ap_lib/radvd_config.py
+++ b/packages/antlion/controllers/ap_lib/radvd_config.py
@@ -12,12 +12,11 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from typing import Any, List, Optional
+import collections
+from typing import Any
 
 from antlion.controllers.ap_lib import radvd_constants
 
-import collections
-
 
 class RadvdConfig(object):
     """The root settings for the router advertisement daemon.
@@ -28,41 +27,41 @@
     def __init__(
         self,
         prefix: str = radvd_constants.DEFAULT_PREFIX,
-        clients: List[str] = [],
-        route: Optional[Any] = None,
-        rdnss: List[str] = [],
-        ignore_if_missing: Optional[str] = None,
+        clients: list[str] = [],
+        route: Any | None = None,
+        rdnss: list[str] = [],
+        ignore_if_missing: str | None = None,
         adv_send_advert: str = radvd_constants.ADV_SEND_ADVERT_ON,
-        unicast_only: Optional[str] = None,
-        max_rtr_adv_interval: Optional[int] = None,
-        min_rtr_adv_interval: Optional[int] = None,
-        min_delay_between_ras: Optional[int] = None,
-        adv_managed_flag: Optional[str] = None,
-        adv_other_config_flag: Optional[str] = None,
-        adv_link_mtu: Optional[int] = None,
-        adv_reachable_time: Optional[int] = None,
-        adv_retrans_timer: Optional[int] = None,
-        adv_cur_hop_limit: Optional[int] = None,
-        adv_default_lifetime: Optional[int] = None,
-        adv_default_preference: Optional[str] = None,
-        adv_source_ll_address: Optional[str] = None,
-        adv_home_agent_flag: Optional[str] = None,
-        adv_home_agent_info: Optional[str] = None,
-        home_agent_lifetime: Optional[int] = None,
-        home_agent_preference: Optional[int] = None,
-        adv_mob_rtr_support_flag: Optional[str] = None,
-        adv_interval_opt: Optional[str] = None,
+        unicast_only: str | None = None,
+        max_rtr_adv_interval: int | None = None,
+        min_rtr_adv_interval: int | None = None,
+        min_delay_between_ras: int | None = None,
+        adv_managed_flag: str | None = None,
+        adv_other_config_flag: str | None = None,
+        adv_link_mtu: int | None = None,
+        adv_reachable_time: int | None = None,
+        adv_retrans_timer: int | None = None,
+        adv_cur_hop_limit: int | None = None,
+        adv_default_lifetime: int | None = None,
+        adv_default_preference: str | None = None,
+        adv_source_ll_address: str | None = None,
+        adv_home_agent_flag: str | None = None,
+        adv_home_agent_info: str | None = None,
+        home_agent_lifetime: int | None = None,
+        home_agent_preference: int | None = None,
+        adv_mob_rtr_support_flag: str | None = None,
+        adv_interval_opt: str | None = None,
         adv_on_link: str = radvd_constants.ADV_ON_LINK_ON,
         adv_autonomous: str = radvd_constants.ADV_AUTONOMOUS_ON,
-        adv_router_addr: Optional[str] = None,
-        adv_valid_lifetime: Optional[int] = None,
-        adv_preferred_lifetime: Optional[int] = None,
-        base_6to4_interface: Optional[str] = None,
-        adv_route_lifetime: Optional[int] = None,
-        adv_route_preference: Optional[str] = None,
-        adv_rdnss_preference: Optional[int] = None,
-        adv_rdnss_open: Optional[str] = None,
-        adv_rdnss_lifetime: Optional[int] = None,
+        adv_router_addr: str | None = None,
+        adv_valid_lifetime: int | None = None,
+        adv_preferred_lifetime: int | None = None,
+        base_6to4_interface: str | None = None,
+        adv_route_lifetime: int | None = None,
+        adv_route_preference: str | None = None,
+        adv_rdnss_preference: int | None = None,
+        adv_rdnss_open: str | None = None,
+        adv_rdnss_lifetime: int | None = None,
     ) -> None:
         """Construct a RadvdConfig.
 
@@ -241,7 +240,7 @@
         self._adv_rdnss_lifetime = adv_rdnss_lifetime
 
     def package_configs(self):
-        conf = dict()
+        conf: dict[str, Any] = dict()
         conf["prefix"] = self._prefix
         conf["clients"] = self._clients
         conf["route"] = self._route
diff --git a/src/antlion/controllers/ap_lib/radvd_constants.py b/packages/antlion/controllers/ap_lib/radvd_constants.py
similarity index 100%
rename from src/antlion/controllers/ap_lib/radvd_constants.py
rename to packages/antlion/controllers/ap_lib/radvd_constants.py
diff --git a/packages/antlion/controllers/ap_lib/regulatory_channels.py b/packages/antlion/controllers/ap_lib/regulatory_channels.py
new file mode 100644
index 0000000..432607c
--- /dev/null
+++ b/packages/antlion/controllers/ap_lib/regulatory_channels.py
@@ -0,0 +1,710 @@
+from dataclasses import dataclass
+
+Channel = int
+Bandwidth = int
+# TODO(http://b/281728764): Add device requirements to each frequency e.g.
+# "MUST be used indoors only" or "MUST be used with DFS".
+ChannelBandwidthMap = dict[Channel, list[Bandwidth]]
+
+
+@dataclass
+class CountryChannels:
+    country_code: str
+    allowed_channels: ChannelBandwidthMap
+
+
+# All antlion-supported channels and frequencies for use in regulatory testing.
+TEST_CHANNELS: ChannelBandwidthMap = {
+    1: [20],
+    2: [20],
+    3: [20],
+    4: [20],
+    5: [20],
+    6: [20],
+    7: [20],
+    8: [20],
+    9: [20],
+    10: [20],
+    11: [20],
+    12: [20],
+    13: [20],
+    14: [20],
+    36: [20, 40, 80],
+    40: [20, 40, 80],
+    44: [20, 40, 80],
+    48: [20, 40, 80],
+    52: [20, 40, 80],
+    56: [20, 40, 80],
+    60: [20, 40, 80],
+    64: [20, 40, 80],
+    100: [20, 40, 80],
+    104: [20, 40, 80],
+    108: [20, 40, 80],
+    112: [20, 40, 80],
+    116: [20, 40, 80],
+    120: [20, 40, 80],
+    124: [20, 40, 80],
+    128: [20, 40, 80],
+    132: [20, 40, 80],
+    136: [20, 40, 80],
+    140: [20, 40, 80],
+    144: [20, 40, 80],
+    149: [20, 40, 80],
+    153: [20, 40, 80],
+    157: [20, 40, 80],
+    161: [20, 40, 80],
+    165: [20],
+}
+
+# All universally accepted 2.4GHz channels and frequencies.
+WORLD_WIDE_2G_CHANNELS: ChannelBandwidthMap = {
+    1: [20],
+    2: [20],
+    3: [20],
+    4: [20],
+    5: [20],
+    6: [20],
+    7: [20],
+    8: [20],
+    9: [20],
+    10: [20],
+    11: [20],
+}
+
+# List of supported channels and frequencies by country.
+#
+# Please keep this alphabetically ordered. Thanks!
+#
+# TODO: Add missing countries: Russia, Israel, Korea, Turkey, South Africa,
+# Brazil, Bahrain, Vietnam
+COUNTRY_CHANNELS = {
+    "Australia": CountryChannels(
+        country_code="AU",
+        allowed_channels=WORLD_WIDE_2G_CHANNELS
+        | {
+            12: [20],
+            13: [20],
+            36: [20, 40, 80],
+            40: [20, 40, 80],
+            44: [20, 40, 80],
+            48: [20, 40, 80],
+            52: [20, 40, 80],
+            56: [20, 40, 80],
+            60: [20, 40, 80],
+            64: [20, 40, 80],
+            100: [20, 40, 80],
+            104: [20, 40, 80],
+            108: [20, 40, 80],
+            112: [20, 40, 80],
+            116: [20, 40, 80],
+            132: [20, 40, 80],
+            136: [20, 40, 80],
+            140: [20, 40, 80],
+            144: [20, 40, 80],
+            149: [20, 40, 80],
+            153: [20, 40, 80],
+            157: [20, 40, 80],
+            161: [20, 40, 80],
+            165: [20],
+        },
+    ),
+    "Austria": CountryChannels(
+        country_code="AT",
+        allowed_channels=WORLD_WIDE_2G_CHANNELS
+        | {
+            12: [20],
+            13: [20],
+            36: [20, 40, 80],
+            40: [20, 40, 80],
+            44: [20, 40, 80],
+            48: [20, 40, 80],
+            52: [20, 40, 80],
+            56: [20, 40, 80],
+            60: [20, 40, 80],
+            64: [20, 40, 80],
+            100: [20, 40, 80],
+            104: [20, 40, 80],
+            108: [20, 40, 80],
+            112: [20, 40, 80],
+            116: [20, 40, 80],
+            120: [20, 40, 80],
+            124: [20, 40, 80],
+            128: [20, 40, 80],
+            132: [20, 40, 80],
+            136: [20, 40, 80],
+            140: [20, 40, 80],
+        },
+    ),
+    "Belgium": CountryChannels(
+        country_code="BE",
+        allowed_channels=WORLD_WIDE_2G_CHANNELS
+        | {
+            12: [20],
+            13: [20],
+            36: [20, 40, 80],
+            40: [20, 40, 80],
+            44: [20, 40, 80],
+            48: [20, 40, 80],
+            52: [20, 40, 80],
+            56: [20, 40, 80],
+            60: [20, 40, 80],
+            64: [20, 40, 80],
+            100: [20, 40, 80],
+            104: [20, 40, 80],
+            108: [20, 40, 80],
+            112: [20, 40, 80],
+            116: [20, 40, 80],
+            120: [20, 40, 80],
+            124: [20, 40, 80],
+            128: [20, 40, 80],
+            132: [20, 40, 80],
+            136: [20, 40, 80],
+            140: [20, 40, 80],
+        },
+    ),
+    "Canada": CountryChannels(
+        country_code="CA",
+        allowed_channels=WORLD_WIDE_2G_CHANNELS
+        | {
+            36: [20, 40, 80],
+            40: [20, 40, 80],
+            44: [20, 40, 80],
+            48: [20, 40, 80],
+            52: [20, 40, 80],
+            56: [20, 40, 80],
+            60: [20, 40, 80],
+            64: [20, 40, 80],
+            100: [20, 40, 80],
+            104: [20, 40, 80],
+            108: [20, 40, 80],
+            112: [20, 40, 80],
+            116: [20, 40, 80],
+            132: [20, 40, 80],
+            136: [20, 40, 80],
+            140: [20, 40, 80],
+            144: [20, 40, 80],
+            149: [20, 40, 80],
+            153: [20, 40, 80],
+            157: [20, 40, 80],
+            161: [20, 40, 80],
+            165: [20],
+        },
+    ),
+    "China": CountryChannels(
+        country_code="CN",
+        allowed_channels=WORLD_WIDE_2G_CHANNELS
+        | {
+            12: [20],
+            13: [20],
+            36: [20, 40, 80],
+            40: [20, 40, 80],
+            44: [20, 40, 80],
+            48: [20, 40, 80],
+            52: [20, 40, 80],
+            56: [20, 40, 80],
+            60: [20, 40, 80],
+            64: [20, 40, 80],
+            100: [20, 40, 80],
+            104: [20, 40, 80],
+            108: [20, 40, 80],
+            112: [20, 40, 80],
+            116: [20, 40, 80],
+            120: [20, 40, 80],
+            124: [20, 40, 80],
+            128: [20, 40, 80],
+            132: [20, 40, 80],
+            136: [20, 40, 80],
+            140: [20, 40, 80],
+        },
+    ),
+    "Denmark": CountryChannels(
+        country_code="DK",
+        allowed_channels=WORLD_WIDE_2G_CHANNELS
+        | {
+            12: [20],
+            13: [20],
+            36: [20, 40, 80],
+            40: [20, 40, 80],
+            44: [20, 40, 80],
+            48: [20, 40, 80],
+            52: [20, 40, 80],
+            56: [20, 40, 80],
+            60: [20, 40, 80],
+            64: [20, 40, 80],
+            100: [20, 40, 80],
+            104: [20, 40, 80],
+            108: [20, 40, 80],
+            112: [20, 40, 80],
+            116: [20, 40, 80],
+            120: [20, 40, 80],
+            124: [20, 40, 80],
+            128: [20, 40, 80],
+            132: [20, 40, 80],
+            136: [20, 40, 80],
+            140: [20, 40, 80],
+        },
+    ),
+    "France": CountryChannels(
+        country_code="FR",
+        allowed_channels=WORLD_WIDE_2G_CHANNELS
+        | {
+            12: [20],
+            13: [20],
+            36: [20, 40, 80],
+            40: [20, 40, 80],
+            44: [20, 40, 80],
+            48: [20, 40, 80],
+            52: [20, 40, 80],
+            56: [20, 40, 80],
+            60: [20, 40, 80],
+            64: [20, 40, 80],
+            100: [20, 40, 80],
+            104: [20, 40, 80],
+            108: [20, 40, 80],
+            112: [20, 40, 80],
+            116: [20, 40, 80],
+            120: [20, 40, 80],
+            124: [20, 40, 80],
+            128: [20, 40, 80],
+            132: [20, 40, 80],
+            136: [20, 40, 80],
+            140: [20, 40, 80],
+        },
+    ),
+    "Germany": CountryChannels(
+        country_code="DE",
+        allowed_channels=WORLD_WIDE_2G_CHANNELS
+        | {
+            12: [20],
+            13: [20],
+            36: [20, 40, 80],
+            40: [20, 40, 80],
+            44: [20, 40, 80],
+            48: [20, 40, 80],
+            52: [20, 40, 80],
+            56: [20, 40, 80],
+            60: [20, 40, 80],
+            64: [20, 40, 80],
+            100: [20, 40, 80],
+            104: [20, 40, 80],
+            108: [20, 40, 80],
+            112: [20, 40, 80],
+            116: [20, 40, 80],
+            120: [20, 40, 80],
+            124: [20, 40, 80],
+            128: [20, 40, 80],
+            132: [20, 40, 80],
+            136: [20, 40, 80],
+            140: [20, 40, 80],
+        },
+    ),
+    "India": CountryChannels(
+        country_code="IN",
+        allowed_channels=WORLD_WIDE_2G_CHANNELS
+        | {
+            12: [20],
+            13: [20],
+            36: [20, 40, 80],
+            40: [20, 40, 80],
+            44: [20, 40, 80],
+            48: [20, 40, 80],
+            52: [20, 40, 80],
+            56: [20, 40, 80],
+            60: [20, 40, 80],
+            64: [20, 40, 80],
+            100: [20, 40, 80],
+            104: [20, 40, 80],
+            108: [20, 40, 80],
+            112: [20, 40, 80],
+            116: [20, 40, 80],
+            120: [20, 40, 80],
+            124: [20, 40, 80],
+            128: [20, 40, 80],
+            132: [20, 40, 80],
+            136: [20, 40, 80],
+            140: [20, 40, 80],
+            144: [20, 40, 80],
+            149: [20, 40, 80],
+            153: [20, 40, 80],
+            157: [20, 40, 80],
+            161: [20, 40, 80],
+            165: [20],
+        },
+    ),
+    "Ireland": CountryChannels(
+        country_code="IE",
+        allowed_channels=WORLD_WIDE_2G_CHANNELS
+        | {
+            12: [20],
+            13: [20],
+            36: [20, 40, 80],
+            40: [20, 40, 80],
+            44: [20, 40, 80],
+            48: [20, 40, 80],
+            52: [20, 40, 80],
+            56: [20, 40, 80],
+            60: [20, 40, 80],
+            64: [20, 40, 80],
+            100: [20, 40, 80],
+            104: [20, 40, 80],
+            108: [20, 40, 80],
+            112: [20, 40, 80],
+            116: [20, 40, 80],
+            120: [20, 40, 80],
+            124: [20, 40, 80],
+            128: [20, 40, 80],
+            132: [20, 40, 80],
+            136: [20, 40, 80],
+            140: [20, 40, 80],
+        },
+    ),
+    "Italy": CountryChannels(
+        country_code="IT",
+        allowed_channels=WORLD_WIDE_2G_CHANNELS
+        | {
+            12: [20],
+            13: [20],
+            36: [20, 40, 80],
+            40: [20, 40, 80],
+            44: [20, 40, 80],
+            48: [20, 40, 80],
+            52: [20, 40, 80],
+            56: [20, 40, 80],
+            60: [20, 40, 80],
+            64: [20, 40, 80],
+            100: [20, 40, 80],
+            104: [20, 40, 80],
+            108: [20, 40, 80],
+            112: [20, 40, 80],
+            116: [20, 40, 80],
+            120: [20, 40, 80],
+            124: [20, 40, 80],
+            128: [20, 40, 80],
+            132: [20, 40, 80],
+            136: [20, 40, 80],
+            140: [20, 40, 80],
+        },
+    ),
+    "Japan": CountryChannels(
+        country_code="JP",
+        allowed_channels=WORLD_WIDE_2G_CHANNELS
+        | {
+            12: [20],
+            13: [20],
+            36: [20, 40, 80],
+            40: [20, 40, 80],
+            44: [20, 40, 80],
+            48: [20, 40, 80],
+            52: [20, 40, 80],
+            56: [20, 40, 80],
+            60: [20, 40, 80],
+            64: [20, 40, 80],
+            100: [20, 40, 80],
+            104: [20, 40, 80],
+            108: [20, 40, 80],
+            112: [20, 40, 80],
+            116: [20, 40, 80],
+            120: [20, 40, 80],
+            124: [20, 40, 80],
+            128: [20, 40, 80],
+            132: [20, 40, 80],
+            136: [20, 40, 80],
+            140: [20, 40, 80],
+            144: [20, 40, 80],
+        },
+    ),
+    "Mexico": CountryChannels(
+        country_code="MX",
+        allowed_channels=WORLD_WIDE_2G_CHANNELS
+        | {
+            36: [20, 40, 80],
+            40: [20, 40, 80],
+            44: [20, 40, 80],
+            48: [20, 40, 80],
+            52: [20, 40, 80],
+            56: [20, 40, 80],
+            60: [20, 40, 80],
+            64: [20, 40, 80],
+            100: [20, 40, 80],
+            104: [20, 40, 80],
+            108: [20, 40, 80],
+            112: [20, 40, 80],
+            116: [20, 40, 80],
+            132: [20, 40, 80],
+            136: [20, 40, 80],
+            140: [20, 40, 80],
+            144: [20, 40, 80],
+            149: [20, 40, 80],
+            153: [20, 40, 80],
+            157: [20, 40, 80],
+            161: [20, 40, 80],
+            165: [20],
+        },
+    ),
+    "Netherlands": CountryChannels(
+        country_code="NL",
+        allowed_channels=WORLD_WIDE_2G_CHANNELS
+        | {
+            12: [20],
+            13: [20],
+            36: [20, 40, 80],
+            40: [20, 40, 80],
+            44: [20, 40, 80],
+            48: [20, 40, 80],
+            52: [20, 40, 80],
+            56: [20, 40, 80],
+            60: [20, 40, 80],
+            64: [20, 40, 80],
+            100: [20, 40, 80],
+            104: [20, 40, 80],
+            108: [20, 40, 80],
+            112: [20, 40, 80],
+            116: [20, 40, 80],
+            120: [20, 40, 80],
+            124: [20, 40, 80],
+            128: [20, 40, 80],
+            132: [20, 40, 80],
+            136: [20, 40, 80],
+            140: [20, 40, 80],
+        },
+    ),
+    "New Zealand": CountryChannels(
+        country_code="NZ",
+        allowed_channels=WORLD_WIDE_2G_CHANNELS
+        | {
+            12: [20],
+            13: [20],
+            36: [20, 40, 80],
+            40: [20, 40, 80],
+            44: [20, 40, 80],
+            48: [20, 40, 80],
+            52: [20, 40, 80],
+            56: [20, 40, 80],
+            60: [20, 40, 80],
+            64: [20, 40, 80],
+            100: [20, 40, 80],
+            104: [20, 40, 80],
+            108: [20, 40, 80],
+            112: [20, 40, 80],
+            116: [20, 40, 80],
+            120: [20, 40, 80],
+            124: [20, 40, 80],
+            128: [20, 40, 80],
+            132: [20, 40, 80],
+            136: [20, 40, 80],
+            140: [20, 40, 80],
+            144: [20, 40, 80],
+            149: [20, 40, 80],
+            153: [20, 40, 80],
+            157: [20, 40, 80],
+            161: [20, 40, 80],
+            165: [20],
+        },
+    ),
+    "Norway": CountryChannels(
+        country_code="NO",
+        allowed_channels=WORLD_WIDE_2G_CHANNELS
+        | {
+            12: [20],
+            13: [20],
+            36: [20, 40, 80],
+            40: [20, 40, 80],
+            44: [20, 40, 80],
+            48: [20, 40, 80],
+            52: [20, 40, 80],
+            56: [20, 40, 80],
+            60: [20, 40, 80],
+            64: [20, 40, 80],
+            100: [20, 40, 80],
+            104: [20, 40, 80],
+            108: [20, 40, 80],
+            112: [20, 40, 80],
+            116: [20, 40, 80],
+            120: [20, 40, 80],
+            124: [20, 40, 80],
+            128: [20, 40, 80],
+            132: [20, 40, 80],
+            136: [20, 40, 80],
+            140: [20, 40, 80],
+        },
+    ),
+    "Singapore": CountryChannels(
+        country_code="SG",
+        allowed_channels=WORLD_WIDE_2G_CHANNELS
+        | {
+            12: [20],
+            13: [20],
+            36: [20, 40, 80],
+            40: [20, 40, 80],
+            44: [20, 40, 80],
+            48: [20, 40, 80],
+            52: [20, 40, 80],
+            56: [20, 40, 80],
+            60: [20, 40, 80],
+            64: [20, 40, 80],
+            100: [20, 40, 80],
+            104: [20, 40, 80],
+            108: [20, 40, 80],
+            112: [20, 40, 80],
+            116: [20, 40, 80],
+            120: [20, 40, 80],
+            124: [20, 40, 80],
+            128: [20, 40, 80],
+            132: [20, 40, 80],
+            136: [20, 40, 80],
+            140: [20, 40, 80],
+            144: [20, 40, 80],
+            149: [20, 40, 80],
+            153: [20, 40, 80],
+            157: [20, 40, 80],
+            161: [20, 40, 80],
+            165: [20],
+        },
+    ),
+    "Spain": CountryChannels(
+        country_code="ES",
+        allowed_channels=WORLD_WIDE_2G_CHANNELS
+        | {
+            12: [20],
+            13: [20],
+            36: [20, 40, 80],
+            40: [20, 40, 80],
+            44: [20, 40, 80],
+            48: [20, 40, 80],
+            52: [20, 40, 80],
+            56: [20, 40, 80],
+            60: [20, 40, 80],
+            64: [20, 40, 80],
+            100: [20, 40, 80],
+            104: [20, 40, 80],
+            108: [20, 40, 80],
+            112: [20, 40, 80],
+            116: [20, 40, 80],
+            120: [20, 40, 80],
+            124: [20, 40, 80],
+            128: [20, 40, 80],
+            132: [20, 40, 80],
+            136: [20, 40, 80],
+            140: [20, 40, 80],
+        },
+    ),
+    "Sweden": CountryChannels(
+        country_code="SE",
+        allowed_channels=WORLD_WIDE_2G_CHANNELS
+        | {
+            12: [20],
+            13: [20],
+            36: [20, 40, 80],
+            40: [20, 40, 80],
+            44: [20, 40, 80],
+            48: [20, 40, 80],
+            52: [20, 40, 80],
+            56: [20, 40, 80],
+            60: [20, 40, 80],
+            64: [20, 40, 80],
+            100: [20, 40, 80],
+            104: [20, 40, 80],
+            108: [20, 40, 80],
+            112: [20, 40, 80],
+            116: [20, 40, 80],
+            120: [20, 40, 80],
+            124: [20, 40, 80],
+            128: [20, 40, 80],
+            132: [20, 40, 80],
+            136: [20, 40, 80],
+            140: [20, 40, 80],
+        },
+    ),
+    "Taiwan": CountryChannels(
+        country_code="TW",
+        allowed_channels=WORLD_WIDE_2G_CHANNELS
+        | {
+            12: [20],
+            13: [20],
+            36: [20, 40, 80],
+            40: [20, 40, 80],
+            44: [20, 40, 80],
+            48: [20, 40, 80],
+            52: [20, 40, 80],
+            56: [20, 40, 80],
+            60: [20, 40, 80],
+            64: [20, 40, 80],
+            100: [20, 40, 80],
+            104: [20, 40, 80],
+            108: [20, 40, 80],
+            112: [20, 40, 80],
+            116: [20, 40, 80],
+            120: [20, 40, 80],
+            124: [20, 40, 80],
+            128: [20, 40, 80],
+            132: [20, 40, 80],
+            136: [20, 40, 80],
+            140: [20, 40, 80],
+            144: [20, 40, 80],
+            149: [20, 40, 80],
+            153: [20, 40, 80],
+            157: [20, 40, 80],
+            161: [20, 40, 80],
+            165: [20],
+        },
+    ),
+    "United Kingdom of Great Britain": CountryChannels(
+        country_code="GB",
+        allowed_channels=WORLD_WIDE_2G_CHANNELS
+        | {
+            11: [20],
+            12: [20],
+            13: [20],
+            36: [20, 40, 80],
+            40: [20, 40, 80],
+            44: [20, 40, 80],
+            48: [20, 40, 80],
+            52: [20, 40, 80],
+            56: [20, 40, 80],
+            60: [20, 40, 80],
+            64: [20, 40, 80],
+            100: [20, 40, 80],
+            104: [20, 40, 80],
+            108: [20, 40, 80],
+            112: [20, 40, 80],
+            116: [20, 40, 80],
+            120: [20, 40, 80],
+            124: [20, 40, 80],
+            128: [20, 40, 80],
+            132: [20, 40, 80],
+            136: [20, 40, 80],
+            140: [20, 40, 80],
+        },
+    ),
+    "United States of America": CountryChannels(
+        country_code="US",
+        allowed_channels=WORLD_WIDE_2G_CHANNELS
+        | {
+            36: [20, 40, 80],
+            40: [20, 40, 80],
+            44: [20, 40, 80],
+            48: [20, 40, 80],
+            52: [20, 40, 80],
+            56: [20, 40, 80],
+            60: [20, 40, 80],
+            64: [20, 40, 80],
+            100: [20, 40, 80],
+            104: [20, 40, 80],
+            108: [20, 40, 80],
+            112: [20, 40, 80],
+            116: [20, 40, 80],
+            120: [20, 40, 80],
+            124: [20, 40, 80],
+            128: [20, 40, 80],
+            132: [20, 40, 80],
+            136: [20, 40, 80],
+            140: [20, 40, 80],
+            144: [20, 40, 80],
+            149: [20, 40, 80],
+            153: [20, 40, 80],
+            157: [20, 40, 80],
+            161: [20, 40, 80],
+            165: [20],
+        },
+    ),
+}
diff --git a/src/antlion/controllers/ap_lib/third_party_ap_profiles/__init__.py b/packages/antlion/controllers/ap_lib/third_party_ap_profiles/__init__.py
similarity index 100%
rename from src/antlion/controllers/ap_lib/third_party_ap_profiles/__init__.py
rename to packages/antlion/controllers/ap_lib/third_party_ap_profiles/__init__.py
diff --git a/src/antlion/controllers/ap_lib/third_party_ap_profiles/actiontec.py b/packages/antlion/controllers/ap_lib/third_party_ap_profiles/actiontec.py
similarity index 76%
rename from src/antlion/controllers/ap_lib/third_party_ap_profiles/actiontec.py
rename to packages/antlion/controllers/ap_lib/third_party_ap_profiles/actiontec.py
index 9e48935..f04f60b 100644
--- a/src/antlion/controllers/ap_lib/third_party_ap_profiles/actiontec.py
+++ b/packages/antlion/controllers/ap_lib/third_party_ap_profiles/actiontec.py
@@ -12,19 +12,19 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from antlion import utils
 
-from antlion.controllers.ap_lib import hostapd_config
-from antlion.controllers.ap_lib import hostapd_constants
-from antlion.controllers.ap_lib import hostapd_utils
+from antlion.controllers.ap_lib import hostapd_config, hostapd_constants, hostapd_utils
+from antlion.controllers.ap_lib.hostapd_security import Security, SecurityMode
 
 
-def actiontec_pk5000(iface_wlan_2g=None, channel=None, security=None, ssid=None):
+def actiontec_pk5000(
+    iface_wlan_2g: str, channel: int, security: Security, ssid: str | None = None
+) -> hostapd_config.HostapdConfig:
     """A simulated implementation of what a Actiontec PK5000 AP
     Args:
         iface_wlan_2g: The 2.4 interface of the test AP.
         channel: What channel to use.  Only 2.4Ghz is supported for this profile
-        security: A security profile.  Must be none or WPA2 as this is what is
+        security: A security profile.  Must be open or WPA2 as this is what is
             supported by the PK5000.
         ssid: Network name
     Returns:
@@ -41,13 +41,12 @@
         # Technically this should be 14 but since the PK5000 is a US only AP,
         # 11 is the highest allowable channel.
         raise ValueError(
-            "The Actiontec PK5000 does not support 5Ghz. "
-            "Invalid channel (%s)" % channel
+            f"The Actiontec PK5000 does not support 5Ghz. Invalid channel ({channel})"
         )
     # Verify interface and security
     hostapd_utils.verify_interface(iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST)
-    hostapd_utils.verify_security_mode(security, [None, hostapd_constants.WPA2])
-    if security:
+    hostapd_utils.verify_security_mode(security, [SecurityMode.OPEN, SecurityMode.WPA2])
+    if security.security_mode is not SecurityMode.OPEN:
         hostapd_utils.verify_cipher(security, [hostapd_constants.WPA2_DEFAULT_CIPER])
 
     interface = iface_wlan_2g
@@ -56,9 +55,9 @@
     beacon_interval = 100
     dtim_period = 3
     # Sets the basic rates and supported rates of the PK5000
-    additional_params = utils.merge_dicts(
-        hostapd_constants.CCK_AND_OFDM_BASIC_RATES,
-        hostapd_constants.CCK_AND_OFDM_DATA_RATES,
+    additional_params = (
+        hostapd_constants.CCK_AND_OFDM_BASIC_RATES
+        | hostapd_constants.CCK_AND_OFDM_DATA_RATES
     )
 
     config = hostapd_config.HostapdConfig(
@@ -78,7 +77,9 @@
     return config
 
 
-def actiontec_mi424wr(iface_wlan_2g=None, channel=None, security=None, ssid=None):
+def actiontec_mi424wr(
+    iface_wlan_2g: str, channel: int, security: Security, ssid: str | None = None
+) -> hostapd_config.HostapdConfig:
     # TODO(b/143104825): Permit RIFS once it is supported
     """A simulated implementation of an Actiontec MI424WR AP.
     Args:
@@ -103,13 +104,12 @@
     """
     if channel > 11:
         raise ValueError(
-            "The Actiontec MI424WR does not support 5Ghz. "
-            "Invalid channel (%s)" % channel
+            f"The Actiontec MI424WR does not support 5Ghz. Invalid channel ({channel})"
         )
     # Verify interface and security
     hostapd_utils.verify_interface(iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST)
-    hostapd_utils.verify_security_mode(security, [None, hostapd_constants.WPA2])
-    if security:
+    hostapd_utils.verify_security_mode(security, [SecurityMode.OPEN, SecurityMode.WPA2])
+    if security.security_mode is not SecurityMode.OPEN:
         hostapd_utils.verify_cipher(security, [hostapd_constants.WPA2_DEFAULT_CIPER])
 
     n_capabilities = [
@@ -117,9 +117,9 @@
         hostapd_constants.N_CAPABILITY_DSSS_CCK_40,
         hostapd_constants.N_CAPABILITY_RX_STBC1,
     ]
-    rates = utils.merge_dicts(
-        hostapd_constants.CCK_AND_OFDM_DATA_RATES,
-        hostapd_constants.CCK_AND_OFDM_BASIC_RATES,
+    rates = (
+        hostapd_constants.CCK_AND_OFDM_DATA_RATES
+        | hostapd_constants.CCK_AND_OFDM_BASIC_RATES
     )
     # Proprietary Atheros Communication: Adv Capability IE
     # Proprietary Atheros Communication: Unknown IE
@@ -130,7 +130,7 @@
         "0706555320010b1b"
     }
 
-    additional_params = utils.merge_dicts(rates, vendor_elements)
+    additional_params = rates | vendor_elements
 
     config = hostapd_config.HostapdConfig(
         ssid=ssid,
diff --git a/src/antlion/controllers/ap_lib/third_party_ap_profiles/asus.py b/packages/antlion/controllers/ap_lib/third_party_ap_profiles/asus.py
similarity index 87%
rename from src/antlion/controllers/ap_lib/third_party_ap_profiles/asus.py
rename to packages/antlion/controllers/ap_lib/third_party_ap_profiles/asus.py
index ea25157..6a9ae27 100644
--- a/src/antlion/controllers/ap_lib/third_party_ap_profiles/asus.py
+++ b/packages/antlion/controllers/ap_lib/third_party_ap_profiles/asus.py
@@ -12,16 +12,18 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from antlion import utils
 
-from antlion.controllers.ap_lib import hostapd_config
-from antlion.controllers.ap_lib import hostapd_constants
-from antlion.controllers.ap_lib import hostapd_utils
+from antlion.controllers.ap_lib import hostapd_config, hostapd_constants, hostapd_utils
+from antlion.controllers.ap_lib.hostapd_security import Security, SecurityMode
 
 
 def asus_rtac66u(
-    iface_wlan_2g=None, iface_wlan_5g=None, channel=None, security=None, ssid=None
-):
+    iface_wlan_2g: str,
+    iface_wlan_5g: str,
+    channel: int,
+    security: Security,
+    ssid: str | None = None,
+) -> hostapd_config.HostapdConfig:
     # TODO(b/143104825): Permit RIFS once it is supported
     # TODO(b/144446076): Address non-whirlwind hardware capabilities.
     """A simulated implementation of an Asus RTAC66U AP.
@@ -29,7 +31,7 @@
         iface_wlan_2g: The 2.4Ghz interface of the test AP.
         iface_wlan_5g: The 5Ghz interface of the test AP.
         channel: What channel to use.
-        security: A security profile.  Must be none or WPA2 as this is what is
+        security: A security profile.  Must be open or WPA2 as this is what is
             supported by the RTAC66U.
         ssid: Network name
     Returns:
@@ -75,8 +77,8 @@
     # Verify interface and security
     hostapd_utils.verify_interface(iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST)
     hostapd_utils.verify_interface(iface_wlan_5g, hostapd_constants.INTERFACE_5G_LIST)
-    hostapd_utils.verify_security_mode(security, [None, hostapd_constants.WPA2])
-    if security:
+    hostapd_utils.verify_security_mode(security, [SecurityMode.OPEN, SecurityMode.WPA2])
+    if security.security_mode is not SecurityMode.OPEN:
         hostapd_utils.verify_cipher(security, [hostapd_constants.WPA2_DEFAULT_CIPER])
 
     # Common Parameters
@@ -119,9 +121,7 @@
             hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP7,
         ]
 
-    additional_params = utils.merge_dicts(
-        rates, vendor_elements, hostapd_constants.UAPSD_ENABLED
-    )
+    additional_params = rates | vendor_elements | hostapd_constants.UAPSD_ENABLED
 
     config = hostapd_config.HostapdConfig(
         ssid=ssid,
@@ -144,14 +144,18 @@
 
 
 def asus_rtac86u(
-    iface_wlan_2g=None, iface_wlan_5g=None, channel=None, security=None, ssid=None
-):
+    iface_wlan_2g: str,
+    iface_wlan_5g: str,
+    channel: int,
+    security: Security,
+    ssid: str | None = None,
+) -> hostapd_config.HostapdConfig:
     """A simulated implementation of an Asus RTAC86U AP.
     Args:
         iface_wlan_2g: The 2.4Ghz interface of the test AP.
         iface_wlan_5g: The 5Ghz interface of the test AP.
         channel: What channel to use.
-        security: A security profile.  Must be none or WPA2 as this is what is
+        security: A security profile.  Must be open or WPA2 as this is what is
             supported by the RTAC86U.
         ssid: Network name
     Returns:
@@ -180,8 +184,8 @@
     # Verify interface and security
     hostapd_utils.verify_interface(iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST)
     hostapd_utils.verify_interface(iface_wlan_5g, hostapd_constants.INTERFACE_5G_LIST)
-    hostapd_utils.verify_security_mode(security, [None, hostapd_constants.WPA2])
-    if security:
+    hostapd_utils.verify_security_mode(security, [SecurityMode.OPEN, SecurityMode.WPA2])
+    if security.security_mode is not SecurityMode.OPEN:
         hostapd_utils.verify_cipher(security, [hostapd_constants.WPA2_DEFAULT_CIPER])
 
     # Common Parameters
@@ -202,7 +206,7 @@
         interface = iface_wlan_5g
         mode = hostapd_constants.MODE_11A
         rates.update(hostapd_constants.OFDM_ONLY_BASIC_RATES)
-        spectrum_mgmt = (True,)
+        spectrum_mgmt = True
         # Country Information IE (w/ individual channel info)
         # TPC Report Transmit Power IE
         # Measurement Pilot Transmission IE
@@ -214,7 +218,7 @@
             "42020000"
         }
 
-    additional_params = utils.merge_dicts(rates, qbss, vendor_elements)
+    additional_params = rates | qbss | vendor_elements
 
     config = hostapd_config.HostapdConfig(
         ssid=ssid,
@@ -234,8 +238,12 @@
 
 
 def asus_rtac5300(
-    iface_wlan_2g=None, iface_wlan_5g=None, channel=None, security=None, ssid=None
-):
+    iface_wlan_2g: str,
+    iface_wlan_5g: str,
+    channel: int,
+    security: Security,
+    ssid: str | None = None,
+) -> hostapd_config.HostapdConfig:
     # TODO(b/143104825): Permit RIFS once it is supported
     # TODO(b/144446076): Address non-whirlwind hardware capabilities.
     """A simulated implementation of an Asus RTAC5300 AP.
@@ -243,7 +251,7 @@
         iface_wlan_2g: The 2.4Ghz interface of the test AP.
         iface_wlan_5g: The 5Ghz interface of the test AP.
         channel: What channel to use.
-        security: A security profile.  Must be none or WPA2 as this is what is
+        security: A security profile.  Must be open or WPA2 as this is what is
             supported by the RTAC5300.
         ssid: Network name
     Returns:
@@ -286,8 +294,8 @@
     # Verify interface and security
     hostapd_utils.verify_interface(iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST)
     hostapd_utils.verify_interface(iface_wlan_5g, hostapd_constants.INTERFACE_5G_LIST)
-    hostapd_utils.verify_security_mode(security, [None, hostapd_constants.WPA2])
-    if security:
+    hostapd_utils.verify_security_mode(security, [SecurityMode.OPEN, SecurityMode.WPA2])
+    if security.security_mode is not SecurityMode.OPEN:
         hostapd_utils.verify_cipher(security, [hostapd_constants.WPA2_DEFAULT_CIPER])
 
     # Common Parameters
@@ -334,9 +342,7 @@
             hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP7,
         ]
 
-    additional_params = utils.merge_dicts(
-        rates, qbss, vendor_elements, hostapd_constants.UAPSD_ENABLED
-    )
+    additional_params = rates | qbss | vendor_elements | hostapd_constants.UAPSD_ENABLED
 
     config = hostapd_config.HostapdConfig(
         ssid=ssid,
@@ -358,14 +364,18 @@
 
 
 def asus_rtn56u(
-    iface_wlan_2g=None, iface_wlan_5g=None, channel=None, security=None, ssid=None
-):
+    iface_wlan_2g: str,
+    iface_wlan_5g: str,
+    channel: int,
+    security: Security,
+    ssid: str | None = None,
+) -> hostapd_config.HostapdConfig:
     """A simulated implementation of an Asus RTN56U AP.
     Args:
         iface_wlan_2g: The 2.4Ghz interface of the test AP.
         iface_wlan_5g: The 5Ghz interface of the test AP.
         channel: What channel to use.
-        security: A security profile.  Must be none or WPA2 as this is what is
+        security: A security profile.  Must be open or WPA2 as this is what is
             supported by the RTN56U.
         ssid: Network name
     Returns:
@@ -396,8 +406,8 @@
     # Verify interface and security
     hostapd_utils.verify_interface(iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST)
     hostapd_utils.verify_interface(iface_wlan_5g, hostapd_constants.INTERFACE_5G_LIST)
-    hostapd_utils.verify_security_mode(security, [None, hostapd_constants.WPA2])
-    if security:
+    hostapd_utils.verify_security_mode(security, [SecurityMode.OPEN, SecurityMode.WPA2])
+    if security.security_mode is not SecurityMode.OPEN:
         hostapd_utils.verify_cipher(security, [hostapd_constants.WPA2_DEFAULT_CIPER])
 
     # Common Parameters
@@ -435,9 +445,7 @@
         # US Country Code IE
         vendor_elements = {"vendor_elements": "dd07000c4307000000" "0706555320010b14"}
 
-    additional_params = utils.merge_dicts(
-        rates, vendor_elements, qbss, hostapd_constants.UAPSD_ENABLED
-    )
+    additional_params = rates | vendor_elements | qbss | hostapd_constants.UAPSD_ENABLED
 
     config = hostapd_config.HostapdConfig(
         ssid=ssid,
@@ -458,15 +466,19 @@
 
 
 def asus_rtn66u(
-    iface_wlan_2g=None, iface_wlan_5g=None, channel=None, security=None, ssid=None
-):
+    iface_wlan_2g: str,
+    iface_wlan_5g: str,
+    channel: int,
+    security: Security,
+    ssid: str | None = None,
+) -> hostapd_config.HostapdConfig:
     # TODO(b/143104825): Permit RIFS once it is supported
     """A simulated implementation of an Asus RTN66U AP.
     Args:
         iface_wlan_2g: The 2.4Ghz interface of the test AP.
         iface_wlan_5g: The 5Ghz interface of the test AP.
         channel: What channel to use.
-        security: A security profile.  Must be none or WPA2 as this is what is
+        security: A security profile.  Must be open or WPA2 as this is what is
             supported by the RTN66U.
         ssid: Network name
     Returns:
@@ -495,8 +507,8 @@
     # Verify interface and security
     hostapd_utils.verify_interface(iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST)
     hostapd_utils.verify_interface(iface_wlan_5g, hostapd_constants.INTERFACE_5G_LIST)
-    hostapd_utils.verify_security_mode(security, [None, hostapd_constants.WPA2])
-    if security:
+    hostapd_utils.verify_security_mode(security, [SecurityMode.OPEN, SecurityMode.WPA2])
+    if security.security_mode is not SecurityMode.OPEN:
         hostapd_utils.verify_cipher(security, [hostapd_constants.WPA2_DEFAULT_CIPER])
 
     # Common Parameters
@@ -522,9 +534,7 @@
         interface = iface_wlan_5g
         rates.update(hostapd_constants.OFDM_ONLY_BASIC_RATES)
 
-    additional_params = utils.merge_dicts(
-        rates, vendor_elements, hostapd_constants.UAPSD_ENABLED
-    )
+    additional_params = rates | vendor_elements | hostapd_constants.UAPSD_ENABLED
 
     config = hostapd_config.HostapdConfig(
         ssid=ssid,
diff --git a/src/antlion/controllers/ap_lib/third_party_ap_profiles/belkin.py b/packages/antlion/controllers/ap_lib/third_party_ap_profiles/belkin.py
similarity index 77%
rename from src/antlion/controllers/ap_lib/third_party_ap_profiles/belkin.py
rename to packages/antlion/controllers/ap_lib/third_party_ap_profiles/belkin.py
index 9c5c99d..62a9d66 100644
--- a/src/antlion/controllers/ap_lib/third_party_ap_profiles/belkin.py
+++ b/packages/antlion/controllers/ap_lib/third_party_ap_profiles/belkin.py
@@ -12,20 +12,20 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from antlion import utils
 
-from antlion.controllers.ap_lib import hostapd_config
-from antlion.controllers.ap_lib import hostapd_constants
-from antlion.controllers.ap_lib import hostapd_utils
+from antlion.controllers.ap_lib import hostapd_config, hostapd_constants, hostapd_utils
+from antlion.controllers.ap_lib.hostapd_security import Security, SecurityMode
 
 
-def belkin_f9k1001v5(iface_wlan_2g=None, channel=None, security=None, ssid=None):
+def belkin_f9k1001v5(
+    iface_wlan_2g: str, channel: int, security: Security, ssid: str | None = None
+) -> hostapd_config.HostapdConfig:
     # TODO(b/143104825): Permit RIFS once it is supported
     """A simulated implementation of what a Belkin F9K1001v5 AP
     Args:
         iface_wlan_2g: The 2.4Ghz interface of the test AP.
         channel: What channel to use.
-        security: A security profile (None or WPA2).
+        security: A security profile (open or WPA2).
         ssid: The network name.
     Returns:
         A hostapd config.
@@ -50,13 +50,12 @@
     """
     if channel > 11:
         raise ValueError(
-            "The Belkin F9k1001v5 does not support 5Ghz. "
-            "Invalid channel (%s)" % channel
+            f"The Belkin F9k1001v5 does not support 5Ghz. Invalid channel ({channel})"
         )
     # Verify interface and security
     hostapd_utils.verify_interface(iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST)
-    hostapd_utils.verify_security_mode(security, [None, hostapd_constants.WPA2])
-    if security:
+    hostapd_utils.verify_security_mode(security, [SecurityMode.OPEN, SecurityMode.WPA2])
+    if security.security_mode is not SecurityMode.OPEN:
         hostapd_utils.verify_cipher(security, [hostapd_constants.WPA2_DEFAULT_CIPER])
 
     n_capabilities = [
@@ -67,9 +66,9 @@
         hostapd_constants.N_CAPABILITY_DSSS_CCK_40,
     ]
 
-    rates = additional_params = utils.merge_dicts(
-        hostapd_constants.CCK_AND_OFDM_BASIC_RATES,
-        hostapd_constants.CCK_AND_OFDM_DATA_RATES,
+    rates = (
+        hostapd_constants.CCK_AND_OFDM_BASIC_RATES
+        | hostapd_constants.CCK_AND_OFDM_DATA_RATES
     )
 
     # Broadcom IE
@@ -79,7 +78,7 @@
         "dd180050f204104a00011010440001021049000600372a000120"
     }
 
-    additional_params = utils.merge_dicts(rates, vendor_elements)
+    additional_params = rates | vendor_elements
 
     config = hostapd_config.HostapdConfig(
         ssid=ssid,
diff --git a/src/antlion/controllers/ap_lib/third_party_ap_profiles/linksys.py b/packages/antlion/controllers/ap_lib/third_party_ap_profiles/linksys.py
similarity index 86%
rename from src/antlion/controllers/ap_lib/third_party_ap_profiles/linksys.py
rename to packages/antlion/controllers/ap_lib/third_party_ap_profiles/linksys.py
index 8010837..21f3fb1 100644
--- a/src/antlion/controllers/ap_lib/third_party_ap_profiles/linksys.py
+++ b/packages/antlion/controllers/ap_lib/third_party_ap_profiles/linksys.py
@@ -12,16 +12,18 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from antlion import utils
 
-from antlion.controllers.ap_lib import hostapd_config
-from antlion.controllers.ap_lib import hostapd_constants
-from antlion.controllers.ap_lib import hostapd_utils
+from antlion.controllers.ap_lib import hostapd_config, hostapd_constants, hostapd_utils
+from antlion.controllers.ap_lib.hostapd_security import Security, SecurityMode
 
 
 def linksys_ea4500(
-    iface_wlan_2g=None, iface_wlan_5g=None, channel=None, security=None, ssid=None
-):
+    iface_wlan_2g: str,
+    iface_wlan_5g: str,
+    channel: int,
+    security: Security,
+    ssid: str | None = None,
+) -> hostapd_config.HostapdConfig:
     # TODO(b/143104825): Permit RIFS once it is supported
     # TODO(b/144446076): Address non-whirlwind hardware capabilities.
     """A simulated implementation of what a Linksys EA4500 AP
@@ -29,7 +31,7 @@
         iface_wlan_2g: The 2.4Ghz interface of the test AP.
         iface_wlan_5g: The 5GHz interface of the test AP.
         channel: What channel to use.
-        security: A security profile (None or WPA2).
+        security: A security profile (open or WPA2).
         ssid: The network name.
     Returns:
         A hostapd config.
@@ -53,8 +55,8 @@
     # Verify interface and security
     hostapd_utils.verify_interface(iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST)
     hostapd_utils.verify_interface(iface_wlan_5g, hostapd_constants.INTERFACE_5G_LIST)
-    hostapd_utils.verify_security_mode(security, [None, hostapd_constants.WPA2])
-    if security:
+    hostapd_utils.verify_security_mode(security, [SecurityMode.OPEN, SecurityMode.WPA2])
+    if security.security_mode is not SecurityMode.OPEN:
         hostapd_utils.verify_cipher(security, [hostapd_constants.WPA2_DEFAULT_CIPER])
 
     # Common Parameters
@@ -90,9 +92,7 @@
         rates.update(hostapd_constants.OFDM_ONLY_BASIC_RATES)
         obss_interval = None
 
-    additional_params = utils.merge_dicts(
-        rates, vendor_elements, hostapd_constants.UAPSD_ENABLED
-    )
+    additional_params = rates | vendor_elements | hostapd_constants.UAPSD_ENABLED
 
     config = hostapd_config.HostapdConfig(
         ssid=ssid,
@@ -114,14 +114,18 @@
 
 
 def linksys_ea9500(
-    iface_wlan_2g=None, iface_wlan_5g=None, channel=None, security=None, ssid=None
-):
+    iface_wlan_2g: str,
+    iface_wlan_5g: str,
+    channel: int,
+    security: Security,
+    ssid: str | None = None,
+) -> hostapd_config.HostapdConfig:
     """A simulated implementation of what a Linksys EA9500 AP
     Args:
         iface_wlan_2g: The 2.4Ghz interface of the test AP.
         iface_wlan_5g: The 5GHz interface of the test AP.
         channel: What channel to use.
-        security: A security profile (None or WPA2).
+        security: A security profile (open or WPA2).
         ssid: The network name.
     Returns:
         A hostapd config.
@@ -143,8 +147,8 @@
     # Verify interface and security
     hostapd_utils.verify_interface(iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST)
     hostapd_utils.verify_interface(iface_wlan_5g, hostapd_constants.INTERFACE_5G_LIST)
-    hostapd_utils.verify_security_mode(security, [None, hostapd_constants.WPA2])
-    if security:
+    hostapd_utils.verify_security_mode(security, [SecurityMode.OPEN, SecurityMode.WPA2])
+    if security.security_mode is not SecurityMode.OPEN:
         hostapd_utils.verify_cipher(security, [hostapd_constants.WPA2_DEFAULT_CIPER])
 
     # Common Parameters
@@ -165,7 +169,7 @@
         mode = hostapd_constants.MODE_11A
         rates.update(hostapd_constants.OFDM_ONLY_BASIC_RATES)
 
-    additional_params = utils.merge_dicts(rates, qbss, vendor_elements)
+    additional_params = rates | qbss | vendor_elements
 
     config = hostapd_config.HostapdConfig(
         ssid=ssid,
@@ -184,15 +188,19 @@
 
 
 def linksys_wrt1900acv2(
-    iface_wlan_2g=None, iface_wlan_5g=None, channel=None, security=None, ssid=None
-):
+    iface_wlan_2g: str,
+    iface_wlan_5g: str,
+    channel: int,
+    security: Security,
+    ssid: str | None = None,
+) -> hostapd_config.HostapdConfig:
     # TODO(b/144446076): Address non-whirlwind hardware capabilities.
     """A simulated implementation of what a Linksys WRT1900ACV2 AP
     Args:
         iface_wlan_2g: The 2.4Ghz interface of the test AP.
         iface_wlan_5g: The 5GHz interface of the test AP.
         channel: What channel to use.
-        security: A security profile (None or WPA2).
+        security: A security profile (open or WPA2).
         ssid: The network name.
     Returns:
         A hostapd config.
@@ -223,8 +231,8 @@
     # Verify interface and security
     hostapd_utils.verify_interface(iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST)
     hostapd_utils.verify_interface(iface_wlan_5g, hostapd_constants.INTERFACE_5G_LIST)
-    hostapd_utils.verify_security_mode(security, [None, hostapd_constants.WPA2])
-    if security:
+    hostapd_utils.verify_security_mode(security, [SecurityMode.OPEN, SecurityMode.WPA2])
+    if security.security_mode is not SecurityMode.OPEN:
         hostapd_utils.verify_cipher(security, [hostapd_constants.WPA2_DEFAULT_CIPER])
 
     # Common Parameters
@@ -265,15 +273,15 @@
         interface = iface_wlan_5g
         rates.update(hostapd_constants.OFDM_ONLY_BASIC_RATES)
         obss_interval = None
-        spectrum_mgmt = (True,)
+        spectrum_mgmt = True
         local_pwr_constraint = {"local_pwr_constraint": 3}
         # Country Information IE (w/ individual channel info)
         vendor_elements["vendor_elements"] += (
             "071e5553202401112801112c011130" "01119501179901179d0117a10117a50117"
         )
 
-    additional_params = utils.merge_dicts(
-        rates, vendor_elements, hostapd_constants.UAPSD_ENABLED, local_pwr_constraint
+    additional_params = (
+        rates | vendor_elements | hostapd_constants.UAPSD_ENABLED | local_pwr_constraint
     )
 
     config = hostapd_config.HostapdConfig(
diff --git a/src/antlion/controllers/ap_lib/third_party_ap_profiles/netgear.py b/packages/antlion/controllers/ap_lib/third_party_ap_profiles/netgear.py
similarity index 87%
rename from src/antlion/controllers/ap_lib/third_party_ap_profiles/netgear.py
rename to packages/antlion/controllers/ap_lib/third_party_ap_profiles/netgear.py
index 25a91cd..69c1845 100644
--- a/src/antlion/controllers/ap_lib/third_party_ap_profiles/netgear.py
+++ b/packages/antlion/controllers/ap_lib/third_party_ap_profiles/netgear.py
@@ -12,16 +12,18 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from antlion import utils
 
-from antlion.controllers.ap_lib import hostapd_config
-from antlion.controllers.ap_lib import hostapd_constants
-from antlion.controllers.ap_lib import hostapd_utils
+from antlion.controllers.ap_lib import hostapd_config, hostapd_constants, hostapd_utils
+from antlion.controllers.ap_lib.hostapd_security import Security, SecurityMode
 
 
 def netgear_r7000(
-    iface_wlan_2g=None, iface_wlan_5g=None, channel=None, security=None, ssid=None
-):
+    iface_wlan_2g: str,
+    iface_wlan_5g: str,
+    channel: int,
+    security: Security,
+    ssid: str | None = None,
+) -> hostapd_config.HostapdConfig:
     # TODO(b/143104825): Permit RIFS once it is supported
     # TODO(b/144446076): Address non-whirlwind hardware capabilities.
     """A simulated implementation of what a Netgear R7000 AP
@@ -29,7 +31,7 @@
         iface_wlan_2g: The 2.4Ghz interface of the test AP.
         iface_wlan_5g: The 5GHz interface of the test AP.
         channel: What channel to use.
-        security: A security profile (None or WPA2).
+        security: A security profile (open or WPA2).
         ssid: The network name.
     Returns:
         A hostapd config.
@@ -79,8 +81,8 @@
     # Verify interface and security
     hostapd_utils.verify_interface(iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST)
     hostapd_utils.verify_interface(iface_wlan_5g, hostapd_constants.INTERFACE_5G_LIST)
-    hostapd_utils.verify_security_mode(security, [None, hostapd_constants.WPA2])
-    if security:
+    hostapd_utils.verify_security_mode(security, [SecurityMode.OPEN, SecurityMode.WPA2])
+    if security.security_mode is not SecurityMode.OPEN:
         hostapd_utils.verify_cipher(security, [hostapd_constants.WPA2_DEFAULT_CIPER])
 
     # Common Parameters
@@ -138,13 +140,13 @@
             hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP7,
         ]
 
-    additional_params = utils.merge_dicts(
-        rates,
-        vendor_elements,
-        qbss,
-        hostapd_constants.ENABLE_RRM_BEACON_REPORT,
-        hostapd_constants.ENABLE_RRM_NEIGHBOR_REPORT,
-        hostapd_constants.UAPSD_ENABLED,
+    additional_params = (
+        rates
+        | vendor_elements
+        | qbss
+        | hostapd_constants.ENABLE_RRM_BEACON_REPORT
+        | hostapd_constants.ENABLE_RRM_NEIGHBOR_REPORT
+        | hostapd_constants.UAPSD_ENABLED
     )
 
     config = hostapd_config.HostapdConfig(
@@ -168,8 +170,12 @@
 
 
 def netgear_wndr3400(
-    iface_wlan_2g=None, iface_wlan_5g=None, channel=None, security=None, ssid=None
-):
+    iface_wlan_2g: str,
+    iface_wlan_5g: str,
+    channel: int,
+    security: Security,
+    ssid: str | None = None,
+) -> hostapd_config.HostapdConfig:
     # TODO(b/143104825): Permit RIFS on 5GHz once it is supported
     # TODO(b/144446076): Address non-whirlwind hardware capabilities.
     """A simulated implementation of what a Netgear WNDR3400 AP
@@ -177,7 +183,7 @@
         iface_wlan_2g: The 2.4Ghz interface of the test AP.
         iface_wlan_5g: The 5GHz interface of the test AP.
         channel: What channel to use.
-        security: A security profile (None or WPA2).
+        security: A security profile (open or WPA2).
         ssid: The network name.
     Returns:
         A hostapd config.
@@ -206,8 +212,8 @@
     # Verify interface and security
     hostapd_utils.verify_interface(iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST)
     hostapd_utils.verify_interface(iface_wlan_5g, hostapd_constants.INTERFACE_5G_LIST)
-    hostapd_utils.verify_security_mode(security, [None, hostapd_constants.WPA2])
-    if security:
+    hostapd_utils.verify_security_mode(security, [SecurityMode.OPEN, SecurityMode.WPA2])
+    if security.security_mode is not SecurityMode.OPEN:
         hostapd_utils.verify_cipher(security, [hostapd_constants.WPA2_DEFAULT_CIPER])
 
     # Common Parameters
@@ -241,9 +247,7 @@
         obss_interval = None
         n_capabilities.append(hostapd_constants.N_CAPABILITY_HT40_PLUS)
 
-    additional_params = utils.merge_dicts(
-        rates, vendor_elements, hostapd_constants.UAPSD_ENABLED
-    )
+    additional_params = rates | vendor_elements | hostapd_constants.UAPSD_ENABLED
 
     config = hostapd_config.HostapdConfig(
         ssid=ssid,
diff --git a/src/antlion/controllers/ap_lib/third_party_ap_profiles/securifi.py b/packages/antlion/controllers/ap_lib/third_party_ap_profiles/securifi.py
similarity index 78%
rename from src/antlion/controllers/ap_lib/third_party_ap_profiles/securifi.py
rename to packages/antlion/controllers/ap_lib/third_party_ap_profiles/securifi.py
index 4a5bf68..8b2d0eb 100644
--- a/src/antlion/controllers/ap_lib/third_party_ap_profiles/securifi.py
+++ b/packages/antlion/controllers/ap_lib/third_party_ap_profiles/securifi.py
@@ -12,19 +12,19 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from antlion import utils
 
-from antlion.controllers.ap_lib import hostapd_config
-from antlion.controllers.ap_lib import hostapd_constants
-from antlion.controllers.ap_lib import hostapd_utils
+from antlion.controllers.ap_lib import hostapd_config, hostapd_constants, hostapd_utils
+from antlion.controllers.ap_lib.hostapd_security import Security, SecurityMode
 
 
-def securifi_almond(iface_wlan_2g=None, channel=None, security=None, ssid=None):
+def securifi_almond(
+    iface_wlan_2g: str, channel: int, security: Security, ssid: str | None = None
+) -> hostapd_config.HostapdConfig:
     """A simulated implementation of a Securifi Almond AP
     Args:
         iface_wlan_2g: The 2.4Ghz interface of the test AP.
         channel: What channel to use.
-        security: A security profile (None or WPA2).
+        security: A security profile (open or WPA2).
         ssid: The network name.
     Returns:
         A hostapd config.
@@ -48,13 +48,12 @@
     """
     if channel > 11:
         raise ValueError(
-            "The Securifi Almond does not support 5Ghz. "
-            "Invalid channel (%s)" % channel
+            f"The Securifi Almond does not support 5Ghz. Invalid channel ({channel})"
         )
     # Verify interface and security
     hostapd_utils.verify_interface(iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST)
-    hostapd_utils.verify_security_mode(security, [None, hostapd_constants.WPA2])
-    if security:
+    hostapd_utils.verify_security_mode(security, [SecurityMode.OPEN, SecurityMode.WPA2])
+    if security.security_mode is not SecurityMode.OPEN:
         hostapd_utils.verify_cipher(security, [hostapd_constants.WPA2_DEFAULT_CIPER])
 
     n_capabilities = [
@@ -66,9 +65,9 @@
         hostapd_constants.N_CAPABILITY_DSSS_CCK_40,
     ]
 
-    rates = utils.merge_dicts(
-        hostapd_constants.CCK_AND_OFDM_BASIC_RATES,
-        hostapd_constants.CCK_AND_OFDM_DATA_RATES,
+    rates = (
+        hostapd_constants.CCK_AND_OFDM_BASIC_RATES
+        | hostapd_constants.CCK_AND_OFDM_DATA_RATES
     )
 
     # Ralink Technology IE
@@ -83,7 +82,7 @@
 
     qbss = {"bss_load_update_period": 50, "chan_util_avg_period": 600}
 
-    additional_params = utils.merge_dicts(rates, vendor_elements, qbss)
+    additional_params = rates | vendor_elements | qbss
 
     config = hostapd_config.HostapdConfig(
         ssid=ssid,
diff --git a/src/antlion/controllers/ap_lib/third_party_ap_profiles/tplink.py b/packages/antlion/controllers/ap_lib/third_party_ap_profiles/tplink.py
similarity index 85%
rename from src/antlion/controllers/ap_lib/third_party_ap_profiles/tplink.py
rename to packages/antlion/controllers/ap_lib/third_party_ap_profiles/tplink.py
index 81eeeec..1a01303 100644
--- a/src/antlion/controllers/ap_lib/third_party_ap_profiles/tplink.py
+++ b/packages/antlion/controllers/ap_lib/third_party_ap_profiles/tplink.py
@@ -12,23 +12,25 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from antlion import utils
 
-from antlion.controllers.ap_lib import hostapd_config
-from antlion.controllers.ap_lib import hostapd_constants
-from antlion.controllers.ap_lib import hostapd_utils
+from antlion.controllers.ap_lib import hostapd_config, hostapd_constants, hostapd_utils
+from antlion.controllers.ap_lib.hostapd_security import Security, SecurityMode
 
 
 def tplink_archerc5(
-    iface_wlan_2g=None, iface_wlan_5g=None, channel=None, security=None, ssid=None
-):
+    iface_wlan_2g: str,
+    iface_wlan_5g: str,
+    channel: int,
+    security: Security,
+    ssid: str | None = None,
+) -> hostapd_config.HostapdConfig:
     # TODO(b/144446076): Address non-whirlwind hardware capabilities.
     """A simulated implementation of an TPLink ArcherC5 AP.
     Args:
         iface_wlan_2g: The 2.4Ghz interface of the test AP.
         iface_wlan_5g: The 5GHz interface of the test AP.
         channel: What channel to use.
-        security: A security profile (None or WPA2).
+        security: A security profile (open or WPA2).
         ssid: The network name.
     Returns:
         A hostapd config.
@@ -73,8 +75,8 @@
     # Verify interface and security
     hostapd_utils.verify_interface(iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST)
     hostapd_utils.verify_interface(iface_wlan_5g, hostapd_constants.INTERFACE_5G_LIST)
-    hostapd_utils.verify_security_mode(security, [None, hostapd_constants.WPA2])
-    if security:
+    hostapd_utils.verify_security_mode(security, [SecurityMode.OPEN, SecurityMode.WPA2])
+    if security.security_mode is not SecurityMode.OPEN:
         hostapd_utils.verify_cipher(security, [hostapd_constants.WPA2_DEFAULT_CIPER])
 
     # Common Parameters
@@ -120,13 +122,13 @@
             hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP7,
         ]
 
-    additional_params = utils.merge_dicts(
-        rates,
-        vendor_elements,
-        qbss,
-        hostapd_constants.ENABLE_RRM_BEACON_REPORT,
-        hostapd_constants.ENABLE_RRM_NEIGHBOR_REPORT,
-        hostapd_constants.UAPSD_ENABLED,
+    additional_params = (
+        rates
+        | vendor_elements
+        | qbss
+        | hostapd_constants.ENABLE_RRM_BEACON_REPORT
+        | hostapd_constants.ENABLE_RRM_NEIGHBOR_REPORT
+        | hostapd_constants.UAPSD_ENABLED
     )
 
     config = hostapd_config.HostapdConfig(
@@ -149,15 +151,19 @@
 
 
 def tplink_archerc7(
-    iface_wlan_2g=None, iface_wlan_5g=None, channel=None, security=None, ssid=None
-):
+    iface_wlan_2g: str,
+    iface_wlan_5g: str,
+    channel: int,
+    security: Security,
+    ssid: str | None = None,
+) -> hostapd_config.HostapdConfig:
     # TODO(b/143104825): Permit RIFS once it is supported
     """A simulated implementation of an TPLink ArcherC7 AP.
     Args:
         iface_wlan_2g: The 2.4Ghz interface of the test AP.
         iface_wlan_5g: The 5GHz interface of the test AP.
         channel: What channel to use.
-        security: A security profile (None or WPA2).
+        security: A security profile (open or WPA2).
         ssid: The network name.
     Returns:
         A hostapd config.
@@ -180,13 +186,13 @@
     # Verify interface and security
     hostapd_utils.verify_interface(iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST)
     hostapd_utils.verify_interface(iface_wlan_5g, hostapd_constants.INTERFACE_5G_LIST)
-    hostapd_utils.verify_security_mode(security, [None, hostapd_constants.WPA2])
-    if security:
+    hostapd_utils.verify_security_mode(security, [SecurityMode.OPEN, SecurityMode.WPA2])
+    if security.security_mode is not SecurityMode.OPEN:
         hostapd_utils.verify_cipher(security, [hostapd_constants.WPA2_DEFAULT_CIPER])
 
     # Common Parameters
     rates = hostapd_constants.CCK_AND_OFDM_DATA_RATES
-    vht_channel_width = 80
+    vht_channel_width: int | None = 80
     n_capabilities = [
         hostapd_constants.N_CAPABILITY_LDPC,
         hostapd_constants.N_CAPABILITY_SGI20,
@@ -246,8 +252,8 @@
             hostapd_constants.AC_CAPABILITY_TX_ANTENNA_PATTERN,
         ]
 
-    additional_params = utils.merge_dicts(
-        rates, vendor_elements, hostapd_constants.UAPSD_ENABLED, pwr_constraint
+    additional_params = (
+        rates | vendor_elements | hostapd_constants.UAPSD_ENABLED | pwr_constraint
     )
 
     config = hostapd_config.HostapdConfig(
@@ -271,8 +277,12 @@
 
 
 def tplink_c1200(
-    iface_wlan_2g=None, iface_wlan_5g=None, channel=None, security=None, ssid=None
-):
+    iface_wlan_2g: str,
+    iface_wlan_5g: str,
+    channel: int,
+    security: Security,
+    ssid: str | None = None,
+) -> hostapd_config.HostapdConfig:
     # TODO(b/143104825): Permit RIFS once it is supported
     # TODO(b/144446076): Address non-whirlwind hardware capabilities.
     """A simulated implementation of an TPLink C1200 AP.
@@ -280,7 +290,7 @@
         iface_wlan_2g: The 2.4Ghz interface of the test AP.
         iface_wlan_5g: The 5GHz interface of the test AP.
         channel: What channel to use.
-        security: A security profile (None or WPA2).
+        security: A security profile (open or WPA2).
         ssid: The network name.
     Returns:
         A hostapd config.
@@ -312,8 +322,8 @@
     # Verify interface and security
     hostapd_utils.verify_interface(iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST)
     hostapd_utils.verify_interface(iface_wlan_5g, hostapd_constants.INTERFACE_5G_LIST)
-    hostapd_utils.verify_security_mode(security, [None, hostapd_constants.WPA2])
-    if security:
+    hostapd_utils.verify_security_mode(security, [SecurityMode.OPEN, SecurityMode.WPA2])
+    if security.security_mode is not SecurityMode.OPEN:
         hostapd_utils.verify_cipher(security, [hostapd_constants.WPA2_DEFAULT_CIPER])
 
     # Common Parameters
@@ -357,12 +367,12 @@
             hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP7,
         ]
 
-    additional_params = utils.merge_dicts(
-        rates,
-        vendor_elements,
-        hostapd_constants.ENABLE_RRM_BEACON_REPORT,
-        hostapd_constants.ENABLE_RRM_NEIGHBOR_REPORT,
-        hostapd_constants.UAPSD_ENABLED,
+    additional_params = (
+        rates
+        | vendor_elements
+        | hostapd_constants.ENABLE_RRM_BEACON_REPORT
+        | hostapd_constants.ENABLE_RRM_NEIGHBOR_REPORT
+        | hostapd_constants.UAPSD_ENABLED
     )
 
     config = hostapd_config.HostapdConfig(
@@ -384,13 +394,15 @@
     return config
 
 
-def tplink_tlwr940n(iface_wlan_2g=None, channel=None, security=None, ssid=None):
+def tplink_tlwr940n(
+    iface_wlan_2g: str, channel: int, security: Security, ssid: str | None = None
+) -> hostapd_config.HostapdConfig:
     # TODO(b/143104825): Permit RIFS once it is supported
     """A simulated implementation of an TPLink TLWR940N AP.
     Args:
         iface_wlan_2g: The 2.4Ghz interface of the test AP.
         channel: What channel to use.
-        security: A security profile (None or WPA2).
+        security: A security profile (open or WPA2).
         ssid: The network name.
     Returns:
         A hostapd config.
@@ -411,8 +423,8 @@
         )
     # Verify interface and security
     hostapd_utils.verify_interface(iface_wlan_2g, hostapd_constants.INTERFACE_2G_LIST)
-    hostapd_utils.verify_security_mode(security, [None, hostapd_constants.WPA2])
-    if security:
+    hostapd_utils.verify_security_mode(security, [SecurityMode.OPEN, SecurityMode.WPA2])
+    if security.security_mode is not SecurityMode.OPEN:
         hostapd_utils.verify_cipher(security, [hostapd_constants.WPA2_DEFAULT_CIPER])
 
     n_capabilities = [
@@ -421,9 +433,9 @@
         hostapd_constants.N_CAPABILITY_RX_STBC1,
     ]
 
-    rates = utils.merge_dicts(
-        hostapd_constants.CCK_AND_OFDM_BASIC_RATES,
-        hostapd_constants.CCK_AND_OFDM_DATA_RATES,
+    rates = (
+        hostapd_constants.CCK_AND_OFDM_BASIC_RATES
+        | hostapd_constants.CCK_AND_OFDM_DATA_RATES
     )
 
     # Atheros Communications, Inc. IE
@@ -434,9 +446,7 @@
         "0100020001"
     }
 
-    additional_params = utils.merge_dicts(
-        rates, vendor_elements, hostapd_constants.UAPSD_ENABLED
-    )
+    additional_params = rates | vendor_elements | hostapd_constants.UAPSD_ENABLED
 
     config = hostapd_config.HostapdConfig(
         ssid=ssid,
diff --git a/src/antlion/controllers/ap_lib/wireless_network_management.py b/packages/antlion/controllers/ap_lib/wireless_network_management.py
similarity index 89%
rename from src/antlion/controllers/ap_lib/wireless_network_management.py
rename to packages/antlion/controllers/ap_lib/wireless_network_management.py
index 62ba34e..848cf5f 100644
--- a/src/antlion/controllers/ap_lib/wireless_network_management.py
+++ b/packages/antlion/controllers/ap_lib/wireless_network_management.py
@@ -14,12 +14,12 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from typing import List, NewType, Optional
+from typing import NewType
 
 from antlion.controllers.ap_lib.radio_measurement import NeighborReportElement
 
 BssTransitionCandidateList = NewType(
-    "BssTransitionCandidateList", List[NeighborReportElement]
+    "BssTransitionCandidateList", list[NeighborReportElement]
 )
 
 
@@ -58,9 +58,9 @@
         ess_disassociation_imminent: bool = False,
         disassociation_timer: int = 0,
         validity_interval: int = 1,
-        bss_termination_duration: Optional[BssTerminationDuration] = None,
-        session_information_url: Optional[str] = None,
-        candidate_list: Optional[BssTransitionCandidateList] = None,
+        bss_termination_duration: BssTerminationDuration | None = None,
+        session_information_url: str | None = None,
+        candidate_list: BssTransitionCandidateList | None = None,
     ):
         """Create a BSS Transition Management request.
 
@@ -128,7 +128,7 @@
         return self._ess_disassociation_imminent
 
     @property
-    def disassociation_timer(self) -> Optional[int]:
+    def disassociation_timer(self) -> int | None:
         if self.disassociation_imminent:
             return self._disassociation_timer
         # Otherwise, field is reserved.
@@ -139,13 +139,13 @@
         return self._validity_interval
 
     @property
-    def bss_termination_duration(self) -> Optional[BssTerminationDuration]:
+    def bss_termination_duration(self) -> BssTerminationDuration | None:
         return self._bss_termination_duration
 
     @property
-    def session_information_url(self) -> Optional[str]:
+    def session_information_url(self) -> str | None:
         return self._session_information_url
 
     @property
-    def candidate_list(self) -> Optional[BssTransitionCandidateList]:
+    def candidate_list(self) -> BssTransitionCandidateList | None:
         return self._candidate_list
diff --git a/packages/antlion/controllers/attenuator.py b/packages/antlion/controllers/attenuator.py
new file mode 100644
index 0000000..f9c8b97
--- /dev/null
+++ b/packages/antlion/controllers/attenuator.py
@@ -0,0 +1,364 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import annotations
+
+import enum
+import logging
+from typing import Protocol, runtime_checkable
+
+from antlion.libs.proc import job
+from antlion.types import ControllerConfig, Json
+from antlion.validation import MapValidator
+
+MOBLY_CONTROLLER_CONFIG_NAME: str = "Attenuator"
+ACTS_CONTROLLER_REFERENCE_NAME = "attenuators"
+_ATTENUATOR_OPEN_RETRIES = 3
+
+
+class Model(enum.StrEnum):
+    AEROFLEX_TELNET = "aeroflex.telnet"
+    MINICIRCUITS_HTTP = "minicircuits.http"
+    MINICIRCUITS_TELNET = "minicircuits.telnet"
+
+    def create(self, instrument_count: int) -> AttenuatorInstrument:
+        match self:
+            case Model.AEROFLEX_TELNET:
+                import antlion.controllers.attenuator_lib.aeroflex.telnet
+
+                return antlion.controllers.attenuator_lib.aeroflex.telnet.AttenuatorInstrument(
+                    instrument_count
+                )
+            case Model.MINICIRCUITS_HTTP:
+                import antlion.controllers.attenuator_lib.minicircuits.http
+
+                return antlion.controllers.attenuator_lib.minicircuits.http.AttenuatorInstrument(
+                    instrument_count
+                )
+            case Model.MINICIRCUITS_TELNET:
+                import antlion.controllers.attenuator_lib.minicircuits.telnet
+
+                return antlion.controllers.attenuator_lib.minicircuits.telnet.AttenuatorInstrument(
+                    instrument_count
+                )
+
+
+def create(configs: list[ControllerConfig]) -> list[Attenuator]:
+    attenuators: list[Attenuator] = []
+    for config in configs:
+        c = MapValidator(config)
+        attn_model = c.get(str, "Model")
+        protocol = c.get(str, "Protocol", "telnet")
+        model = Model(f"{attn_model}.{protocol}")
+
+        instrument_count = c.get(int, "InstrumentCount")
+        attenuator_instrument = model.create(instrument_count)
+
+        address = c.get(str, "Address")
+        port = c.get(int, "Port")
+
+        for attempt_number in range(1, _ATTENUATOR_OPEN_RETRIES + 1):
+            try:
+                attenuator_instrument.open(address, port)
+            except Exception as e:
+                logging.error(
+                    "Attempt %s to open connection to attenuator " "failed: %s",
+                    attempt_number,
+                    e,
+                )
+                if attempt_number == _ATTENUATOR_OPEN_RETRIES:
+                    ping_output = job.run(
+                        f"ping {address} -c 1 -w 1", ignore_status=True
+                    )
+                    if ping_output.returncode == 1:
+                        logging.error("Unable to ping attenuator at %s", address)
+                    else:
+                        logging.error("Able to ping attenuator at %s", address)
+                        job.run(
+                            ["telnet", address, str(port)],
+                            stdin=b"q",
+                            ignore_status=True,
+                        )
+                    raise
+        for i in range(instrument_count):
+            attenuators.append(Attenuator(attenuator_instrument, idx=i))
+    return attenuators
+
+
+def destroy(objects: list[Attenuator]) -> None:
+    for attn in objects:
+        attn.instrument.close()
+
+
+def get_info(objects: list[Attenuator]) -> list[Json]:
+    """Get information on a list of Attenuator objects.
+
+    Args:
+        objects: A list of Attenuator objects.
+
+    Returns:
+        A list of dict, each representing info for Attenuator objects.
+    """
+    return [
+        {
+            "Address": attenuator.instrument.address,
+            "Attenuator_Port": attenuator.idx,
+        }
+        for attenuator in objects
+    ]
+
+
+def get_attenuators_for_device(
+    device_attenuator_configs: list[ControllerConfig],
+    attenuators: list[Attenuator],
+    attenuator_key: str,
+) -> list[Attenuator]:
+    """Gets the list of attenuators associated to a specified device and builds
+    a list of the attenuator objects associated to the ip address in the
+    device's section of the ACTS config and the Attenuator's IP address.  In the
+    example below the access point object has an attenuator dictionary with
+    IP address associated to an attenuator object.  The address is the only
+    mandatory field and the 'attenuator_ports_wifi_2g' and
+    'attenuator_ports_wifi_5g' are the attenuator_key specified above.  These
+    can be anything and is sent in as a parameter to this function.  The numbers
+    in the list are ports that are in the attenuator object.  Below is a
+    standard Access_Point object and the link to a standard Attenuator object.
+    Notice the link is the IP address, which is why the IP address is mandatory.
+
+    "AccessPoint": [
+        {
+          "ssh_config": {
+            "user": "root",
+            "host": "192.168.42.210"
+          },
+          "Attenuator": [
+            {
+              "Address": "192.168.42.200",
+              "attenuator_ports_wifi_2g": [
+                0,
+                1,
+                3
+              ],
+              "attenuator_ports_wifi_5g": [
+                0,
+                1
+              ]
+            }
+          ]
+        }
+      ],
+      "Attenuator": [
+        {
+          "Model": "minicircuits",
+          "InstrumentCount": 4,
+          "Address": "192.168.42.200",
+          "Port": 23
+        }
+      ]
+    Args:
+        device_attenuator_configs: A list of attenuators config information in
+            the acts config that are associated with a particular device.
+        attenuators: A list of all of the available attenuators objects
+            in the testbed.
+        attenuator_key: A string that is the key to search in the device's
+            configuration.
+
+    Returns:
+        A list of attenuator objects for the specified device and the key in
+        that device's config.
+    """
+    attenuator_list = []
+    for device_attenuator_config in device_attenuator_configs:
+        c = MapValidator(device_attenuator_config)
+        ports = c.list(attenuator_key).all(int)
+        for port in ports:
+            for attenuator in attenuators:
+                if (
+                    attenuator.instrument.address == device_attenuator_config["Address"]
+                    and attenuator.idx is port
+                ):
+                    attenuator_list.append(attenuator)
+    return attenuator_list
+
+
+#
+# Classes for accessing, managing, and manipulating attenuators.
+#
+# Users will instantiate a specific child class, but almost all operation should
+# be performed on the methods and data members defined here in the base classes
+# or the wrapper classes.
+#
+
+
+class AttenuatorError(Exception):
+    """Base class for all errors generated by Attenuator-related modules."""
+
+
+class InvalidDataError(AttenuatorError):
+    """Raised when an unexpected result is seen on the transport layer.
+
+    When this exception is seen, closing and re-opening the link to the
+    attenuator instrument is probably necessary. Something has gone wrong in
+    the transport.
+    """
+
+
+class InvalidOperationError(AttenuatorError):
+    """Raised when the attenuator's state does not allow the given operation.
+
+    Certain methods may only be accessed when the instance upon which they are
+    invoked is in a certain state. This indicates that the object is not in the
+    correct state for a method to be called.
+    """
+
+
+INVALID_MAX_ATTEN: float = 999.9
+
+
+@runtime_checkable
+class AttenuatorInstrument(Protocol):
+    """Defines the primitive behavior of all attenuator instruments.
+
+    The AttenuatorInstrument class is designed to provide a simple low-level
+    interface for accessing any step attenuator instrument comprised of one or
+    more attenuators and a controller. All AttenuatorInstruments should override
+    all the methods below and call AttenuatorInstrument.__init__ in their
+    constructors. Outside of setup/teardown, devices should be accessed via
+    this generic "interface".
+    """
+
+    @property
+    def address(self) -> str | None:
+        """Return the address to the attenuator."""
+        ...
+
+    @property
+    def num_atten(self) -> int:
+        """Return the number of attenuators contained in this instrument."""
+        ...
+
+    @property
+    def max_atten(self) -> float:
+        """Return the maximum allowed attenuation value."""
+        ...
+
+    def open(self, host: str, port: int, timeout_sec: int = 5) -> None:
+        """Initiate a connection to the attenuator.
+
+        Args:
+            host: A valid hostname to an attenuator
+            port: Port number to attempt connection
+            timeout_sec: Seconds to wait to initiate a connection
+        """
+        ...
+
+    def close(self) -> None:
+        """Close the connection to the attenuator."""
+        ...
+
+    def set_atten(
+        self, idx: int, value: float, strict: bool = True, retry: bool = False
+    ) -> None:
+        """Sets the attenuation given its index in the instrument.
+
+        Args:
+            idx: Index used to identify a particular attenuator in an instrument
+            value: Value for nominal attenuation to be set
+            strict: If True, raise an error when given out of bounds attenuation
+            retry: If True, command will be retried if possible
+        """
+        ...
+
+    def get_atten(self, idx: int, retry: bool = False) -> float:
+        """Returns the current attenuation given its index in the instrument.
+
+        Args:
+            idx: Index used to identify a particular attenuator in an instrument
+            retry: If True, command will be retried if possible
+
+        Returns:
+            The current attenuation value
+        """
+        ...
+
+
+class Attenuator(object):
+    """An object representing a single attenuator in a remote instrument.
+
+    A user wishing to abstract the mapping of attenuators to physical
+    instruments should use this class, which provides an object that abstracts
+    the physical implementation and allows the user to think only of attenuators
+    regardless of their location.
+    """
+
+    def __init__(
+        self, instrument: AttenuatorInstrument, idx: int = 0, offset: int = 0
+    ) -> None:
+        """This is the constructor for Attenuator
+
+        Args:
+            instrument: Reference to an AttenuatorInstrument on which the
+                Attenuator resides
+            idx: This zero-based index is the identifier for a particular
+                attenuator in an instrument.
+            offset: A power offset value for the attenuator to be used when
+                performing future operations. This could be used for either
+                calibration or to allow group operations with offsets between
+                various attenuators.
+
+        Raises:
+            TypeError if an invalid AttenuatorInstrument is passed in.
+            IndexError if the index is out of range.
+        """
+        if not isinstance(instrument, AttenuatorInstrument):
+            raise TypeError("Must provide an Attenuator Instrument Ref")
+        self.instrument = instrument
+        self.idx = idx
+        self.offset = offset
+
+        if self.idx >= instrument.num_atten:
+            raise IndexError("Attenuator index out of range for attenuator instrument")
+
+    def set_atten(self, value: float, strict: bool = True, retry: bool = False) -> None:
+        """Sets the attenuation.
+
+        Args:
+            value: A floating point value for nominal attenuation to be set.
+            strict: if True, function raises an error when given out of
+                bounds attenuation values, if false, the function sets out of
+                bounds values to 0 or max_atten.
+            retry: if True, command will be retried if possible
+
+        Raises:
+            ValueError if value + offset is greater than the maximum value.
+        """
+        if value + self.offset > self.instrument.max_atten and strict:
+            raise ValueError("Attenuator Value+Offset greater than Max Attenuation!")
+
+        self.instrument.set_atten(
+            self.idx, value + self.offset, strict=strict, retry=retry
+        )
+
+    def get_atten(self, retry: bool = False) -> float:
+        """Returns the attenuation as a float, normalized by the offset."""
+        return self.instrument.get_atten(self.idx, retry) - self.offset
+
+    def get_max_atten(self) -> float:
+        """Returns the max attenuation as a float, normalized by the offset."""
+        if self.instrument.max_atten == INVALID_MAX_ATTEN:
+            raise ValueError("Invalid Max Attenuator Value")
+
+        return self.instrument.max_atten - self.offset
diff --git a/src/antlion/controllers/attenuator_lib/__init__.py b/packages/antlion/controllers/attenuator_lib/__init__.py
similarity index 100%
rename from src/antlion/controllers/attenuator_lib/__init__.py
rename to packages/antlion/controllers/attenuator_lib/__init__.py
diff --git a/src/antlion/controllers/attenuator_lib/_tnhelper.py b/packages/antlion/controllers/attenuator_lib/_tnhelper.py
similarity index 64%
rename from src/antlion/controllers/attenuator_lib/_tnhelper.py
rename to packages/antlion/controllers/attenuator_lib/_tnhelper.py
index 61b4193..8ea8289 100644
--- a/src/antlion/controllers/attenuator_lib/_tnhelper.py
+++ b/packages/antlion/controllers/attenuator_lib/_tnhelper.py
@@ -19,8 +19,9 @@
 """
 
 import logging
-import telnetlib
 import re
+import telnetlib
+
 from antlion.controllers import attenuator
 from antlion.libs.proc import job
 
@@ -29,40 +30,44 @@
     return str(uc_string).encode("ASCII")
 
 
-class _TNHelper(object):
+class TelnetHelper(object):
     """An internal helper class for Telnet+SCPI command-based instruments.
 
     It should only be used by those implementation control libraries and not by
     any user code directly.
     """
 
-    def __init__(self, tx_cmd_separator="\n", rx_cmd_separator="\n", prompt=""):
-        self._tn = None
-        self._ip_address = None
-        self._port = None
+    def __init__(
+        self,
+        tx_cmd_separator: str = "\n",
+        rx_cmd_separator: str = "\n",
+        prompt: str = "",
+    ) -> None:
+        self._tn: telnetlib.Telnet | None = None
+        self._ip_address: str | None = None
+        self._port: int | None = None
 
         self.tx_cmd_separator = tx_cmd_separator
         self.rx_cmd_separator = rx_cmd_separator
         self.prompt = prompt
 
-    def open(self, host, port=23):
+    def open(self, host: str, port: int = 23) -> None:
         self._ip_address = host
         self._port = port
         if self._tn:
             self._tn.close()
-        logging.debug("Telnet Server IP = %s" % host)
-        self._tn = telnetlib.Telnet()
-        self._tn.open(host, port, 10)
+        logging.debug("Telnet Server IP = %s", host)
+        self._tn = telnetlib.Telnet(host, port, timeout=10)
 
-    def is_open(self):
-        return bool(self._tn)
+    def is_open(self) -> bool:
+        return self._tn is not None
 
-    def close(self):
+    def close(self) -> None:
         if self._tn:
             self._tn.close()
             self._tn = None
 
-    def diagnose_telnet(self):
+    def diagnose_telnet(self, host: str, port: int) -> bool:
         """Function that diagnoses telnet connections.
 
         This function diagnoses telnet connections and can be used in case of
@@ -77,32 +82,32 @@
         """
         logging.debug("Diagnosing telnet connection")
         try:
-            job_result = job.run("ping {} -c 5 -i 0.2".format(self._ip_address))
-        except:
-            logging.error("Unable to ping telnet server.")
+            job_result = job.run(f"ping {host} -c 5 -i 0.2")
+        except Exception as e:
+            logging.error("Unable to ping telnet server: %s", e)
             return False
-        ping_output = job_result.stdout
+        ping_output = job_result.stdout.decode("utf-8")
         if not re.search(r" 0% packet loss", ping_output):
-            logging.error("Ping Packets Lost. Result: {}".format(ping_output))
+            logging.error("Ping Packets Lost. Result: %s", ping_output)
             return False
         try:
             self.close()
-        except:
-            logging.error("Cannot close telnet connection.")
+        except Exception as e:
+            logging.error("Cannot close telnet connection: %s", e)
             return False
         try:
-            self.open(self._ip_address, self._port)
-        except:
-            logging.error("Cannot reopen telnet connection.")
+            self.open(host, port)
+        except Exception as e:
+            logging.error("Cannot reopen telnet connection: %s", e)
             return False
         logging.debug("Telnet connection likely recovered")
         return True
 
-    def cmd(self, cmd_str, wait_ret=True, retry=False):
+    def cmd(self, cmd_str: str, retry: bool = False) -> str:
         if not isinstance(cmd_str, str):
             raise TypeError("Invalid command string", cmd_str)
 
-        if not self.is_open():
+        if self._tn is None or self._ip_address is None or self._port is None:
             raise attenuator.InvalidOperationError(
                 "Telnet connection not open for commands"
             )
@@ -111,29 +116,25 @@
         self._tn.read_until(_ascii_string(self.prompt), 2)
         self._tn.write(_ascii_string(cmd_str + self.tx_cmd_separator))
 
-        if wait_ret is False:
-            return None
-
         match_idx, match_val, ret_text = self._tn.expect(
-            [_ascii_string("\S+" + self.rx_cmd_separator)], 1
+            [_ascii_string(f"\\S+{self.rx_cmd_separator}")], 1
         )
 
-        logging.debug("Telnet Command: {}".format(cmd_str))
-        logging.debug("Telnet Reply: ({},{},{})".format(match_idx, match_val, ret_text))
+        logging.debug("Telnet Command: %s", cmd_str)
+        logging.debug("Telnet Reply: (%s, %s, %s)", match_idx, match_val, ret_text)
 
         if match_idx == -1:
-            telnet_recovered = self.diagnose_telnet()
+            telnet_recovered = self.diagnose_telnet(self._ip_address, self._port)
             if telnet_recovered and retry:
                 logging.debug("Retrying telnet command once.")
-                return self.cmd(cmd_str, wait_ret, retry=False)
+                return self.cmd(cmd_str, retry=False)
             else:
                 raise attenuator.InvalidDataError(
                     "Telnet command failed to return valid data"
                 )
 
-        ret_text = ret_text.decode()
-        ret_text = ret_text.strip(
+        ret_str = ret_text.decode()
+        ret_str = ret_str.strip(
             self.tx_cmd_separator + self.rx_cmd_separator + self.prompt
         )
-
-        return ret_text
+        return ret_str
diff --git a/src/antlion/controllers/attenuator_lib/aeroflex/__init__.py b/packages/antlion/controllers/attenuator_lib/aeroflex/__init__.py
similarity index 100%
rename from src/antlion/controllers/attenuator_lib/aeroflex/__init__.py
rename to packages/antlion/controllers/attenuator_lib/aeroflex/__init__.py
diff --git a/packages/antlion/controllers/attenuator_lib/aeroflex/telnet.py b/packages/antlion/controllers/attenuator_lib/aeroflex/telnet.py
new file mode 100644
index 0000000..f4544f3
--- /dev/null
+++ b/packages/antlion/controllers/attenuator_lib/aeroflex/telnet.py
@@ -0,0 +1,136 @@
+#!/usr/bin/env python3
+
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""
+Class for Telnet control of Aeroflex 832X and 833X Series Attenuator Modules
+
+This class provides a wrapper to the Aeroflex attenuator modules for purposes
+of simplifying and abstracting control down to the basic necessities. It is
+not the intention of the module to expose all functionality, but to allow
+interchangeable HW to be used.
+
+See http://www.aeroflex.com/ams/weinschel/PDFILES/IM-608-Models-8320-&-8321-preliminary.pdf
+"""
+
+from antlion.controllers import attenuator
+from antlion.controllers.attenuator_lib import _tnhelper
+
+
+class AttenuatorInstrument(attenuator.AttenuatorInstrument):
+    def __init__(self, num_atten: int = 0) -> None:
+        self._num_atten = num_atten
+        self._max_atten = attenuator.INVALID_MAX_ATTEN
+
+        self._tnhelper = _tnhelper.TelnetHelper(
+            tx_cmd_separator="\r\n", rx_cmd_separator="\r\n", prompt=">"
+        )
+        self._properties: dict[str, str] | None = None
+        self._address: str | None = None
+
+    @property
+    def address(self) -> str | None:
+        return self._address
+
+    @property
+    def num_atten(self) -> int:
+        return self._num_atten
+
+    @property
+    def max_atten(self) -> float:
+        return self._max_atten
+
+    def open(self, host: str, port: int, _timeout_sec: int = 5) -> None:
+        """Initiate a connection to the attenuator.
+
+        Args:
+            host: A valid hostname to an attenuator
+            port: Port number to attempt connection
+            timeout_sec: Seconds to wait to initiate a connection
+        """
+        self._tnhelper.open(host, port)
+
+        # work around a bug in IO, but this is a good thing to do anyway
+        self._tnhelper.cmd("*CLS", False)
+        self._address = host
+
+        if self._num_atten == 0:
+            self._num_atten = int(self._tnhelper.cmd("RFCONFIG? CHAN"))
+
+        configstr = self._tnhelper.cmd("RFCONFIG? ATTN 1")
+
+        self._properties = dict(
+            zip(
+                ["model", "max_atten", "min_step", "unknown", "unknown2", "cfg_str"],
+                configstr.split(", ", 5),
+            )
+        )
+
+        self._max_atten = float(self._properties["max_atten"])
+
+    def close(self) -> None:
+        """Close the connection to the attenuator."""
+        self._tnhelper.close()
+
+    def set_atten(
+        self, idx: int, value: float, _strict: bool = True, _retry: bool = False
+    ) -> None:
+        """Sets the attenuation given its index in the instrument.
+
+        Args:
+            idx: Index used to identify a particular attenuator in an instrument
+            value: Value for nominal attenuation to be set
+            strict: If True, raise an error when given out of bounds attenuation
+            retry: If True, command will be retried if possible
+
+        Raises:
+            InvalidOperationError if the telnet connection is not open.
+            IndexError if the index is not valid for this instrument.
+            ValueError if the requested set value is greater than the maximum
+                attenuation value.
+        """
+        if not self._tnhelper.is_open():
+            raise attenuator.InvalidOperationError("Connection not open!")
+
+        if idx >= self._num_atten:
+            raise IndexError("Attenuator index out of range!", self._num_atten, idx)
+
+        if value > self._max_atten:
+            raise ValueError("Attenuator value out of range!", self._max_atten, value)
+
+        self._tnhelper.cmd(f"ATTN {idx + 1} {value}", False)
+
+    def get_atten(self, idx: int, _retry: bool = False) -> float:
+        """Returns the current attenuation given its index in the instrument.
+
+        Args:
+            idx: Index used to identify a particular attenuator in an instrument
+            retry: If True, command will be retried if possible
+
+        Raises:
+            InvalidOperationError if the telnet connection is not open.
+
+        Returns:
+            The current attenuation value
+        """
+        if not self._tnhelper.is_open():
+            raise attenuator.InvalidOperationError("Connection not open!")
+
+        #       Potentially redundant safety check removed for the moment
+        #       if idx >= self.num_atten:
+        #           raise IndexError("Attenuator index out of range!", self.num_atten, idx)
+
+        atten_val = self._tnhelper.cmd(f"ATTN? {idx + 1}")
+
+        return float(atten_val)
diff --git a/src/antlion/controllers/attenuator_lib/minicircuits/__init__.py b/packages/antlion/controllers/attenuator_lib/minicircuits/__init__.py
similarity index 100%
rename from src/antlion/controllers/attenuator_lib/minicircuits/__init__.py
rename to packages/antlion/controllers/attenuator_lib/minicircuits/__init__.py
diff --git a/packages/antlion/controllers/attenuator_lib/minicircuits/http.py b/packages/antlion/controllers/attenuator_lib/minicircuits/http.py
new file mode 100644
index 0000000..98118ad
--- /dev/null
+++ b/packages/antlion/controllers/attenuator_lib/minicircuits/http.py
@@ -0,0 +1,158 @@
+#!/usr/bin/env python3
+
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""
+Class for HTTP control of Mini-Circuits RCDAT series attenuators
+
+This class provides a wrapper to the MC-RCDAT attenuator modules for purposes
+of simplifying and abstracting control down to the basic necessities. It is
+not the intention of the module to expose all functionality, but to allow
+interchangeable HW to be used.
+
+See http://www.minicircuits.com/softwaredownload/Prog_Manual-6-Programmable_Attenuator.pdf
+"""
+
+import urllib.request
+
+from antlion.controllers import attenuator
+
+
+class AttenuatorInstrument(attenuator.AttenuatorInstrument):
+    """A specific HTTP-controlled implementation of AttenuatorInstrument for
+    Mini-Circuits RC-DAT attenuators.
+
+    With the exception of HTTP-specific commands, all functionality is defined
+    by the AttenuatorInstrument class.
+    """
+
+    def __init__(self, num_atten: int = 1) -> None:
+        self._num_atten = num_atten
+        self._max_atten = attenuator.INVALID_MAX_ATTEN
+
+        self._ip_address: str | None = None
+        self._port: int | None = None
+        self._timeout: int | None = None
+        self._address: str | None = None
+
+    @property
+    def address(self) -> str | None:
+        return self._address
+
+    @property
+    def num_atten(self) -> int:
+        return self._num_atten
+
+    @property
+    def max_atten(self) -> float:
+        return self._max_atten
+
+    def open(self, host: str, port: int = 80, timeout_sec: int = 2) -> None:
+        """Initiate a connection to the attenuator.
+
+        Args:
+            host: A valid hostname to an attenuator
+            port: Port number to attempt connection
+            timeout_sec: Seconds to wait to initiate a connection
+        """
+        self._ip_address = host
+        self._port = port
+        self._timeout = timeout_sec
+        self._address = host
+
+        att_req = urllib.request.urlopen(f"http://{self._ip_address}:{self._port}/MN?")
+        config_str = att_req.read().decode("utf-8").strip()
+        if not config_str.startswith("MN="):
+            raise attenuator.InvalidDataError(
+                f"Attenuator returned invalid data. Attenuator returned: {config_str}"
+            )
+
+        config_str = config_str[len("MN=") :]
+        properties = dict(
+            zip(["model", "max_freq", "max_atten"], config_str.split("-", 2))
+        )
+        self._max_atten = float(properties["max_atten"])
+
+    def close(self) -> None:
+        """Close the connection to the attenuator."""
+        # Since this controller is based on HTTP requests, there is no
+        # connection teardown required.
+
+    def set_atten(
+        self, idx: int, value: float, strict: bool = True, retry: bool = False
+    ) -> None:
+        """Sets the attenuation given its index in the instrument.
+
+        Args:
+            idx: Index used to identify a particular attenuator in an instrument
+            value: Value for nominal attenuation to be set
+            strict: If True, raise an error when given out of bounds attenuation
+            retry: If True, command will be retried if possible
+
+        Raises:
+            InvalidDataError if the attenuator does not respond with the
+            expected output.
+        """
+        if not (0 <= idx < self._num_atten):
+            raise IndexError("Attenuator index out of range!", self._num_atten, idx)
+
+        if value > self._max_atten and strict:
+            raise ValueError("Attenuator value out of range!", self._max_atten, value)
+        # The actual device uses one-based index for channel numbers.
+        adjusted_value = min(max(0, value), self._max_atten)
+        att_req = urllib.request.urlopen(
+            "http://{}:{}/CHAN:{}:SETATT:{}".format(
+                self._ip_address, self._port, idx + 1, adjusted_value
+            ),
+            timeout=self._timeout,
+        )
+        att_resp = att_req.read().decode("utf-8").strip()
+        if att_resp != "1":
+            if retry:
+                self.set_atten(idx, value, strict, retry=False)
+            else:
+                raise attenuator.InvalidDataError(
+                    f"Attenuator returned invalid data. Attenuator returned: {att_resp}"
+                )
+
+    def get_atten(self, idx: int, retry: bool = False) -> float:
+        """Returns the current attenuation of the attenuator at the given index.
+
+        Args:
+            idx: The index of the attenuator.
+            retry: if True, command will be retried if possible
+
+        Raises:
+            InvalidDataError if the attenuator does not respond with the
+            expected output
+
+        Returns:
+            the current attenuation value as a float
+        """
+        if not (0 <= idx < self._num_atten):
+            raise IndexError("Attenuator index out of range!", self._num_atten, idx)
+        att_req = urllib.request.urlopen(
+            f"http://{self._ip_address}:{self._port}/CHAN:{idx + 1}:ATT?",
+            timeout=self._timeout,
+        )
+        att_resp = att_req.read().decode("utf-8").strip()
+        try:
+            return float(att_resp)
+        except TypeError as e:
+            if retry:
+                return self.get_atten(idx, retry=False)
+
+            raise attenuator.InvalidDataError(
+                f"Attenuator returned invalid data. Attenuator returned: {att_resp}"
+            ) from e
diff --git a/packages/antlion/controllers/attenuator_lib/minicircuits/telnet.py b/packages/antlion/controllers/attenuator_lib/minicircuits/telnet.py
new file mode 100644
index 0000000..bd70386
--- /dev/null
+++ b/packages/antlion/controllers/attenuator_lib/minicircuits/telnet.py
@@ -0,0 +1,148 @@
+#!/usr/bin/env python3
+
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""
+Class for Telnet control of Mini-Circuits RCDAT series attenuators
+
+This class provides a wrapper to the MC-RCDAT attenuator modules for purposes
+of simplifying and abstracting control down to the basic necessities. It is
+not the intention of the module to expose all functionality, but to allow
+interchangeable HW to be used.
+
+See http://www.minicircuits.com/softwaredownload/Prog_Manual-6-Programmable_Attenuator.pdf
+"""
+
+from antlion.controllers import attenuator
+from antlion.controllers.attenuator_lib import _tnhelper
+
+
+class AttenuatorInstrument(attenuator.AttenuatorInstrument):
+    """A specific telnet-controlled implementation of AttenuatorInstrument for
+    Mini-Circuits RC-DAT attenuators.
+
+    With the exception of telnet-specific commands, all functionality is defined
+    by the AttenuatorInstrument class. Because telnet is a stateful protocol,
+    the functionality of AttenuatorInstrument is contingent upon a telnet
+    connection being established.
+    """
+
+    def __init__(self, num_atten: int = 0) -> None:
+        self._num_atten = num_atten
+        self._max_atten = attenuator.INVALID_MAX_ATTEN
+        self.properties: dict[str, str] | None = None
+        self._tnhelper = _tnhelper.TelnetHelper(
+            tx_cmd_separator="\r\n", rx_cmd_separator="\r\n", prompt=""
+        )
+        self._address: str | None = None
+
+    @property
+    def address(self) -> str | None:
+        return self._address
+
+    @property
+    def num_atten(self) -> int:
+        return self._num_atten
+
+    @property
+    def max_atten(self) -> float:
+        return self._max_atten
+
+    def __del__(self) -> None:
+        if self._tnhelper.is_open():
+            self.close()
+
+    def open(self, host: str, port: int, _timeout_sec: int = 5) -> None:
+        """Initiate a connection to the attenuator.
+
+        Args:
+            host: A valid hostname to an attenuator
+            port: Port number to attempt connection
+            timeout_sec: Seconds to wait to initiate a connection
+        """
+        self._tnhelper.open(host, port)
+        self._address = host
+
+        if self._num_atten == 0:
+            self._num_atten = 1
+
+        config_str = self._tnhelper.cmd("MN?")
+
+        if config_str.startswith("MN="):
+            config_str = config_str[len("MN=") :]
+
+        self.properties = dict(
+            zip(["model", "max_freq", "max_atten"], config_str.split("-", 2))
+        )
+        self._max_atten = float(self.properties["max_atten"])
+
+    def close(self) -> None:
+        """Close the connection to the attenuator."""
+        self._tnhelper.close()
+
+    def set_atten(
+        self, idx: int, value: float, strict: bool = True, retry: bool = False
+    ) -> None:
+        """Sets the attenuation given its index in the instrument.
+
+        Args:
+            idx: Index used to identify a particular attenuator in an instrument
+            value: Value for nominal attenuation to be set
+            strict: If True, raise an error when given out of bounds attenuation
+            retry: If True, command will be retried if possible
+
+        Raises:
+            InvalidOperationError if the telnet connection is not open.
+            IndexError if the index is not valid for this instrument.
+            ValueError if the requested set value is greater than the maximum
+                attenuation value.
+        """
+
+        if not self._tnhelper.is_open():
+            raise attenuator.InvalidOperationError("Connection not open!")
+
+        if idx >= self._num_atten:
+            raise IndexError("Attenuator index out of range!", self._num_atten, idx)
+
+        if value > self._max_atten and strict:
+            raise ValueError("Attenuator value out of range!", self._max_atten, value)
+        # The actual device uses one-based index for channel numbers.
+        adjusted_value = min(max(0, value), self._max_atten)
+        self._tnhelper.cmd(f"CHAN:{idx + 1}:SETATT:{adjusted_value}", retry=retry)
+
+    def get_atten(self, idx: int, retry: bool = False) -> float:
+        """Returns the current attenuation given its index in the instrument.
+
+        Args:
+            idx: Index used to identify a particular attenuator in an instrument
+            retry: If True, command will be retried if possible
+
+        Returns:
+            The current attenuation value
+
+        Raises:
+            InvalidOperationError if the telnet connection is not open.
+        """
+        if not self._tnhelper.is_open():
+            raise attenuator.InvalidOperationError("Connection not open!")
+
+        if idx >= self._num_atten or idx < 0:
+            raise IndexError("Attenuator index out of range!", self._num_atten, idx)
+
+        if self._num_atten == 1:
+            atten_val_str = self._tnhelper.cmd(":ATT?", retry=retry)
+        else:
+            atten_val_str = self._tnhelper.cmd(f"CHAN:{idx + 1}:ATT?", retry=retry)
+        atten_val = float(atten_val_str)
+        return atten_val
diff --git a/src/antlion/controllers/fastboot.py b/packages/antlion/controllers/fastboot.py
similarity index 91%
rename from src/antlion/controllers/fastboot.py
rename to packages/antlion/controllers/fastboot.py
index ed67245..40fa702 100755
--- a/src/antlion/controllers/fastboot.py
+++ b/packages/antlion/controllers/fastboot.py
@@ -14,9 +14,8 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from antlion.libs.proc import job
-
 from antlion import error
+from antlion.libs.proc import job
 
 
 class FastbootError(error.ActsError):
@@ -47,17 +46,19 @@
     def __init__(self, serial="", ssh_connection=None):
         self.serial = serial
         if serial:
-            self.fastboot_str = "fastboot -s {}".format(serial)
+            self.fastboot_str = f"fastboot -s {serial}"
         else:
             self.fastboot_str = "fastboot"
         self.ssh_connection = ssh_connection
 
     def _exec_fastboot_cmd(self, name, arg_str, ignore_status=False, timeout=60):
-        command = " ".join((self.fastboot_str, name, arg_str))
+        command = f"{self.fastboot_str} {name} {arg_str}"
         if self.ssh_connection:
-            result = self.connection.run(command, ignore_status=True, timeout=timeout)
+            result = self.ssh_connection.run(
+                command, ignore_status=True, timeout_sec=timeout
+            )
         else:
-            result = job.run(command, ignore_status=True, timeout=timeout)
+            result = job.run(command, ignore_status=True, timeout_sec=timeout)
         ret, out, err = result.exit_status, result.stdout, result.stderr
         # TODO: This is only a temporary workaround for b/34815412.
         # fastboot getvar outputs to stderr instead of stdout
diff --git a/packages/antlion/controllers/fuchsia_device.py b/packages/antlion/controllers/fuchsia_device.py
new file mode 100644
index 0000000..6b65b73
--- /dev/null
+++ b/packages/antlion/controllers/fuchsia_device.py
@@ -0,0 +1,812 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import annotations
+
+import logging
+import os
+import re
+import socket
+import textwrap
+import time
+from ipaddress import ip_address
+from typing import Any
+
+import honeydew
+from honeydew.affordances.connectivity.wlan.utils.types import CountryCode
+from honeydew.auxiliary_devices.power_switch.power_switch_using_dmc import (
+    PowerSwitchDmcError,
+    PowerSwitchUsingDmc,
+)
+from honeydew.transports.ffx.config import FfxConfig
+from honeydew.transports.ffx.ffx import FFX
+from honeydew.typing.custom_types import DeviceInfo, IpPort
+from mobly import logger, signals
+
+from antlion import context, utils
+from antlion.capabilities.ssh import DEFAULT_SSH_PORT, SSHConfig
+from antlion.controllers import pdu
+from antlion.controllers.fuchsia_lib.lib_controllers.wlan_controller import (
+    WlanController,
+)
+from antlion.controllers.fuchsia_lib.lib_controllers.wlan_policy_controller import (
+    WlanPolicyController,
+)
+from antlion.controllers.fuchsia_lib.package_server import PackageServer
+from antlion.controllers.fuchsia_lib.sl4f import SL4F
+from antlion.controllers.fuchsia_lib.ssh import (
+    DEFAULT_SSH_PRIVATE_KEY,
+    DEFAULT_SSH_USER,
+    FuchsiaSSHProvider,
+)
+from antlion.decorators import cached_property
+from antlion.runner import CalledProcessError
+from antlion.types import ControllerConfig, Json
+from antlion.utils import (
+    PingResult,
+    get_fuchsia_mdns_ipv6_address,
+    get_interface_ip_addresses,
+)
+from antlion.validation import FieldNotFoundError, MapValidator
+
+MOBLY_CONTROLLER_CONFIG_NAME: str = "FuchsiaDevice"
+ACTS_CONTROLLER_REFERENCE_NAME = "fuchsia_devices"
+
+FUCHSIA_RECONNECT_AFTER_REBOOT_TIME = 5
+
+FUCHSIA_REBOOT_TYPE_SOFT = "soft"
+FUCHSIA_REBOOT_TYPE_HARD = "hard"
+
+FUCHSIA_DEFAULT_CONNECT_TIMEOUT = 90
+FUCHSIA_DEFAULT_COMMAND_TIMEOUT = 60
+
+FUCHSIA_DEFAULT_CLEAN_UP_COMMAND_TIMEOUT = 15
+
+FUCHSIA_COUNTRY_CODE_TIMEOUT = 15
+FUCHSIA_DEFAULT_COUNTRY_CODE_US = "US"
+
+MDNS_LOOKUP_RETRY_MAX = 3
+
+FFX_PROXY_TIMEOUT_SEC = 3
+
+# Duration to wait for the Fuchsia device to acquire an IP address after
+# requested to join a network.
+#
+# Acquiring an IP address after connecting to a WLAN network could take up to
+# 15 seconds if we get unlucky:
+#
+#  1. An outgoing passive scan just started (~7s)
+#  2. An active scan is queued for the newly saved network (~7s)
+#  3. The initial connection attempt fails (~1s)
+IP_ADDRESS_TIMEOUT = 30
+
+
+class FuchsiaDeviceError(signals.ControllerError):
+    pass
+
+
+class FuchsiaConfigError(signals.ControllerError):
+    """Incorrect FuchsiaDevice configuration."""
+
+
+def create(configs: list[ControllerConfig]) -> list[FuchsiaDevice]:
+    return [FuchsiaDevice(c) for c in configs]
+
+
+def destroy(objects: list[FuchsiaDevice]) -> None:
+    for fd in objects:
+        fd.clean_up()
+        del fd
+
+
+def get_info(objects: list[FuchsiaDevice]) -> list[Json]:
+    """Get information on a list of FuchsiaDevice objects."""
+    return [{"ip": fd.ip} for fd in objects]
+
+
+class FuchsiaDevice:
+    """Class representing a Fuchsia device.
+
+    Each object of this class represents one Fuchsia device in ACTS.
+
+    Attributes:
+        ip: The full address or Fuchsia abstract name to contact the Fuchsia
+            device at
+        log: A logger object.
+        ssh_port: The SSH TCP port number of the Fuchsia device.
+        sl4f_port: The SL4F HTTP port number of the Fuchsia device.
+        ssh_config: The ssh_config for connecting to the Fuchsia device.
+    """
+
+    def __init__(self, controller_config: ControllerConfig) -> None:
+        config = MapValidator(controller_config)
+        self.ip = config.get(str, "ip")
+        if "%" in self.ip:
+            addr, scope_id = self.ip.split("%", 1)
+            try:
+                if_name = socket.if_indextoname(int(scope_id))
+                self.ip = f"{addr}%{if_name}"
+            except ValueError:
+                # Scope ID is likely already the interface name, no change necessary.
+                pass
+        self.orig_ip = self.ip
+        self.sl4f_port = config.get(int, "sl4f_port", 80)
+        self.ssh_username = config.get(str, "ssh_username", DEFAULT_SSH_USER)
+        self.ssh_port = config.get(int, "ssh_port", DEFAULT_SSH_PORT)
+        self.ssh_binary_path = config.get(str, "ssh_binary_path", "ssh")
+
+        def expand(path: str) -> str:
+            return os.path.expandvars(os.path.expanduser(path))
+
+        def path_from_config(name: str, default: str | None = None) -> str | None:
+            path = config.get(str, name, default)
+            return None if path is None else expand(path)
+
+        def assert_exists(name: str, path: str | None) -> None:
+            if path is None:
+                raise FuchsiaDeviceError(
+                    f'Please specify "${name}" in your configuration file'
+                )
+            if not os.path.exists(path):
+                raise FuchsiaDeviceError(
+                    f'Please specify a correct "${name}" in your configuration '
+                    f'file: "{path}" does not exist'
+                )
+
+        self.specific_image: str | None = path_from_config("specific_image")
+        if self.specific_image:
+            assert_exists("specific_image", self.specific_image)
+
+        # Path to a tar.gz archive with pm and amber-files, as necessary for
+        # starting a package server.
+        self.packages_archive_path: str | None = path_from_config(
+            "packages_archive_path"
+        )
+        if self.packages_archive_path:
+            assert_exists("packages_archive_path", self.packages_archive_path)
+
+        def required_path_from_config(name: str, default: str | None = None) -> str:
+            path = path_from_config(name, default)
+            if path is None:
+                raise FuchsiaConfigError(f"{name} is a required config field")
+            assert_exists(name, path)
+            return path
+
+        self.ssh_priv_key: str = required_path_from_config(
+            "ssh_priv_key", DEFAULT_SSH_PRIVATE_KEY
+        )
+        self.ffx_binary_path: str = required_path_from_config(
+            "ffx_binary_path", "${FUCHSIA_DIR}/.jiri_root/bin/ffx"
+        )
+        self.ffx_subtools_search_path: str | None = path_from_config(
+            "ffx_subtools_search_path"
+        )
+
+        self.authorized_file = config.get(str, "authorized_file_loc", None)
+        self.serial_number = config.get(str, "serial_number", None)
+        self.device_type = config.get(str, "device_type", None)
+        self.product_type = config.get(str, "product_type", None)
+        self.board_type = config.get(str, "board_type", None)
+        self.build_number = config.get(str, "build_number", None)
+        self.build_type = config.get(str, "build_type", None)
+        self.mdns_name = config.get(str, "mdns_name", None)
+
+        self.hard_reboot_on_fail = config.get(bool, "hard_reboot_on_fail", False)
+        self.take_bug_report_on_fail = config.get(
+            bool, "take_bug_report_on_fail", False
+        )
+        self.device_pdu_config = config.get(dict, "PduDevice", {})
+        self.config_country_code = config.get(
+            str, "country_code", FUCHSIA_DEFAULT_COUNTRY_CODE_US
+        ).upper()
+
+        output_path = context.get_current_context().get_base_output_path()
+        self.ssh_config = os.path.join(output_path, f"ssh_config_{self.ip}")
+        self._generate_ssh_config(self.ssh_config)
+
+        # WLAN interface info is populated inside configure_wlan
+        self.wlan_client_interfaces: dict[str, Any] = {}
+        self.wlan_ap_interfaces: dict[str, Any] = {}
+        self.wlan_client_test_interface_name = config.get(
+            str, "wlan_client_test_interface", None
+        )
+        self.wlan_ap_test_interface_name = config.get(
+            str, "wlan_ap_test_interface", None
+        )
+        try:
+            self.wlan_features: list[str] = config.list("wlan_features").all(str)
+        except FieldNotFoundError:
+            self.wlan_features = []
+
+        # Whether to use 'policy' or 'drivers' for WLAN connect/disconnect calls
+        # If set to None, wlan is not configured.
+        self.association_mechanism: str | None = None
+        # Defaults to policy layer, unless otherwise specified in the config
+        self.default_association_mechanism = config.get(
+            str, "association_mechanism", "policy"
+        )
+
+        # Whether to clear and preserve existing saved networks and client
+        # connections state, to be restored at device teardown.
+        self.default_preserve_saved_networks = config.get(
+            bool, "preserve_saved_networks", True
+        )
+
+        if not utils.is_valid_ipv4_address(self.ip) and not utils.is_valid_ipv6_address(
+            self.ip
+        ):
+            mdns_ip = None
+            for _ in range(MDNS_LOOKUP_RETRY_MAX):
+                mdns_ip = get_fuchsia_mdns_ipv6_address(self.ip)
+                if mdns_ip:
+                    break
+                else:
+                    time.sleep(1)
+            if mdns_ip and utils.is_valid_ipv6_address(mdns_ip):
+                # self.ip was actually an mdns name. Use it for self.mdns_name
+                # unless one was explicitly provided.
+                self.mdns_name = self.mdns_name or self.ip
+                self.ip = mdns_ip
+            else:
+                raise ValueError(f"Invalid IP: {self.ip}")
+
+        self.log = logger.PrefixLoggerAdapter(
+            logging.getLogger(),
+            {
+                logger.PrefixLoggerAdapter.EXTRA_KEY_LOG_PREFIX: f"[FuchsiaDevice | {self.orig_ip}]",
+            },
+        )
+
+        self.ping_rtt_match = re.compile(
+            r"RTT Min/Max/Avg = \[ ([0-9.]+) / ([0-9.]+) / ([0-9.]+) \] ms"
+        )
+        self.serial = re.sub("[.:%]", "_", self.ip)
+        self.package_server: PackageServer | None = None
+
+        # Create honeydew fuchsia_device.
+        if not self.mdns_name:
+            raise FuchsiaConfigError(
+                'Must provide "mdns_name: <device mDNS name>" in the device config'
+            )
+
+        ffx_config = FfxConfig()
+        ffx_config.setup(
+            binary_path=self.ffx_binary_path,
+            isolate_dir=None,
+            logs_dir=f"{logging.log_path}/ffx/",  # type: ignore[attr-defined]
+            logs_level="None",
+            enable_mdns=False,
+            subtools_search_path=self.ffx_subtools_search_path,
+            proxy_timeout_secs=FFX_PROXY_TIMEOUT_SEC,
+        )
+
+        self.honeydew_fd = honeydew.create_device(
+            device_info=DeviceInfo(
+                name=self.mdns_name,
+                ip_port=IpPort(ip_address(self.ip), self.ssh_port),
+                serial_socket=None,
+            ),
+            ffx_config_data=ffx_config.get_config(),
+            config={
+                "affordances": {
+                    "wlan": {
+                        "implementation": "fuchsia-controller",
+                    },
+                },
+            },
+        )
+
+    @cached_property
+    def sl4f(self) -> SL4F:
+        """Get the sl4f module configured for this device."""
+        self.log.info("Started SL4F server")
+        return SL4F(self.ssh, self.sl4f_port)
+
+    @cached_property
+    def ssh(self) -> FuchsiaSSHProvider:
+        """Get the SSH provider module configured for this device."""
+        if not self.ssh_port:
+            raise FuchsiaConfigError(
+                'Must provide "ssh_port: <int>" in the device config'
+            )
+        if not self.ssh_priv_key:
+            raise FuchsiaConfigError(
+                'Must provide "ssh_priv_key: <file path>" in the device config'
+            )
+        return FuchsiaSSHProvider(
+            SSHConfig(
+                self.ssh_username,
+                self.ip,
+                self.ssh_priv_key,
+                port=self.ssh_port,
+                ssh_binary=self.ssh_binary_path,
+            )
+        )
+
+    @property
+    def ffx(self) -> FFX:
+        """Returns the underlying Honeydew FFX transport object.
+
+        Returns:
+            The underlying Honeydew FFX transport object.
+
+        Raises:
+            FfxCommandError: Failed to instantiate.
+        """
+        return self.honeydew_fd.ffx
+
+    @cached_property
+    def wlan_policy_controller(self) -> WlanPolicyController:
+        return WlanPolicyController(self.honeydew_fd, self.ssh)
+
+    @cached_property
+    def wlan_controller(self) -> WlanController:
+        return WlanController(self.honeydew_fd)
+
+    def _generate_ssh_config(self, file_path: str) -> None:
+        """Generate and write an SSH config for Fuchsia to disk.
+
+        Args:
+            file_path: Path to write the generated SSH config
+        """
+        content = textwrap.dedent(
+            f"""\
+            Host *
+                CheckHostIP no
+                StrictHostKeyChecking no
+                ForwardAgent no
+                ForwardX11 no
+                GSSAPIDelegateCredentials no
+                UserKnownHostsFile /dev/null
+                User fuchsia
+                IdentitiesOnly yes
+                IdentityFile {self.ssh_priv_key}
+                ControlPersist yes
+                ControlMaster auto
+                ControlPath /tmp/fuchsia--%r@%h:%p
+                ServerAliveInterval 1
+                ServerAliveCountMax 1
+                LogLevel ERROR
+            """
+        )
+
+        with open(file_path, "w", encoding="utf-8") as file:
+            file.write(content)
+
+    def start_package_server(self) -> None:
+        if not self.packages_archive_path:
+            self.log.warn(
+                "packages_archive_path is not specified. "
+                "Assuming a package server is already running and configured on "
+                "the DUT. If this is not the case, either run your own package "
+                "server, or configure these fields appropriately. "
+                "This is usually required for the Fuchsia iPerf3 client or "
+                "other testing utilities not on device cache."
+            )
+            return
+        if self.package_server:
+            self.log.warn(
+                "Skipping to start the package server since is already running"
+            )
+            return
+
+        self.package_server = PackageServer(self.packages_archive_path)
+        self.package_server.start()
+        self.package_server.configure_device(self.ssh)
+
+    def update_wlan_interfaces(self) -> None:
+        """Retrieves WLAN interfaces from device and sets the FuchsiaDevice
+        attributes.
+        """
+        wlan_interfaces = self.wlan_controller.get_interfaces_by_role()
+        self.wlan_client_interfaces = wlan_interfaces.client
+        self.wlan_ap_interfaces = wlan_interfaces.ap
+
+        # Set test interfaces to value from config, else the first found
+        # interface, else None
+        if self.wlan_client_test_interface_name is None:
+            self.wlan_client_test_interface_name = next(
+                iter(self.wlan_client_interfaces), None
+            )
+
+        if self.wlan_ap_test_interface_name is None:
+            self.wlan_ap_test_interface_name = next(iter(self.wlan_ap_interfaces), None)
+
+    def configure_wlan(
+        self,
+        association_mechanism: str | None = None,
+        preserve_saved_networks: bool | None = None,
+    ) -> None:
+        """
+        Readies device for WLAN functionality. If applicable, connects to the
+        policy layer and clears/saves preexisting saved networks.
+
+        Args:
+            association_mechanism: either 'policy' or 'drivers'. If None, uses
+                the default value from init (can be set by ACTS config)
+            preserve_saved_networks: whether to clear existing saved
+                networks, and preserve them for restoration later. If None, uses
+                the default value from init (can be set by ACTS config)
+
+        Raises:
+            FuchsiaDeviceError, if configuration fails
+        """
+        self.wlan_controller.set_country_code(CountryCode(self.config_country_code))
+
+        # If args aren't provided, use the defaults, which can be set in the
+        # config.
+        if association_mechanism is None:
+            association_mechanism = self.default_association_mechanism
+        if preserve_saved_networks is None:
+            preserve_saved_networks = self.default_preserve_saved_networks
+
+        if association_mechanism not in {None, "policy", "drivers"}:
+            raise FuchsiaDeviceError(
+                f"Invalid FuchsiaDevice association_mechanism: {association_mechanism}"
+            )
+
+        # Allows for wlan to be set up differently in different tests
+        if self.association_mechanism:
+            self.log.info("Deconfiguring WLAN")
+            self.deconfigure_wlan()
+
+        self.association_mechanism = association_mechanism
+
+        self.log.info(
+            "Configuring WLAN w/ association mechanism: " f"{association_mechanism}"
+        )
+        if association_mechanism == "drivers":
+            self.log.warn(
+                "You may encounter unusual device behavior when using the "
+                "drivers directly for WLAN. This should be reserved for "
+                "debugging specific issues. Normal test runs should use the "
+                "policy layer."
+            )
+            if preserve_saved_networks:
+                self.log.warn(
+                    "Unable to preserve saved networks when using drivers "
+                    "association mechanism (requires policy layer control)."
+                )
+        else:
+            # This requires SL4F calls, so it can only happen with actual
+            # devices, not with unit tests.
+            self.wlan_policy_controller.configure_wlan(preserve_saved_networks)
+
+        # Retrieve WLAN client and AP interfaces
+        self.update_wlan_interfaces()
+
+    def deconfigure_wlan(self) -> None:
+        """
+        Stops WLAN functionality (if it has been started). Used to allow
+        different tests to use WLAN differently (e.g. some tests require using
+        wlan policy, while the abstract wlan_device can be setup to use policy
+        or drivers)
+
+        Raises:
+            FuchsiaDeviceError, if deconfigure fails.
+        """
+        if not self.association_mechanism:
+            self.log.warning("WLAN not configured before deconfigure was called.")
+            return
+        # If using policy, stop client connections. Otherwise, just clear
+        # variables.
+        if self.association_mechanism != "drivers":
+            self.wlan_policy_controller._deconfigure_wlan()
+        self.association_mechanism = None
+
+    def reboot(
+        self,
+        unreachable_timeout: int = FUCHSIA_DEFAULT_CONNECT_TIMEOUT,
+        reboot_type: str = FUCHSIA_REBOOT_TYPE_SOFT,
+        testbed_pdus: list[pdu.PduDevice] | None = None,
+    ) -> None:
+        """Reboot a FuchsiaDevice.
+
+        Soft reboots the device, verifies it becomes unreachable, then verifies
+        it comes back online. Re-initializes services so the tests can continue.
+
+        Args:
+            unreachable_timeout: time to wait for device to become unreachable.
+            reboot_type: FUCHSIA_REBOOT_TYPE_SOFT to reboot via Honeydew, or
+                FUCHSIA_REBOOT_TYPE_HARD to power-cycle via dmc or a PDU.
+            testbed_pdus: all testbed PDUs; required for a hard reboot when
+                dmc is unavailable.
+
+        Raises:
+            ConnectionError, if device fails to become unreachable or fails to
+                come back up.
+        """
+        if reboot_type == FUCHSIA_REBOOT_TYPE_SOFT:
+            self.log.info("Soft rebooting")
+            self.honeydew_fd.reboot()
+
+        elif reboot_type == FUCHSIA_REBOOT_TYPE_HARD:
+            self.log.info("Hard rebooting via PDU")
+
+            # Use dmc (client of DMS, device management server) if available
+            # for rebooting the device. This tool is only available when
+            # running in Fuchsia infrastructure.
+            dmc: PowerSwitchUsingDmc | None = None
+            if self.mdns_name:
+                try:
+                    dmc = PowerSwitchUsingDmc(device_name=self.mdns_name)
+                except PowerSwitchDmcError:
+                    self.log.info("dmc not found, falling back to using PDU")
+
+            if dmc:
+                self.log.info("Killing power to FuchsiaDevice with dmc")
+                dmc.power_off()
+                self.honeydew_fd.wait_for_offline()
+
+                self.log.info("Restoring power to FuchsiaDevice with dmc")
+                dmc.power_on()
+                self.honeydew_fd.wait_for_online()
+                self.honeydew_fd.on_device_boot()
+            else:
+                # Find the matching PDU in the Mobly config.
+                if not testbed_pdus:
+                    raise AttributeError(
+                        "Testbed PDUs must be supplied to hard reboot a fuchsia_device."
+                    )
+                device_pdu, device_pdu_port = pdu.get_pdu_port_for_device(
+                    self.device_pdu_config, testbed_pdus
+                )
+
+                self.log.info("Killing power to FuchsiaDevice")
+                device_pdu.off(device_pdu_port)
+                self.honeydew_fd.wait_for_offline()
+
+                self.log.info("Restoring power to FuchsiaDevice")
+                device_pdu.on(device_pdu_port)
+                self.honeydew_fd.wait_for_online()
+                self.honeydew_fd.on_device_boot()
+
+        else:
+            raise ValueError(f"Invalid reboot type: {reboot_type}")
+
+        # Cleanup services
+        self.stop_services()
+
+        # TODO(http://b/246852449): Move configure_wlan to other controllers.
+        # If wlan was configured before reboot, it must be configured again
+        # after rebooting, as it was before reboot. No preserving should occur.
+        if self.association_mechanism:
+            pre_reboot_association_mechanism = self.association_mechanism
+            # Prevent configure_wlan from thinking it needs to deconfigure first
+            self.association_mechanism = None
+            self.configure_wlan(
+                association_mechanism=pre_reboot_association_mechanism,
+                preserve_saved_networks=False,
+            )
+
+        self.log.info("Device has rebooted")
+
+    def ping(
+        self,
+        dest_ip: str,
+        count: int = 3,
+        interval: int = 1000,
+        timeout: int = 1000,
+        size: int = 25,
+        additional_ping_params: str | None = None,
+    ) -> PingResult:
+        """Pings from a Fuchsia device to an IPv4 address or hostname
+
+        Args:
+            dest_ip: (str) The ip or hostname to ping.
+            count: (int) How many icmp packets to send.
+            interval: (int) How long to wait between pings (ms)
+            timeout: (int) How long to wait before having the icmp packet
+                timeout (ms).
+            size: (int) Size of the icmp packet.
+            additional_ping_params: (str) command option flags to
+                append to the command string
+
+        Returns:
+            A dictionary for the results of the ping.  The dictionary contains
+            the following items:
+                status: Whether the ping was successful.
+                rtt_min: The minimum round trip time of the ping.
+                rtt_max: The maximum round trip time of the ping.
+                rtt_avg: The avg round trip time of the ping.
+                stdout: The standard out of the ping command.
+                stderr: The standard error of the ping command.
+        """
+        self.log.debug(f"Pinging {dest_ip}...")
+        if not additional_ping_params:
+            additional_ping_params = ""
+
+        try:
+            ping_result = self.ssh.run(
+                f"ping -c {count} -i {interval} -t {timeout} -s {size} "
+                f"{additional_ping_params} {dest_ip}"
+            )
+        except CalledProcessError as e:
+            self.log.debug(f"Failed to ping from host: {e}")
+            return PingResult(
+                exit_status=e.returncode,
+                stdout=e.stdout.decode("utf-8"),
+                stderr=e.stderr.decode("utf-8"),
+                transmitted=None,
+                received=None,
+                time_ms=None,
+                rtt_min_ms=None,
+                rtt_avg_ms=None,
+                rtt_max_ms=None,
+                rtt_mdev_ms=None,
+            )
+
+        rtt_stats: re.Match[str] | None = None
+
+        if not ping_result.stderr:
+            rtt_lines = ping_result.stdout.decode("utf-8").split("\n")[:-1]
+            rtt_line = rtt_lines[-1]
+            rtt_stats = re.search(self.ping_rtt_match, rtt_line)
+            if rtt_stats is None:
+                raise FuchsiaDeviceError(f'Unable to parse ping output: "{rtt_line}"')
+
+        return PingResult(
+            exit_status=ping_result.returncode,
+            stdout=ping_result.stdout.decode("utf-8"),
+            stderr=ping_result.stderr.decode("utf-8"),
+            transmitted=None,
+            received=None,
+            time_ms=None,
+            rtt_min_ms=float(rtt_stats.group(1)) if rtt_stats else None,
+            rtt_avg_ms=float(rtt_stats.group(3)) if rtt_stats else None,
+            rtt_max_ms=float(rtt_stats.group(2)) if rtt_stats else None,
+            rtt_mdev_ms=None,
+        )
+
+    def clean_up(self) -> None:
+        """Cleans up the FuchsiaDevice object, releases any resources it
+        claimed, and restores saved networks if applicable. For reboots, use
+        stop_services only.
+
+        Note: Any exceptions thrown in this method must be caught and handled,
+        ensuring that stop_services is run. Otherwise, the syslog listening
+        thread will never join and will leave tests hanging.
+        """
+        # If and only if wlan is configured, and using the policy layer
+        if self.association_mechanism == "policy":
+            try:
+                self.wlan_policy_controller.clean_up()
+            except Exception as err:
+                self.log.warning(f"Unable to clean up WLAN Policy layer: {err}")
+
+        self.stop_services()
+
+        if self.package_server:
+            self.package_server.clean_up()
+
+    def get_interface_ip_addresses(self, interface: str) -> dict[str, list[str]]:
+        return get_interface_ip_addresses(self, interface)
+
+    def wait_for_ipv4_addr(self, interface: str) -> None:
+        """Checks if device has an ipv4 private address. Sleeps 1 second between
+        retries.
+
+        Args:
+            interface: name of interface from which to get ipv4 address.
+
+        Raises:
+            ConnectionError, if device does not have an ipv4 address after all
+            timeout.
+        """
+        self.log.info(
+            f"Checking for valid ipv4 addr. Retry {IP_ADDRESS_TIMEOUT} seconds."
+        )
+        timeout = time.time() + IP_ADDRESS_TIMEOUT
+        while time.time() < timeout:
+            ip_addrs = self.get_interface_ip_addresses(interface)
+
+            if len(ip_addrs["ipv4_private"]) > 0:
+                self.log.info(
+                    "Device has an ipv4 address: " f"{ip_addrs['ipv4_private'][0]}"
+                )
+                break
+            else:
+                self.log.debug(
+                    "Device does not yet have an ipv4 address...retrying in 1 "
+                    "second."
+                )
+                time.sleep(1)
+        else:
+            raise ConnectionError("Device failed to get an ipv4 address.")
+
+    def wait_for_ipv6_addr(self, interface: str) -> None:
+        """Checks if device has an ipv6 private local address. Sleeps 1 second
+        between retries.
+
+        Args:
+            interface: name of interface from which to get ipv6 address.
+
+        Raises:
+            ConnectionError, if device does not have an ipv6 address after all
+            timeout.
+        """
+        self.log.info(
+            f"Checking for valid ipv6 addr. Retry {IP_ADDRESS_TIMEOUT} seconds."
+        )
+        timeout = time.time() + IP_ADDRESS_TIMEOUT
+        while time.time() < timeout:
+            ip_addrs = self.get_interface_ip_addresses(interface)
+            if len(ip_addrs["ipv6_private_local"]) > 0:
+                self.log.info(
+                    "Device has an ipv6 private local address: "
+                    f"{ip_addrs['ipv6_private_local'][0]}"
+                )
+                break
+            else:
+                self.log.debug(
+                    "Device does not yet have an ipv6 address...retrying in 1 "
+                    "second."
+                )
+                time.sleep(1)
+        else:
+            raise ConnectionError("Device failed to get an ipv6 address.")
+
+    def stop_services(self) -> None:
+        """Stops all host-side clients to the Fuchsia device.
+
+        This is necessary whenever the device's state is unknown. These cases can be
+        found after device reboots, for example.
+        """
+        self.log.info("Stopping host device services.")
+        del self.wlan_policy_controller
+        del self.wlan_controller
+        del self.sl4f
+        del self.ssh
+
+    def take_bug_report(self) -> None:
+        """Takes a bug report on the device and stores it in a file."""
+        self.log.info(f"Taking snapshot of {self.mdns_name}")
+
+        time_stamp = logger.sanitize_filename(
+            logger.epoch_to_log_line_timestamp(utils.get_current_epoch_time())
+        )
+        out_dir = context.get_current_context().get_full_output_path()
+        out_path = os.path.join(out_dir, f"{self.mdns_name}_{time_stamp}.zip")
+
+        try:
+            with open(out_path, "wb") as file:
+                snapshot_bytes = self.ssh.run("snapshot", log_output=False).stdout
+                file.write(snapshot_bytes)
+            self.log.info(f"Snapshot saved to {out_path}")
+        except Exception as err:
+            self.log.error(f"Failed to take snapshot: {err}")
+
+    def take_bt_snoop_log(self, custom_name: str | None = None) -> None:
+        """Takes a the bt-snoop log from the device and stores it in a file
+        in a pcap format.
+        """
+        bt_snoop_path = context.get_current_context().get_full_output_path()
+        time_stamp = logger.sanitize_filename(
+            logger.epoch_to_log_line_timestamp(time.time())
+        )
+        out_name = "FuchsiaDevice%s_%s" % (
+            self.serial,
+            time_stamp.replace(" ", "_").replace(":", "-"),
+        )
+        out_name = f"{out_name}.pcap"
+        if custom_name:
+            out_name = f"{self.serial}_{custom_name}.pcap"
+        else:
+            out_name = f"{out_name}.pcap"
+        full_out_path = os.path.join(bt_snoop_path, out_name)
+        with open(full_out_path, "wb") as file:
+            pcap_bytes = self.ssh.run("bt-snoop-cli -d -f pcap").stdout
+            file.write(pcap_bytes)
diff --git a/src/antlion/controllers/fuchsia_lib/OWNERS b/packages/antlion/controllers/fuchsia_lib/OWNERS
similarity index 89%
rename from src/antlion/controllers/fuchsia_lib/OWNERS
rename to packages/antlion/controllers/fuchsia_lib/OWNERS
index 130db54..bc76ac3 100644
--- a/src/antlion/controllers/fuchsia_lib/OWNERS
+++ b/packages/antlion/controllers/fuchsia_lib/OWNERS
@@ -1,5 +1,4 @@
 chcl@google.com
-dhobsd@google.com
 haydennix@google.com
 jmbrenna@google.com
 mnck@google.com
diff --git a/src/antlion/controllers/fuchsia_lib/__init__.py b/packages/antlion/controllers/fuchsia_lib/__init__.py
similarity index 100%
rename from src/antlion/controllers/fuchsia_lib/__init__.py
rename to packages/antlion/controllers/fuchsia_lib/__init__.py
diff --git a/src/antlion/controllers/fuchsia_lib/base_lib.py b/packages/antlion/controllers/fuchsia_lib/base_lib.py
similarity index 77%
rename from src/antlion/controllers/fuchsia_lib/base_lib.py
rename to packages/antlion/controllers/fuchsia_lib/base_lib.py
index ea7f96e..1171d98 100644
--- a/src/antlion/controllers/fuchsia_lib/base_lib.py
+++ b/packages/antlion/controllers/fuchsia_lib/base_lib.py
@@ -15,12 +15,11 @@
 # limitations under the License.
 
 import json
-import socket
-
+import logging
 from typing import Any, Mapping
 from urllib.request import Request, urlopen
 
-from antlion import logger
+from mobly.logger import PrefixLoggerAdapter
 
 DEFAULT_SL4F_RESPONSE_TIMEOUT_SEC = 30
 
@@ -36,16 +35,19 @@
 class BaseLib:
     def __init__(self, addr: str, logger_tag: str) -> None:
         self.address = addr
-        self.log = logger.create_tagged_trace_logger(
-            f"SL4F | {self.address} | {logger_tag}"
+        self.log = PrefixLoggerAdapter(
+            logging.getLogger(),
+            {
+                PrefixLoggerAdapter.EXTRA_KEY_LOG_PREFIX: f"SL4F | {self.address} | {logger_tag}"
+            },
         )
 
     def send_command(
         self,
         cmd: str,
-        args: Mapping[str, Any],
-        response_timeout: int = DEFAULT_SL4F_RESPONSE_TIMEOUT_SEC,
-    ) -> Mapping[str, Any]:
+        args: Mapping[str, object] | None = None,
+        response_timeout: float = DEFAULT_SL4F_RESPONSE_TIMEOUT_SEC,
+    ) -> dict[str, Any]:
         """Builds and sends a JSON command to SL4F server.
 
         Args:
@@ -74,18 +76,14 @@
             data=data_json,
             headers={
                 "Content-Type": "application/json; charset=utf-8",
-                "Content-Length": len(data_json),
+                "Content-Length": str(len(data_json)),
             },
         )
 
-        self.log.debug(f'Sending request "{cmd}" with {args}')
-        try:
-            response = urlopen(req, timeout=response_timeout)
-        except socket.timeout as e:
-            # socket.timeout was aliased to TimeoutError in Python 3.10. For
-            # older versions of Python, we need to cast to TimeoutError to
-            # provide a version-agnostic API.
-            raise TimeoutError("socket timeout") from e
+        self.log.debug(
+            f'Sending request "{cmd}" with args: {args} with timeout {response_timeout}'
+        )
+        response = urlopen(req, timeout=response_timeout)
 
         response_body = response.read().decode("utf-8")
         try:
diff --git a/src/antlion/controllers/fuchsia_lib/lib_controllers/__init__.py b/packages/antlion/controllers/fuchsia_lib/lib_controllers/__init__.py
similarity index 100%
rename from src/antlion/controllers/fuchsia_lib/lib_controllers/__init__.py
rename to packages/antlion/controllers/fuchsia_lib/lib_controllers/__init__.py
diff --git a/packages/antlion/controllers/fuchsia_lib/lib_controllers/wlan_controller.py b/packages/antlion/controllers/fuchsia_lib/lib_controllers/wlan_controller.py
new file mode 100644
index 0000000..07d521a
--- /dev/null
+++ b/packages/antlion/controllers/fuchsia_lib/lib_controllers/wlan_controller.py
@@ -0,0 +1,135 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+import time
+from dataclasses import dataclass
+
+from honeydew.affordances.connectivity.wlan.utils.types import (
+    CountryCode,
+    QueryIfaceResponse,
+    WlanMacRole,
+)
+from honeydew.fuchsia_device.fuchsia_device import FuchsiaDevice as HdFuchsiaDevice
+from mobly import logger, signals
+
+from antlion import utils
+
+TIME_TO_SLEEP_BETWEEN_RETRIES = 1
+TIME_TO_WAIT_FOR_COUNTRY_CODE = 10
+
+
+@dataclass(frozen=True)
+class WlanInterfaces:
+    """WLAN interfaces separated device type and keyed by name."""
+
+    client: dict[str, QueryIfaceResponse]
+    """Client WLAN interfaces keyed by name."""
+
+    ap: dict[str, QueryIfaceResponse]
+    """AP WLAN interfaces keyed by name."""
+
+
+class WlanControllerError(signals.ControllerError):
+    pass
+
+
+class WlanController:
+    """Contains methods related to wlan core, to be used in FuchsiaDevice object"""
+
+    def __init__(self, honeydew: HdFuchsiaDevice) -> None:
+        self.honeydew = honeydew
+        self.log = logger.PrefixLoggerAdapter(
+            logging.getLogger(),
+            {
+                logger.PrefixLoggerAdapter.EXTRA_KEY_LOG_PREFIX: f"[WlanController | {self.honeydew.device_name}]",
+            },
+        )
+
+    def get_interfaces_by_role(self) -> WlanInterfaces:
+        """Retrieves WLAN interface information."""
+
+        # Retrieve WLAN interface IDs
+        wlan_iface_ids = self.honeydew.wlan.get_iface_id_list()
+        if len(wlan_iface_ids) < 1:
+            return WlanInterfaces(client={}, ap={})
+
+        # Use IDs to get WLAN interface info and mac addresses
+        wlan_ifaces_by_mac: dict[str, QueryIfaceResponse] = {}
+        for id in wlan_iface_ids:
+            result = self.honeydew.wlan.query_iface(id)
+            mac = utils.mac_address_list_to_str(bytes(result.sta_addr))
+            wlan_ifaces_by_mac[mac] = result
+
+        # Use mac addresses to query the interfaces from the netstack view,
+        # which allows us to supplement the interface information with the name,
+        # netstack_id, etc.
+
+        # TODO(http://fxb/75909): This tedium is necessary to get the interface name
+        # because only netstack has that information. The bug linked here is
+        # to reconcile some of the information between the two perspectives, at
+        # which point we can eliminate this step.
+        ifaces = self.honeydew.netstack.list_interfaces()
+
+        client: dict[str, QueryIfaceResponse] = {}
+        ap: dict[str, QueryIfaceResponse] = {}
+
+        for iface in ifaces:
+            if iface.mac is None:
+                self.log.debug(f"No MAC address for iface {iface.name}")
+                continue
+
+            mac = str(iface.mac)
+            if mac in wlan_ifaces_by_mac:
+                result = wlan_ifaces_by_mac[mac]
+                match result.role:
+                    case WlanMacRole.CLIENT:
+                        client[iface.name] = result
+                    case WlanMacRole.AP:
+                        ap[iface.name] = result
+                    case _:
+                        raise ValueError(f'Unexpected WlanMacRole "{result.role}"')
+
+        return WlanInterfaces(client, ap)
+
+    def set_country_code(self, country_code: CountryCode) -> None:
+        """Sets country code through the regulatory region service and waits
+        for the code to be applied to WLAN PHY.
+
+        Args:
+            country_code: the 2 character country code to set
+
+        Raises:
+            EnvironmentError - failure to get/set regulatory region
+            ConnectionError - failure to query PHYs
+        """
+        self.log.info(f"Setting DUT country code to {country_code}")
+        self.honeydew.wlan.set_region(country_code)
+
+        self.log.info(
+            f"Verifying DUT country code was correctly set to {country_code}."
+        )
+        phy_ids_response = self.honeydew.wlan.get_phy_id_list()
+
+        end_time = time.time() + TIME_TO_WAIT_FOR_COUNTRY_CODE
+        while time.time() < end_time:
+            for id in phy_ids_response:
+                resp = self.honeydew.wlan.get_country(id)
+                if resp == country_code:
+                    return
+                time.sleep(TIME_TO_SLEEP_BETWEEN_RETRIES)
+        else:
+            raise EnvironmentError(f"Failed to set DUT country code to {country_code}.")
diff --git a/packages/antlion/controllers/fuchsia_lib/lib_controllers/wlan_policy_controller.py b/packages/antlion/controllers/fuchsia_lib/lib_controllers/wlan_policy_controller.py
new file mode 100644
index 0000000..e593269
--- /dev/null
+++ b/packages/antlion/controllers/fuchsia_lib/lib_controllers/wlan_policy_controller.py
@@ -0,0 +1,358 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+import time
+from dataclasses import dataclass
+
+from honeydew.affordances.connectivity.wlan.utils.errors import HoneydewWlanError
+from honeydew.affordances.connectivity.wlan.utils.types import (
+    ConnectionState,
+    DisconnectStatus,
+    NetworkConfig,
+    NetworkState,
+    WlanClientState,
+)
+from honeydew.fuchsia_device.fuchsia_device import FuchsiaDevice as HdFuchsiaDevice
+from mobly import logger, signals
+
+from antlion.controllers.fuchsia_lib.ssh import FuchsiaSSHProvider
+
+SESSION_MANAGER_TIMEOUT_SEC = 10
+FUCHSIA_DEFAULT_WLAN_CONFIGURE_TIMEOUT = 30
+DEFAULT_GET_UPDATE_TIMEOUT = 60
+
+
+class WlanPolicyControllerError(signals.ControllerError):
+    pass
+
+
+@dataclass
+class PreservedState:
+    saved_networks: list[NetworkConfig] | None
+    client_connections_state: WlanClientState | None
+
+
+@dataclass
+class ClientState:
+    state: str
+    networks: list[dict[str, object]]
+
+
+# TODO(http://b/309854439): Add a ClientStateWatcher and refactor tests to allow test
+# developers more control when update listeners are set and the client update state is
+# reset.
+class WlanPolicyController:
+    """Contains methods related to the wlan policy layer, to be used in the
+    FuchsiaDevice object."""
+
+    def __init__(self, honeydew: HdFuchsiaDevice, ssh: FuchsiaSSHProvider) -> None:
+        self.preserved_networks_and_client_state: PreservedState | None = None
+        self.policy_configured = False
+        self.honeydew = honeydew
+        self.ssh = ssh
+        self.log = logger.PrefixLoggerAdapter(
+            logging.getLogger(),
+            {
+                logger.PrefixLoggerAdapter.EXTRA_KEY_LOG_PREFIX: f"[WlanPolicyController | {self.ssh.config.host_name}]",
+            },
+        )
+
+    def configure_wlan(
+        self,
+        preserve_saved_networks: bool,
+        timeout_sec: int = FUCHSIA_DEFAULT_WLAN_CONFIGURE_TIMEOUT,
+    ) -> None:
+        """Sets up wlan policy layer.
+
+        Args:
+            preserve_saved_networks: whether to clear existing saved
+                networks and client state, to be restored at test close.
+            timeout_sec: time to wait for device to configure WLAN.
+        """
+
+        # We need to stop session manager to free control of
+        # fuchsia.wlan.policy.ClientController, which can only be used by a
+        # single caller at a time. Fuchsia Controller needs the ClientController
+        # to trigger WLAN policy state changes. On eng builds the
+        # session_manager can be restarted after being stopped during reboot so
+        # we attempt killing the session manager process for 10 seconds.
+        # See https://cs.opensource.google/fuchsia/fuchsia/+/main:sdk/fidl/fuchsia.wlan.policy/client_provider.fidl
+        if b"cast_agent.cm" in self.ssh.run("ps").stdout:
+            session_manager_expiration = time.time() + SESSION_MANAGER_TIMEOUT_SEC
+            while time.time() < session_manager_expiration:
+                self.ssh.stop_component("session_manager", is_cfv2_component=True)
+
+        # Acquire control of policy layer
+        self.honeydew.wlan_policy.create_client_controller()
+        self.log.info("ACTS tests now have control of the WLAN policy layer.")
+
+        if preserve_saved_networks and not self.preserved_networks_and_client_state:
+            self.preserved_networks_and_client_state = (
+                self.remove_and_preserve_networks_and_client_state()
+            )
+
+        self.honeydew.wlan_policy.start_client_connections()
+        self.policy_configured = True
+
+    def _deconfigure_wlan(self) -> None:
+        self.honeydew.wlan_policy.stop_client_connections()
+        self.policy_configured = False
+
+    def clean_up(self) -> None:
+        if self.preserved_networks_and_client_state is not None:
+            # It is possible for policy to have been configured before, but
+            # deconfigured before test end. In this case, it must be set up
+            # before restoring networks
+            if not self.policy_configured:
+                self.configure_wlan(False)
+
+        self.restore_preserved_networks_and_client_state()
+
+    def _find_network(
+        self, ssid: str, networks: list[NetworkState]
+    ) -> NetworkState | None:
+        """Helper method to find network in list of network states.
+
+        Args:
+            ssid: The network name to look for.
+            networks: The list of network states to look in.
+
+        Returns:
+            Network state of target ssid or None if not found in networks.
+        """
+        for network in networks:
+            if network.network_identifier.ssid == ssid:
+                return network
+        return None
+
+    def wait_for_network_state(
+        self,
+        ssid: str,
+        expected_states: ConnectionState | set[ConnectionState],
+        expected_status: DisconnectStatus | None = None,
+        timeout_sec: int = DEFAULT_GET_UPDATE_TIMEOUT,
+    ) -> ConnectionState:
+        """Waits until the device returns with expected network state.
+
+        Args:
+            ssid: The network name to check the state of.
+            expected_states: The network state or states we are expecting to see.
+            expected_status: The disconnect status of the network. Only relevant when
+                expected_state is FAILED or DISCONNECTED.
+            timeout_sec: The number of seconds to wait for a update showing connection.
+
+        Returns:
+            Current network state if network converges on one of the expected states.
+
+        Raises:
+            TypeError: If DisconnectStatus provided with a CONNECTING or CONNECTED
+                state.
+            WlanPolicyControllerError: If no network is found before timeout or fails to
+                converge to one of the expected states.
+        """
+
+        if not isinstance(expected_states, set):
+            expected_states = {expected_states}
+
+        # A disconnect status is only meaningful for FAILED/DISCONNECTED.
+        if (
+            expected_states.issubset(
+                {ConnectionState.CONNECTING, ConnectionState.CONNECTED}
+            )
+            and expected_status is not None
+        ):
+            raise TypeError(
+                "Disconnect status not valid for CONNECTING or CONNECTED states."
+            )
+
+        self.honeydew.wlan_policy.set_new_update_listener()
+        network: NetworkState | None = None
+
+        end_time = time.time() + timeout_sec
+        while time.time() < end_time:
+            time_left = max(1.0, end_time - time.time())
+            try:
+                client = self.honeydew.wlan_policy.get_update(timeout=time_left)
+            except TimeoutError as e:
+                self.log.debug("Timeout waiting for WLAN state updates: %s", e)
+                continue
+
+            # If we don't find the network initially, wait and retry.
+            network = self._find_network(ssid, client.networks)
+            if network is None:
+                self.log.debug(
+                    f"{ssid} not found in client networks: {client.networks}"
+                )
+                continue
+
+            if network.connection_state in expected_states:
+                # Check optional disconnect status matches.
+                if expected_status:
+                    if network.disconnect_status is not expected_status:
+                        raise WlanPolicyControllerError(
+                            f"Disconnect status is not {expected_status}"
+                        )
+            elif network.connection_state is ConnectionState.CONNECTING:
+                self.log.debug(f"Network {ssid} still attempting to connect.")
+                continue
+            else:
+                raise WlanPolicyControllerError(
+                    f'Expected network "{ssid}" to be in state {expected_states}, '
+                    f"got {network.connection_state}"
+                )
+
+            # Successfully converged on expected state and status
+            return network.connection_state
+
+        if network is None:
+            raise WlanPolicyControllerError(f"Timed out trying to find ssid: {ssid}")
+        raise WlanPolicyControllerError(
+            f'Timed out waiting for "{ssid}" to reach state {expected_states} and '
+            f"status {expected_status}"
+        )
+
+    def wait_for_client_state(
+        self,
+        expected_state: WlanClientState,
+        timeout_sec: int = DEFAULT_GET_UPDATE_TIMEOUT,
+    ) -> None:
+        """Waits until the client converges to expected state.
+
+        Args:
+            expected_state: The client state we are waiting to see.
+            timeout_sec: Duration to wait for the desired_state.
+
+        Raises:
+            WlanPolicyControllerError: If client still has not converged to expected
+                state at end of timeout.
+        """
+        self.honeydew.wlan_policy.set_new_update_listener()
+
+        last_err: TimeoutError | None = None
+        end_time = time.time() + timeout_sec
+        while time.time() < end_time:
+            time_left = max(1, int(end_time - time.time()))
+            try:
+                client = self.honeydew.wlan_policy.get_update(timeout=time_left)
+            except TimeoutError as e:
+                last_err = e
+                continue
+            if client.state is not expected_state:
+                # Continue getting updates.
+                continue
+            else:
+                return
+        else:
+            self.log.error(
+                f"Client state did not converge to the expected state: {expected_state}"
+                f" Waited:{timeout_sec}s"
+            )
+            raise WlanPolicyControllerError from last_err
+
+    def wait_for_no_connections(
+        self, timeout_sec: int = DEFAULT_GET_UPDATE_TIMEOUT
+    ) -> None:
+        """Waits to see that there are no connections to the device.
+
+        Args:
+            timeout_sec: The time to wait to see no connections.
+
+        Raises:
+            WlanPolicyControllerError: If client update has no networks or if client
+                still has connections at end of timeout.
+        """
+        self.honeydew.wlan_policy.set_new_update_listener()
+
+        last_err: TimeoutError | None = None
+        end_time = time.time() + timeout_sec
+        while time.time() < end_time:
+            curr_connected_networks: list[NetworkState] = []
+            time_left = max(1, int(end_time - time.time()))
+            try:
+                client = self.honeydew.wlan_policy.get_update(timeout=time_left)
+            except TimeoutError as e:
+                # Retry to handle the cases in negative testing where we expect
+                # to receive an 'error'.
+                last_err = e
+                continue
+
+            # Iterate through networks checking to see if any are still connected.
+            for network in client.networks:
+                if network.connection_state in {
+                    ConnectionState.CONNECTING,
+                    ConnectionState.CONNECTED,
+                }:
+                    curr_connected_networks.append(network)
+
+            if len(curr_connected_networks) != 0:
+                # Continue getting updates.
+                continue
+            else:
+                return
+
+        self.log.error(f"Networks still connected. Waited: {timeout_sec}s")
+        raise WlanPolicyControllerError from last_err
+
+    def remove_and_preserve_networks_and_client_state(self) -> PreservedState:
+        """Preserves networks already saved on devices before removing them.
+
+        This method is used to set up a clean test environment. Records the state of
+        client connections before tests.
+
+        Returns:
+            PreservedState: State of the client containing NetworkConfigs and client
+                connection state.
+        """
+        client = self.honeydew.wlan_policy.get_update()
+        networks = self.honeydew.wlan_policy.get_saved_networks()
+        self.honeydew.wlan_policy.remove_all_networks()
+        self.log.info("Saved networks cleared and preserved.")
+        return PreservedState(
+            saved_networks=networks, client_connections_state=client.state
+        )
+
+    def restore_preserved_networks_and_client_state(self) -> None:
+        """Restore preserved networks and client state onto device."""
+        if self.preserved_networks_and_client_state is None:
+            self.log.info("No preserved networks or client state to restore")
+            return
+
+        self.honeydew.wlan_policy.remove_all_networks()
+
+        saved_networks = self.preserved_networks_and_client_state.saved_networks
+        if saved_networks is not None:
+            for network in saved_networks:
+                try:
+                    self.honeydew.wlan_policy.save_network(
+                        network.ssid,
+                        network.security_type,
+                        network.credential_value,
+                    )
+                except HoneydewWlanError as e:
+                    self.log.warning(
+                        'Failed to restore network "%s": %s', network.ssid, e
+                    )
+
+        client_state = self.preserved_networks_and_client_state.client_connections_state
+        if client_state is not None:
+            if client_state is WlanClientState.CONNECTIONS_ENABLED:
+                self.honeydew.wlan_policy.start_client_connections()
+            else:
+                self.honeydew.wlan_policy.stop_client_connections()
+
+        self.log.info("Preserved networks and client state restored.")
+        self.preserved_networks_and_client_state = None
diff --git a/src/antlion/controllers/fuchsia_lib/package_server.py b/packages/antlion/controllers/fuchsia_lib/package_server.py
similarity index 89%
rename from src/antlion/controllers/fuchsia_lib/package_server.py
rename to packages/antlion/controllers/fuchsia_lib/package_server.py
index d497e96..96cfbf8 100644
--- a/src/antlion/controllers/fuchsia_lib/package_server.py
+++ b/packages/antlion/controllers/fuchsia_lib/package_server.py
@@ -15,25 +15,23 @@
 # limitations under the License.
 
 import json
+import logging
 import os
 import shutil
 import socket
 import subprocess
 import tarfile
 import tempfile
-
 from dataclasses import dataclass
 from datetime import datetime
-from typing import TextIO, List, Optional
+from typing import TextIO
 
-from antlion import context
-from antlion import logger
-from antlion import signals
-from antlion import utils
+from mobly import logger, signals
 
-from antlion.controllers.fuchsia_lib.ssh import SSHError, SSHProvider
+from antlion import context, utils
+from antlion.controllers.fuchsia_lib.ssh import SSHProvider
 from antlion.net import wait_for_port
-from antlion.tracelogger import TraceLogger
+from antlion.runner import CalledProcessError
 
 DEFAULT_FUCHSIA_REPO_NAME = "fuchsia.com"
 PM_SERVE_STOP_TIMEOUT_SEC = 5
@@ -46,17 +44,17 @@
 def random_port() -> int:
     s = socket.socket()
     s.bind(("", 0))
-    return s.getsockname()[1]
+    return int(s.getsockname()[1])
 
 
 @dataclass
 class Route:
     """Represent a route in the routing table."""
 
-    preferred_source: Optional[str]
+    preferred_source: str | None
 
 
-def find_routes_to(dest_ip) -> List[Route]:
+def find_routes_to(dest_ip) -> list[Route]:
     """Find the routes used to reach a destination.
 
     Look through the routing table for the routes that would be used without
@@ -124,11 +122,16 @@
             packages_archive_path: Path to an archive containing the pm binary
                 and amber-files.
         """
-        self.log: TraceLogger = logger.create_tagged_trace_logger("pm")
+        self.log = logger.PrefixLoggerAdapter(
+            logging.getLogger(),
+            {
+                logger.PrefixLoggerAdapter.EXTRA_KEY_LOG_PREFIX: "[pm]",
+            },
+        )
 
-        self._server_log: Optional[TextIO] = None
-        self._server_proc: Optional[subprocess.Popen] = None
-        self._log_path: Optional[str] = None
+        self._server_log: TextIO | None = None
+        self._server_proc: subprocess.Popen | None = None
+        self._log_path: str | None = None
 
         self._tmp_dir = tempfile.mkdtemp(prefix="packages-")
         tar = tarfile.open(packages_archive_path, "r:gz")
@@ -213,9 +216,9 @@
         """
         # Remove any existing repositories that may be stale.
         try:
-            ssh.run(f"pkgctl repo rm fuchsia-pkg://{repo_name}")
-        except SSHError as e:
-            if "NOT_FOUND" not in e.result.stderr:
+            ssh.run(["pkgctl", "repo", "rm", f"fuchsia-pkg://{repo_name}"])
+        except CalledProcessError as e:
+            if b"NOT_FOUND" not in e.stderr:
                 raise e
 
         # Configure the device with the new repository.
diff --git a/packages/antlion/controllers/fuchsia_lib/sl4f.py b/packages/antlion/controllers/fuchsia_lib/sl4f.py
new file mode 100644
index 0000000..f8ab1a7
--- /dev/null
+++ b/packages/antlion/controllers/fuchsia_lib/sl4f.py
@@ -0,0 +1,91 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import ipaddress
+import logging
+
+from mobly import logger
+
+from antlion.controllers.fuchsia_lib.ssh import FuchsiaSSHProvider
+from antlion.controllers.fuchsia_lib.wlan_deprecated_configuration_lib import (
+    FuchsiaWlanDeprecatedConfigurationLib,
+)
+from antlion.net import wait_for_port
+from antlion.runner import CalledProcessError
+
+DEFAULT_SL4F_PORT = 80
+START_SL4F_V2_CMD = "start_sl4f"
+
+
+class SL4F:
+    """Module for Fuchsia devices to interact with the SL4F tool.
+
+    Attributes:
+        ssh: Transport to start and stop SL4F.
+        address: http address for SL4F server including SL4F port.
+        log: Logger for the device-specific instance of SL4F.
+    """
+
+    def __init__(
+        self,
+        ssh: FuchsiaSSHProvider,
+        port: int = DEFAULT_SL4F_PORT,
+    ) -> None:
+        """
+        Args:
+            ssh: Transport to start and stop SL4F.
+            port: Port for the SL4F server to listen on.
+        """
+        ip = ipaddress.ip_address(ssh.config.host_name)
+        if ip.version == 4:
+            self.address = f"http://{ip}:{port}"
+        elif ip.version == 6:
+            self.address = f"http://[{ip}]:{port}"
+
+        self.log = logger.PrefixLoggerAdapter(
+            logging.getLogger(),
+            {
+                logger.PrefixLoggerAdapter.EXTRA_KEY_LOG_PREFIX: f"[SL4F | {self.address}]",
+            },
+        )
+
+        try:
+            ssh.stop_component("sl4f")
+            ssh.run(START_SL4F_V2_CMD)
+        except CalledProcessError:
+            # TODO(fxbug.dev/42181764) Remove support to run SL4F in CFv1 mode
+            # once ACTS no longer use images that comes with only CFv1 SL4F.
+            self.log.warning(
+                "Running SL4F in CFv1 mode, "
+                "this is deprecated for images built after 5/9/2022, "
+                "see https://fxbug.dev/42157029 for more info."
+            )
+            ssh.stop_component("sl4f")
+            ssh.start_v1_component("sl4f")
+
+        try:
+            wait_for_port(ssh.config.host_name, port)
+            self.log.info("SL4F server is reachable")
+        except TimeoutError as e:
+            raise TimeoutError("SL4F server is unreachable") from e
+
+        self._init_libraries()
+
+    def _init_libraries(self) -> None:
+        # Grabs command from FuchsiaWlanDeprecatedConfigurationLib
+        self.wlan_deprecated_configuration_lib = FuchsiaWlanDeprecatedConfigurationLib(
+            self.address
+        )
diff --git a/src/antlion/controllers/fuchsia_lib/ssh.py b/packages/antlion/controllers/fuchsia_lib/ssh.py
similarity index 86%
rename from src/antlion/controllers/fuchsia_lib/ssh.py
rename to packages/antlion/controllers/fuchsia_lib/ssh.py
index 1d1f421..94e2001 100644
--- a/src/antlion/controllers/fuchsia_lib/ssh.py
+++ b/packages/antlion/controllers/fuchsia_lib/ssh.py
@@ -16,7 +16,8 @@
 
 import time
 
-from antlion.capabilities.ssh import SSHError, SSHProvider
+from antlion.capabilities.ssh import SSHProvider
+from antlion.runner import CalledProcessError
 
 DEFAULT_SSH_USER: str = "fuchsia"
 DEFAULT_SSH_PRIVATE_KEY: str = "~/.ssh/fuchsia_ed25519"
@@ -45,15 +46,16 @@
         """
         # The "run -d" command will hang when executed without a pseudo-tty
         # allocated.
+        self.config.force_tty = True
         self.run(
             f"run -d fuchsia-pkg://{repo}/{component}#meta/{component}.cmx",
-            force_tty=True,
         )
+        self.config.force_tty = False
 
         timeout = time.perf_counter() + timeout_sec
         while True:
             ps_cmd = self.run("ps")
-            if f"{component}.cmx" in ps_cmd.stdout:
+            if f"{component}.cmx" in ps_cmd.stdout.decode("utf-8"):
                 return
             if time.perf_counter() > timeout:
                 raise TimeoutError(
@@ -70,10 +72,10 @@
         suffix = "cm" if is_cfv2_component else "cmx"
 
         try:
-            self.run(f"killall {component}.{suffix}")
+            self.run(["killall", f"{component}.{suffix}"])
             self.log.info(f"Stopped component: {component}.{suffix}")
-        except SSHError as e:
-            if "no tasks found" in e.result.stderr:
+        except CalledProcessError as e:
+            if b"no tasks found" in e.stderr:
                 self.log.debug(f"Could not find component: {component}.{suffix}")
                 return
             raise e
diff --git a/src/antlion/controllers/fuchsia_lib/wlan_deprecated_configuration_lib.py b/packages/antlion/controllers/fuchsia_lib/wlan_deprecated_configuration_lib.py
similarity index 97%
rename from src/antlion/controllers/fuchsia_lib/wlan_deprecated_configuration_lib.py
rename to packages/antlion/controllers/fuchsia_lib/wlan_deprecated_configuration_lib.py
index a53698b..df3f66e 100644
--- a/src/antlion/controllers/fuchsia_lib/wlan_deprecated_configuration_lib.py
+++ b/packages/antlion/controllers/fuchsia_lib/wlan_deprecated_configuration_lib.py
@@ -14,7 +14,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from antlion import logger
 from antlion.controllers.fuchsia_lib.base_lib import BaseLib
 
 
diff --git a/src/antlion/controllers/iperf_client.py b/packages/antlion/controllers/iperf_client.py
similarity index 67%
rename from src/antlion/controllers/iperf_client.py
rename to packages/antlion/controllers/iperf_client.py
index 9ad6efc..a24330d 100644
--- a/src/antlion/controllers/iperf_client.py
+++ b/packages/antlion/controllers/iperf_client.py
@@ -14,31 +14,32 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+from __future__ import annotations
+
 import logging
 import os
-import subprocess
 import socket
+import subprocess
 import threading
+from abc import ABC, abstractmethod
 
 from antlion import context
-from antlion import utils
+from antlion.capabilities.ssh import SSHConfig
 from antlion.controllers.adb_lib.error import AdbCommandError
 from antlion.controllers.android_device import AndroidDevice
 from antlion.controllers.fuchsia_lib.ssh import SSHProvider
-from antlion.controllers.iperf_server import _AndroidDeviceBridge
-from antlion.controllers.utils_lib.ssh import connection
-from antlion.controllers.utils_lib.ssh import settings
-from antlion.libs.proc import job
+from antlion.controllers.utils_lib.commands.date import LinuxDateCommand
+from antlion.types import ControllerConfig, Json
+from antlion.validation import MapValidator
 
-MOBLY_CONTROLLER_CONFIG_NAME = "IPerfClient"
-ACTS_CONTROLLER_REFERENCE_NAME = "iperf_clients"
+MOBLY_CONTROLLER_CONFIG_NAME: str = "IPerfClient"
 
 
 class IPerfError(Exception):
     """Raised on execution errors of iPerf."""
 
 
-def create(configs):
+def create(configs: list[ControllerConfig]) -> list[IPerfClientBase]:
     """Factory method for iperf clients.
 
     The function creates iperf clients based on at least one config.
@@ -49,18 +50,15 @@
     Args:
         configs: config parameters for the iperf server
     """
-    results = []
-    for c in configs:
-        if type(c) is dict and "AndroidDevice" in c:
-            results.append(
-                IPerfClientOverAdb(
-                    c["AndroidDevice"], test_interface=c.get("test_interface")
-                )
-            )
-        elif type(c) is dict and "ssh_config" in c:
+    results: list[IPerfClientBase] = []
+    for config in configs:
+        c = MapValidator(config)
+        if "ssh_config" in config:
             results.append(
                 IPerfClientOverSsh(
-                    c["ssh_config"], test_interface=c.get("test_interface")
+                    SSHProvider(SSHConfig.from_config(c.get(dict, "ssh_config"))),
+                    test_interface=c.get(str, "test_interface"),
+                    sync_date=True,
                 )
             )
         else:
@@ -68,21 +66,20 @@
     return results
 
 
-def get_info(iperf_clients):
-    """Placeholder for info about iperf clients
-
-    Returns:
-        None
-    """
-    return None
-
-
-def destroy(_):
+def destroy(objects: list[IPerfClientBase]) -> None:
     # No cleanup needed.
     pass
 
 
-class IPerfClientBase(object):
+def get_info(objects: list[IPerfClientBase]) -> list[Json]:
+    return []
+
+
+class RouteNotFound(ConnectionError):
+    """Failed to find a route to the iperf server."""
+
+
+class IPerfClientBase(ABC):
     """The Base class for all IPerfClients.
 
     This base class is responsible for synchronizing the logging to prevent
@@ -96,8 +93,19 @@
 
     __log_file_lock = threading.Lock()
 
+    @property
+    @abstractmethod
+    def test_interface(self) -> str | None:
+        """Find the test interface.
+
+        Returns:
+            Name of the interface used to communicate with server_ap, or None if
+            not set.
+        """
+        ...
+
     @staticmethod
-    def _get_full_file_path(tag=""):
+    def _get_full_file_path(tag: str = "") -> str:
         """Returns the full file path for the IPerfClient log file.
 
         Note: If the directory for the file path does not exist, it will be
@@ -121,7 +129,14 @@
 
         return os.path.join(full_out_dir, out_file_name)
 
-    def start(self, ip, iperf_args, tag, timeout=3600, iperf_binary=None):
+    def start(
+        self,
+        ip: str,
+        iperf_args: str,
+        tag: str,
+        timeout: int = 3600,
+        iperf_binary: str | None = None,
+    ) -> str:
         """Starts iperf client, and waits for completion.
 
         Args:
@@ -142,7 +157,18 @@
 class IPerfClient(IPerfClientBase):
     """Class that handles iperf3 client operations."""
 
-    def start(self, ip, iperf_args, tag, timeout=3600, iperf_binary=None):
+    @property
+    def test_interface(self) -> str | None:
+        return None
+
+    def start(
+        self,
+        ip: str,
+        iperf_args: str,
+        tag: str,
+        timeout: int = 3600,
+        iperf_binary: str | None = None,
+    ) -> str:
         """Starts iperf client, and waits for completion.
 
         Args:
@@ -163,7 +189,7 @@
             )
             iperf_binary = "iperf3"
         else:
-            logging.debug("Using iperf3 binary located at %s" % iperf_binary)
+            logging.debug(f"Using iperf3 binary located at {iperf_binary}")
         iperf_cmd = [str(iperf_binary), "-c", ip] + iperf_args.split(" ")
         full_out_path = self._get_full_file_path(tag)
 
@@ -178,28 +204,30 @@
 
     def __init__(
         self,
-        ssh_config: str,
-        test_interface: str = None,
-        ssh_provider: SSHProvider = None,
+        ssh_provider: SSHProvider,
+        test_interface: str | None = None,
+        sync_date: bool = True,
     ):
         self._ssh_provider = ssh_provider
-        if not self._ssh_provider:
-            self._ssh_settings = settings.from_config(ssh_config)
-            if not (
-                utils.is_valid_ipv4_address(self._ssh_settings.hostname)
-                or utils.is_valid_ipv6_address(self._ssh_settings.hostname)
-            ):
-                mdns_ip = utils.get_fuchsia_mdns_ipv6_address(
-                    self._ssh_settings.hostname
-                )
-                if mdns_ip:
-                    self._ssh_settings.hostname = mdns_ip
-        self._ssh_session = None
-        self.start_ssh()
+        self._test_interface = test_interface
 
-        self.test_interface = test_interface
+        if sync_date:
+            # iperf clients are not given internet access, so their system time
+            # needs to be manually set to be accurate.
+            LinuxDateCommand(self._ssh_provider).sync()
 
-    def start(self, ip, iperf_args, tag, timeout=3600, iperf_binary=None):
+    @property
+    def test_interface(self) -> str | None:
+        return self._test_interface
+
+    def start(
+        self,
+        ip: str,
+        iperf_args: str,
+        tag: str,
+        timeout: int = 3600,
+        iperf_binary: str | None = None,
+    ) -> str:
         """Starts iperf client, and waits for completion.
 
         Args:
@@ -220,49 +248,31 @@
             )
             iperf_binary = "iperf3"
         else:
-            logging.debug("Using iperf3 binary located at %s" % iperf_binary)
-        iperf_cmd = "{} -c {} {}".format(iperf_binary, ip, iperf_args)
+            logging.debug(f"Using iperf3 binary located at {iperf_binary}")
+        iperf_cmd = f"{iperf_binary} -c {ip} {iperf_args}"
         full_out_path = self._get_full_file_path(tag)
 
         try:
-            self.start_ssh()
-            if self._ssh_provider:
-                iperf_process = self._ssh_provider.run(iperf_cmd, timeout_sec=timeout)
-            else:
-                iperf_process = self._ssh_session.run(iperf_cmd, timeout=timeout)
+            iperf_process = self._ssh_provider.run(iperf_cmd, timeout_sec=timeout)
             iperf_output = iperf_process.stdout
-            with open(full_out_path, "w") as out_file:
+            with open(full_out_path, "wb") as out_file:
                 out_file.write(iperf_output)
         except socket.timeout:
             raise TimeoutError(
                 "Socket timeout. Timed out waiting for iperf " "client to finish."
             )
         except Exception as err:
-            logging.exception("iperf run failed: {}".format(err))
+            logging.exception(f"iperf run failed: {err}")
 
         return full_out_path
 
-    def start_ssh(self):
-        """Starts an ssh session to the iperf client."""
-        if self._ssh_provider:
-            # SSH sessions are created by the provider.
-            return
-        if not self._ssh_session:
-            self._ssh_session = connection.SshConnection(self._ssh_settings)
-
-    def close_ssh(self):
-        """Closes the ssh session to the iperf client, if one exists, preventing
-        connection reset errors when rebooting client device.
-        """
-        if self._ssh_session:
-            self._ssh_session.close()
-            self._ssh_session = None
-
 
 class IPerfClientOverAdb(IPerfClientBase):
     """Class that handles iperf3 operations over ADB devices."""
 
-    def __init__(self, android_device_or_serial, test_interface=None):
+    def __init__(
+        self, android_device: AndroidDevice, test_interface: str | None = None
+    ):
         """Creates a new IPerfClientOverAdb object.
 
         Args:
@@ -273,19 +283,21 @@
             test_interface: The network interface that will be used to send
                 traffic to the iperf server.
         """
-        self._android_device_or_serial = android_device_or_serial
-        self.test_interface = test_interface
+        self._android_device = android_device
+        self._test_interface = test_interface
 
     @property
-    def _android_device(self):
-        if isinstance(self._android_device_or_serial, AndroidDevice):
-            return self._android_device_or_serial
-        else:
-            return _AndroidDeviceBridge.android_devices()[
-                self._android_device_or_serial
-            ]
+    def test_interface(self) -> str | None:
+        return self._test_interface
 
-    def start(self, ip, iperf_args, tag, timeout=3600, iperf_binary=None):
+    def start(
+        self,
+        ip: str,
+        iperf_args: str,
+        tag: str,
+        timeout: int = 3600,
+        iperf_binary: str | None = None,
+    ) -> str:
         """Starts iperf client, and waits for completion.
 
         Args:
@@ -308,13 +320,13 @@
                 )
                 iperf_binary = "iperf3"
             else:
-                logging.debug("Using iperf3 binary located at %s" % iperf_binary)
-            iperf_cmd = "{} -c {} {}".format(iperf_binary, ip, iperf_args)
+                logging.debug(f"Using iperf3 binary located at {iperf_binary}")
+            iperf_cmd = f"{iperf_binary} -c {ip} {iperf_args}"
             out = self._android_device.adb.shell(str(iperf_cmd), timeout=timeout)
             clean_out = out.split("\n")
             if "error" in clean_out[0].lower():
                 raise IPerfError(clean_out)
-        except (job.TimeoutError, AdbCommandError):
+        except (subprocess.TimeoutExpired, AdbCommandError):
             logging.warning("TimeoutError: Iperf measurement failed.")
 
         full_out_path = self._get_full_file_path(tag)
diff --git a/packages/antlion/controllers/iperf_server.py b/packages/antlion/controllers/iperf_server.py
new file mode 100755
index 0000000..adcc833
--- /dev/null
+++ b/packages/antlion/controllers/iperf_server.py
@@ -0,0 +1,636 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import annotations
+
+import json
+import logging
+import math
+import os
+import shlex
+import subprocess
+import threading
+import time
+from typing import IO
+
+from mobly import logger, signals
+
+from antlion import context, utils
+from antlion.controllers.utils_lib.commands import nmcli
+from antlion.controllers.utils_lib.commands.command import optional, require
+from antlion.controllers.utils_lib.commands.journalctl import LinuxJournalctlCommand
+from antlion.controllers.utils_lib.ssh import connection, settings
+from antlion.libs.proc import job
+from antlion.types import ControllerConfig, Json
+from antlion.validation import MapValidator
+
+MOBLY_CONTROLLER_CONFIG_NAME: str = "IPerfServer"
+KILOBITS = 1024
+MEGABITS = KILOBITS * 1024
+GIGABITS = MEGABITS * 1024
+BITS_IN_BYTE = 8
+
+
+def create(
+    configs: list[ControllerConfig],
+) -> list[IPerfServer | IPerfServerOverSsh]:
+    """Factory method for iperf servers.
+
+    The function creates iperf servers based on at least one config.
+    If configs only specify a port number, a regular local IPerfServer object
+    will be created. If configs contain ssh settings or an AndroidDevice,
+    remote iperf servers will be started on those devices
+
+    Args:
+        configs: config parameters for the iperf server
+    """
+    results: list[IPerfServer | IPerfServerOverSsh] = []
+    for c in configs:
+        if isinstance(c, (str, int)) and str(c).isdigit():
+            results.append(IPerfServer(int(c)))
+        elif isinstance(c, dict) and "ssh_config" in c and "port" in c:
+            config = MapValidator(c)
+            results.append(
+                IPerfServerOverSsh(
+                    settings.from_config(config.get(dict, "ssh_config")),
+                    config.get(int, "port"),
+                    test_interface=config.get(str, "test_interface"),
+                    use_killall=config.get(bool, "use_killall", False),
+                )
+            )
+        else:
+            raise ValueError(
+                f"Config entry {c} in {configs} is not a valid IPerfServer config."
+            )
+    return results
+
+
+def destroy(
+    objects: list[IPerfServer | IPerfServerOverSsh],
+) -> None:
+    for iperf_server in objects:
+        try:
+            iperf_server.stop()
+        except Exception:
+            logging.exception(f"Unable to properly clean up {iperf_server}.")
+
+
+def get_info(
+    objects: list[IPerfServer | IPerfServerOverSsh],
+) -> list[Json]:
+    return []
+
+
+class IPerfResult(object):
+    def __init__(self, result_path, reporting_speed_units="Mbytes"):
+        """Loads iperf result from file.
+
+        Loads iperf result from JSON formatted server log. File can be accessed
+        before or after server is stopped. Note that only the first JSON object
+        will be loaded and this funtion is not intended to be used with files
+        containing multiple iperf client runs.
+        """
+        # if result_path isn't a path, treat it as JSON
+        self.reporting_speed_units = reporting_speed_units
+        if not os.path.exists(result_path):
+            self.result = json.loads(result_path)
+        else:
+            try:
+                with open(result_path, "r") as f:
+                    iperf_output = f.readlines()
+                    if "}\n" in iperf_output:
+                        iperf_output = iperf_output[: iperf_output.index("}\n") + 1]
+                    iperf_string = "".join(iperf_output)
+                    iperf_string = iperf_string.replace("nan", "0")
+                    self.result = json.loads(iperf_string)
+            except ValueError:
+                with open(result_path, "r") as f:
+                    # Possibly a result from interrupted iperf run,
+                    # skip first line and try again.
+                    lines = f.readlines()[1:]
+                    self.result = json.loads("".join(lines))
+
+    def _has_data(self):
+        """Checks if the iperf result has valid throughput data.
+
+        Returns:
+            True if the result contains throughput data. False otherwise.
+        """
+        return ("end" in self.result) and (
+            "sum_received" in self.result["end"] or "sum" in self.result["end"]
+        )
+
+    def _get_reporting_speed(
+        self, network_speed_in_bits_per_second: int | float
+    ) -> float:
+        """Sets the units for the network speed reporting based on how the
+        object was initiated.  Defaults to Megabytes per second.  Currently
+        supported, bits per second (bits), kilobits per second (kbits), megabits
+        per second (mbits), gigabits per second (gbits), bytes per second
+        (bytes), kilobytes per second (kbytes), megabytes per second (mbytes),
+        gigabytes per second (gbytes).
+
+        Args:
+            network_speed_in_bits_per_second: The network speed from iperf in
+                bits per second.
+
+        Returns:
+            The value of the throughput in the appropriate units.
+        """
+        speed_divisor = 1
+        if self.reporting_speed_units[1:].lower() == "bytes":
+            speed_divisor = speed_divisor * BITS_IN_BYTE
+        if self.reporting_speed_units[0:1].lower() == "k":
+            speed_divisor = speed_divisor * KILOBITS
+        if self.reporting_speed_units[0:1].lower() == "m":
+            speed_divisor = speed_divisor * MEGABITS
+        if self.reporting_speed_units[0:1].lower() == "g":
+            speed_divisor = speed_divisor * GIGABITS
+        return network_speed_in_bits_per_second / speed_divisor
+
+    def get_json(self):
+        """Returns the raw json output from iPerf."""
+        return self.result
+
+    @property
+    def error(self):
+        return self.result.get("error", None)
+
+    @property
+    def avg_rate(self):
+        """Average UDP rate in MB/s over the entire run.
+
+        This is the average UDP rate observed at the terminal the iperf result
+        is pulled from. According to iperf3 documentation this is calculated
+        based on bytes sent and thus is not a good representation of the
+        quality of the link. If the result is not from a success run, this
+        property is None.
+        """
+        if not self._has_data() or "sum" not in self.result["end"]:
+            return None
+        bps = self.result["end"]["sum"]["bits_per_second"]
+        return self._get_reporting_speed(bps)
+
+    @property
+    def avg_receive_rate(self):
+        """Average receiving rate in MB/s over the entire run.
+
+        This data may not exist if iperf was interrupted. If the result is not
+        from a success run, this property is None.
+        """
+        if not self._has_data() or "sum_received" not in self.result["end"]:
+            return None
+        bps = self.result["end"]["sum_received"]["bits_per_second"]
+        return self._get_reporting_speed(bps)
+
+    @property
+    def avg_send_rate(self):
+        """Average sending rate in MB/s over the entire run.
+
+        This data may not exist if iperf was interrupted. If the result is not
+        from a success run, this property is None.
+        """
+        if not self._has_data() or "sum_sent" not in self.result["end"]:
+            return None
+        bps = self.result["end"]["sum_sent"]["bits_per_second"]
+        return self._get_reporting_speed(bps)
+
+    @property
+    def instantaneous_rates(self):
+        """Instantaneous received rate in MB/s over entire run.
+
+        This data may not exist if iperf was interrupted. If the result is not
+        from a success run, this property is None.
+        """
+        if not self._has_data():
+            return None
+        intervals = [
+            self._get_reporting_speed(interval["sum"]["bits_per_second"])
+            for interval in self.result["intervals"]
+        ]
+        return intervals
+
+    @property
+    def std_deviation(self):
+        """Standard deviation of rates in MB/s over entire run.
+
+        This data may not exist if iperf was interrupted. If the result is not
+        from a success run, this property is None.
+        """
+        return self.get_std_deviation(0)
+
+    def get_std_deviation(self, iperf_ignored_interval):
+        """Standard deviation of rates in MB/s over entire run.
+
+        This data may not exist if iperf was interrupted. If the result is not
+        from a success run, this property is None. A configurable number of
+        beginning (and the single last) intervals are ignored in the
+        calculation as they are inaccurate (e.g. the last is from a very small
+        interval)
+
+        Args:
+            iperf_ignored_interval: number of iperf interval to ignored in
+            calculating standard deviation
+
+        Returns:
+            The standard deviation.
+        """
+        if not self._has_data():
+            return None
+        instantaneous_rates = self.instantaneous_rates[iperf_ignored_interval:-1]
+        avg_rate = math.fsum(instantaneous_rates) / len(instantaneous_rates)
+        sqd_deviations = [(rate - avg_rate) ** 2 for rate in instantaneous_rates]
+        std_dev = math.sqrt(math.fsum(sqd_deviations) / (len(sqd_deviations) - 1))
+        return std_dev
+
+
+class IPerfServerBase(object):
+    # Keeps track of the number of IPerfServer logs to prevent file name
+    # collisions.
+    __log_file_counter = 0
+
+    __log_file_lock = threading.Lock()
+
+    def __init__(self, port: int):
+        self._port = port
+        # TODO(markdr): We shouldn't be storing the log files in an array like
+        # this. Nobody should be reading this property either. Instead, the
+        # IPerfResult should be returned in stop() with all the necessary info.
+        # See aosp/1012824 for a WIP implementation.
+        self.log_files: list[str] = []
+
+    @property
+    def port(self) -> int:
+        raise NotImplementedError("port must be specified.")
+
+    @property
+    def started(self) -> bool:
+        raise NotImplementedError("started must be specified.")
+
+    def start(self, extra_args: str = "", tag: str = "") -> None:
+        """Starts an iperf3 server.
+
+        Args:
+            extra_args: Extra arguments to start iperf server with.
+            tag: Appended to log file name to identify logs from different
+                iperf runs.
+        """
+        raise NotImplementedError("start() must be specified.")
+
+    def stop(self) -> str | None:
+        """Stops the iperf server.
+
+        Returns:
+            The name of the log file generated from the terminated session, or
+            None if iperf wasn't started or ran successfully.
+        """
+        raise NotImplementedError("stop() must be specified.")
+
+    def _get_full_file_path(self, tag: str | None = None) -> str:
+        """Returns the full file path for the IPerfServer log file.
+
+        Note: If the directory for the file path does not exist, it will be
+        created.
+
+        Args:
+            tag: The tag passed in to the server run.
+        """
+        out_dir = self.log_path
+
+        with IPerfServerBase.__log_file_lock:
+            tags = [tag, IPerfServerBase.__log_file_counter]
+            out_file_name = "IPerfServer,%s.log" % (
+                ",".join([str(x) for x in tags if x != "" and x is not None])
+            )
+            IPerfServerBase.__log_file_counter += 1
+
+        file_path = os.path.join(out_dir, out_file_name)
+        self.log_files.append(file_path)
+        return file_path
+
+    @property
+    def log_path(self) -> str:
+        current_context = context.get_current_context()
+        full_out_dir = os.path.join(
+            current_context.get_full_output_path(), f"IPerfServer{self.port}"
+        )
+
+        # Ensure the directory exists.
+        os.makedirs(full_out_dir, exist_ok=True)
+
+        return full_out_dir
+
+
+def _get_port_from_ss_output(ss_output, pid):
+    pid = str(pid)
+    lines = ss_output.split("\n")
+    for line in lines:
+        if pid in line:
+            # Expected format:
+            # tcp LISTEN  0 5 *:<PORT>  *:* users:(("cmd",pid=<PID>,fd=3))
+            return line.split()[4].split(":")[-1]
+    else:
+        raise ProcessLookupError("Could not find started iperf3 process.")
+
+
+class IPerfServer(IPerfServerBase):
+    """Class that handles iperf server commands on localhost."""
+
+    def __init__(self, port: int = 5201) -> None:
+        super().__init__(port)
+        self._hinted_port = port
+        self._current_log_file: str | None = None
+        self._iperf_process: subprocess.Popen[bytes] | None = None
+        self._last_opened_file: IO[bytes] | None = None
+
+    @property
+    def port(self) -> int:
+        return self._port
+
+    @property
+    def started(self) -> bool:
+        return self._iperf_process is not None
+
+    def start(self, extra_args: str = "", tag: str = "") -> None:
+        """Starts iperf server on local machine.
+
+        Args:
+            extra_args: A string representing extra arguments to start iperf
+                server with.
+            tag: Appended to log file name to identify logs from different
+                iperf runs.
+        """
+        if self._iperf_process is not None:
+            return
+
+        self._current_log_file = self._get_full_file_path(tag)
+
+        # Run an iperf3 server on the hinted port with JSON output.
+        command = ["iperf3", "-s", "-p", str(self._hinted_port), "-J"]
+
+        command.extend(shlex.split(extra_args))
+
+        if self._last_opened_file:
+            self._last_opened_file.close()
+        self._last_opened_file = open(self._current_log_file, "wb")
+        self._iperf_process = subprocess.Popen(
+            command, stdout=self._last_opened_file, stderr=subprocess.DEVNULL
+        )
+        for attempts_left in reversed(range(3)):
+            try:
+                self._port = int(
+                    _get_port_from_ss_output(
+                        job.run("ss -l -p -n | grep iperf").stdout,
+                        self._iperf_process.pid,
+                    )
+                )
+                break
+            except ProcessLookupError:
+                if attempts_left == 0:
+                    raise
+                logging.debug("iperf3 process not started yet.")
+                time.sleep(0.01)
+
+    def stop(self) -> str | None:
+        """Stops the iperf server.
+
+        Returns:
+            The name of the log file generated from the terminated session, or
+            None if iperf wasn't started or ran successfully.
+        """
+        if self._iperf_process is None:
+            return None
+
+        if self._last_opened_file:
+            self._last_opened_file.close()
+            self._last_opened_file = None
+
+        self._iperf_process.terminate()
+        self._iperf_process = None
+
+        return self._current_log_file
+
+    def __del__(self) -> None:
+        self.stop()
+
+
+class IPerfServerOverSsh(IPerfServerBase):
+    """Class that handles iperf3 operations on remote machines."""
+
+    def __init__(
+        self,
+        ssh_settings: settings.SshSettings,
+        port: int,
+        test_interface: str,
+        use_killall: bool = False,
+    ):
+        super().__init__(port)
+        self.test_interface = test_interface
+        self.hostname = ssh_settings.hostname
+        self.log = logger.PrefixLoggerAdapter(
+            logging.getLogger(),
+            {
+                logger.PrefixLoggerAdapter.EXTRA_KEY_LOG_PREFIX: f"[IPerfServer | {self.hostname}]",
+            },
+        )
+        self._ssh_settings = ssh_settings
+        self._ssh_session: connection.SshConnection | None = connection.SshConnection(
+            ssh_settings
+        )
+        self._journalctl = require(LinuxJournalctlCommand(self._ssh_session))
+
+        self._iperf_pid: str | None = None
+        self._current_tag: str | None = None
+        self._use_killall = str(use_killall).lower() == "true"
+
+        # The control and test interfaces have to be different, otherwise
+        # performing a DHCP release+renewal risks severing the SSH connection
+        # and bricking the device.
+        control_interface = utils.get_interface_based_on_ip(
+            self._ssh_session, self.hostname
+        )
+        if control_interface == test_interface:
+            raise signals.TestAbortAll(
+                f"iperf server control interface ({control_interface}) cannot be the "
+                f"same as the test interface ({test_interface})."
+            )
+
+        # Disable NetworkManager on the test interface
+        self._nmcli = optional(nmcli.LinuxNmcliCommand(self._ssh_session))
+        if self._nmcli:
+            self._nmcli.setup_device(self.test_interface)
+
+    @property
+    def port(self) -> int:
+        return self._port
+
+    @property
+    def started(self) -> bool:
+        return self._iperf_pid is not None
+
+    def _get_remote_log_path(self) -> str:
+        return f"/tmp/iperf_server_port{self.port}.log"
+
+    def get_interface_ip_addresses(self, interface: str) -> dict[str, list[str]]:
+        """Gets all of the ip addresses, ipv4 and ipv6, associated with a
+           particular interface name.
+
+        Args:
+            interface: The interface name on the device, ie eth0
+
+        Returns:
+            A dictionary of the various IP addresses, keyed by type. See
+            utils.get_interface_ip_addresses.
+        """
+        return utils.get_interface_ip_addresses(self._get_ssh(), interface)
+
+    def renew_test_interface_ip_address(self) -> None:
+        """Renews the test interface's IPv4 address.
+
+        Necessary for changing DHCP scopes during a test.
+        """
+        utils.renew_linux_ip_address(self._get_ssh(), self.test_interface)
+
+    def get_addr(
+        self, addr_type: str = "ipv4_private", timeout_sec: int | None = None
+    ) -> str:
+        """Wait until a type of IP address on the test interface is available
+        then return it.
+        """
+        return utils.get_addr(
+            self._get_ssh(), self.test_interface, addr_type, timeout_sec
+        )
+
+    def _cleanup_iperf_port(self) -> None:
+        """Checks and kills zombie iperf servers occupying intended port."""
+        assert self._ssh_session is not None
+
+        netstat = self._ssh_session.run(["netstat", "-tupln"]).stdout.decode("utf-8")
+        for line in netstat.splitlines():
+            if "LISTEN" in line and "iperf3" in line and f":{self.port}" in line:
+                pid = int(line.split()[-1].split("/")[0])
+                logging.debug("Killing zombie server on port %i: %i", self.port, pid)
+                self._ssh_session.run(["kill", "-9", str(pid)])
+
+    def start(
+        self,
+        extra_args: str = "",
+        tag: str = "",
+        iperf_binary: str | None = None,
+    ) -> None:
+        """Starts iperf server on specified machine and port.
+
+        Args:
+            extra_args: Extra arguments to start iperf server with.
+            tag: Appended to log file name to identify logs from different
+                iperf runs.
+            iperf_binary: Location of iperf3 binary. If none, it is assumed the
+                binary is in the path.
+        """
+        if self.started:
+            return
+
+        self._cleanup_iperf_port()
+        if not iperf_binary:
+            logging.debug(
+                "No iperf3 binary specified.  " "Assuming iperf3 is in the path."
+            )
+            iperf_binary = "iperf3"
+        else:
+            logging.debug(f"Using iperf3 binary located at {iperf_binary}")
+        iperf_command = f"{iperf_binary} -s -J -p {self.port}"
+
+        cmd = f"{iperf_command} {extra_args} > {self._get_remote_log_path()}"
+
+        job_result = self._get_ssh().run_async(cmd)
+        self._iperf_pid = job_result.stdout.decode("utf-8")
+        self._current_tag = tag
+
+    def stop(self) -> str | None:
+        """Stops the iperf server.
+
+        Returns:
+            The name of the log file generated from the terminated session, or
+            None if iperf wasn't started or ran successfully.
+        """
+        if not self.started:
+            return None
+
+        ssh = self._get_ssh()
+
+        if self._use_killall:
+            ssh.run(["killall", "iperf3"], ignore_status=True)
+        elif self._iperf_pid:
+            ssh.run(["kill", "-9", self._iperf_pid])
+
+        iperf_result = ssh.run(f"cat {self._get_remote_log_path()}")
+
+        log_file = self._get_full_file_path(self._current_tag)
+        with open(log_file, "wb") as f:
+            f.write(iperf_result.stdout)
+
+        ssh.run(["rm", self._get_remote_log_path()])
+        self._iperf_pid = None
+        return log_file
+
+    def _get_ssh(self) -> connection.SshConnection:
+        if self._ssh_session is None:
+            self._ssh_session = connection.SshConnection(self._ssh_settings)
+
+            # Disable NetworkManager on the test interface
+            self._nmcli = optional(nmcli.LinuxNmcliCommand(self._ssh_session))
+            if self._nmcli:
+                self._nmcli.setup_device(self.test_interface)
+
+        return self._ssh_session
+
+    def close_ssh(self) -> None:
+        """Closes the ssh session to the iperf server, if one exists, preventing
+        connection reset errors when rebooting server device.
+        """
+        if self.started:
+            self.stop()
+        if self._ssh_session:
+            self._ssh_session.close()
+            self._ssh_session = None
+
+    def get_systemd_journal(self) -> str:
+        had_ssh = False if self._ssh_session is None else True
+
+        self._journalctl.set_runner(self._get_ssh())
+        logs = self._journalctl.logs()
+
+        if not had_ssh:
+            # Return to closed state
+            self.close_ssh()
+
+        return logs
+
+    def download_logs(self, path: str) -> None:
+        """Download all available logs to path.
+
+        Args:
+            path: Path to write logs to.
+        """
+        timestamp = logger.normalize_log_line_timestamp(
+            logger.epoch_to_log_line_timestamp(utils.get_current_epoch_time())
+        )
+
+        systemd_journal = self.get_systemd_journal()
+        systemd_journal_path = os.path.join(path, f"iperf_systemd_{timestamp}.log")
+        with open(systemd_journal_path, "a") as f:
+            f.write(systemd_journal)
+        self.log.info(f"Wrote systemd journal to {systemd_journal_path}")
diff --git a/packages/antlion/controllers/openwrt_ap.py b/packages/antlion/controllers/openwrt_ap.py
new file mode 100644
index 0000000..bb5c3a7
--- /dev/null
+++ b/packages/antlion/controllers/openwrt_ap.py
@@ -0,0 +1,517 @@
+#!/usr/bin/env python3
+#
+# Copyright 2024 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Controller for Open WRT access point."""
+
+from __future__ import annotations
+
+import logging
+import random
+import re
+import time
+from typing import Literal
+
+import yaml
+from mobly import logger, signals
+
+from antlion.controllers.openwrt_lib import (
+    network_settings,
+    wireless_config,
+    wireless_settings_applier,
+)
+from antlion.controllers.openwrt_lib.openwrt_constants import SYSTEM_INFO_CMD
+from antlion.controllers.openwrt_lib.openwrt_constants import (
+    OpenWrtModelMap as modelmap,
+)
+from antlion.controllers.openwrt_lib.openwrt_constants import OpenWrtWifiSetting
+from antlion.controllers.utils_lib.ssh import connection, settings
+from antlion.types import ControllerConfig, Json
+
+MOBLY_CONTROLLER_CONFIG_NAME: str = "OpenWrtAP"
+ACTS_CONTROLLER_REFERENCE_NAME = "access_points"
+# Security mode identifiers as written into OpenWrt's UCI wireless config.
+OWE_SECURITY = "owe"
+SAE_SECURITY = "sae"
+SAEMIXED_SECURITY = "sae-mixed"
+# NOTE(review): "0" presumably maps to UCI's disabled=0 (i.e. radio on) --
+# confirm against the code that consumes it.
+ENABLE_RADIO = "0"
+# NOTE(review): likely the 802.11w/PMF "required" level -- confirm.
+PMF_ENABLED = 2
+# Seconds that start_ap()/stop_ap() wait for the radios to change state.
+WAIT_TIME = 20
+# Radio names used when the AP model is not found in OpenWrtModelMap.
+DEFAULT_RADIOS = ("radio0", "radio1")
+
+
+def create(configs: list[ControllerConfig]) -> list[OpenWrtAP]:
+    """Creates ap controllers from a json config.
+
+    Creates an ap controller from either a list, or a single element. The element
+    can either be just the hostname or a dictionary containing the hostname and
+    username of the AP to connect to over SSH.
+
+    Args:
+      configs: The json configs that represent this controller.
+
+    Returns:
+      OpenWrtAP objects
+
+    Example:
+      Below is the config file entry for OpenWrtAP as a list. A testbed can have
+      1 or more APs to configure. Each AP has a "ssh_config" key to provide SSH
+      login information. OpenWrtAP#__init__() uses this to create SSH object.
+
+        "OpenWrtAP": [
+          {
+            "ssh_config": {
+              "user" : "root",
+              "host" : "192.168.1.1"
+            }
+          },
+          {
+            "ssh_config": {
+              "user" : "root",
+              "host" : "192.168.1.2"
+            }
+          }
+        ]
+    """
+    return [OpenWrtAP(c) for c in configs]
+
+
+def destroy(objects: list[OpenWrtAP]) -> None:
+    """Destroys a list of OpenWrtAP.
+
+    Cleans up each AP's wireless/network settings, then closes its SSH
+    connection.
+
+    Args:
+      objects: The list of OpenWrtAP to destroy.
+    """
+    for ap in objects:
+        ap.close()
+        ap.close_ssh()
+
+
+def get_info(objects: list[OpenWrtAP]) -> list[Json]:
+    """Get information on a list of access points.
+
+    Args:
+      objects: A list of OpenWrtAP.
+
+    Returns:
+      A list with each AP's SSH hostname.
+    """
+    return [ap.ssh_settings.hostname for ap in objects]
+
+
+# Maps band ("2g"/"5g") to a {SSID: BSSID} dictionary for that band.
+BSSIDMap = dict[Literal["2g", "5g"], dict[str, str]]
+
+
+class OpenWrtAP(object):
+    """An OpenWrtAP controller.
+
+    Attributes:
+      ssh: The ssh connection to the AP.
+      ssh_settings: The ssh settings being used by the ssh connection.
+      log: Logging object for OpenWrtAP.
+      wireless_setting: Object holding wireless configuration; None until
+        configure_ap() has been called.
+      network_setting: Object for network configuration.
+      model: OpenWrt HW model string (e.g. "NETGEAR_R8000"), or None if it
+        could not be determined.
+      radios: Radio interface names used for tests.
+    """
+
+    def __init__(self, config):
+        """Initialize AP.
+
+        Args:
+          config: Controller config dict; must contain an "ssh_config" entry
+            with the SSH login information for the AP.
+        """
+        self.ssh_settings = settings.from_config(config["ssh_config"])
+        self.ssh = connection.SshConnection(self.ssh_settings)
+        # Prefix every log line with the AP's hostname for multi-AP testbeds.
+        self.log = logger.PrefixLoggerAdapter(
+            logging.getLogger(),
+            {
+                logger.PrefixLoggerAdapter.EXTRA_KEY_LOG_PREFIX: f"[OpenWrtAP|{self.ssh_settings.hostname}]",
+            },
+        )
+        self.wireless_setting: (
+            wireless_settings_applier.WirelessSettingsApplier | None
+        ) = None
+        self.network_setting = network_settings.NetworkSettings(
+            self.ssh, self.ssh_settings, self.log
+        )
+        # Look up the radio names for this hardware model; fall back to the
+        # standard ("radio0", "radio1") pair for unknown models.
+        self.model = self.get_model_name()
+        if self.model in modelmap.__dict__:
+            self.radios = modelmap.__dict__[self.model]
+        else:
+            self.radios = DEFAULT_RADIOS
+
+    def configure_ap(
+        self,
+        wireless_configs: list[wireless_config.WirelessConfig],
+        channel_2g: int,
+        channel_5g: int,
+    ):
+        """Configure AP with the required settings.
+
+        Each test class inherits WifiBaseTest. Based on the test, we may need to
+        configure PSK, WEP, OPEN, ENT networks on 2G and 5G bands in any
+        combination. We call WifiBaseTest methods get_psk_network(),
+        get_open_network(), get_wep_network() and get_ent_network() to create
+        dictionaries which contains this information. 'wireless_configs' is a
+        list of such dictionaries. Example below configures 2 WiFi networks -
+        1 PSK 2G and 1 Open 5G on one AP. configure_ap() is called from
+        WifiBaseTest to configure the APs.
+
+        wireless_configs = [
+          {
+            '2g': {
+              'SSID': '2g_AkqXWPK4',
+              'security': 'psk2',
+              'password': 'YgYuXqDO9H',
+              'hiddenSSID': False
+            },
+          },
+          {
+            '5g': {
+              'SSID': '5g_8IcMR1Sg',
+              'security': 'none',
+              'hiddenSSID': False
+            },
+          }
+        ]
+
+        Args:
+          wireless_configs: list of network settings for 2G and 5G bands.
+          channel_2g: channel for 2G band.
+          channel_5g: channel for 5G band.
+        """
+        # NOTE(review): radios[1] and radios[0] are passed in that order --
+        # presumably (2g radio, 5g radio), matching the mapping used in
+        # enable_80211r(); confirm against WirelessSettingsApplier's signature.
+        self.wireless_setting = wireless_settings_applier.WirelessSettingsApplier(
+            self.ssh,
+            wireless_configs,
+            channel_2g,
+            channel_5g,
+            self.radios[1],
+            self.radios[0],
+        )
+        self.wireless_setting.apply_wireless_settings()
+
+    def start_ap(self):
+        """Starts the AP with the settings in /etc/config/wireless."""
+        self.ssh.run("wifi up")
+        curr_time = time.time()
+        while time.time() < curr_time + WAIT_TIME:
+            if self.get_wifi_status():
+                return
+            time.sleep(3)
+        if not self.get_wifi_status():
+            raise ValueError("Failed to turn on WiFi on the AP.")
+
+    def stop_ap(self):
+        """Stops the AP."""
+        self.ssh.run("wifi down")
+        curr_time = time.time()
+        while time.time() < curr_time + WAIT_TIME:
+            if not self.get_wifi_status():
+                return
+            time.sleep(3)
+        if self.get_wifi_status():
+            raise ValueError("Failed to turn off WiFi on the AP.")
+
+    def get_bssids_for_wifi_networks(self) -> BSSIDMap:
+        """Get BSSIDs for wifi networks configured.
+
+        Returns:
+          Dictionary of SSID - BSSID map for both bands.
+        """
+        bssid_map: BSSIDMap = {"2g": {}, "5g": {}}
+        for radio in self.radios:
+            ssid_ifname_map = self.get_ifnames_for_ssids(radio)
+            if radio == self.radios[0]:
+                for ssid, ifname in ssid_ifname_map.items():
+                    bssid_map["5g"][ssid] = self.get_bssid(ifname)
+            elif radio == self.radios[1]:
+                for ssid, ifname in ssid_ifname_map.items():
+                    bssid_map["2g"][ssid] = self.get_bssid(ifname)
+        return bssid_map
+
+    def get_ifnames_for_ssids(self, radio) -> dict[str, str]:
+        """Get interfaces for wifi networks.
+
+        Args:
+          radio: radio name (e.g. "radio0") to get the interfaces from.
+
+        Returns:
+          dictionary of ssid - ifname mappings; empty if the radio is down.
+        """
+        ssid_ifname_map: dict[str, str] = {}
+        str_output = self.ssh.run(f"wifi status {radio}").stdout.decode("utf-8")
+        # "wifi status" prints a JSON-like structure; stripping tabs and
+        # newlines and feeding it to the YAML safe loader turns it into a
+        # dict keyed by radio name.
+        wifi_status = yaml.load(
+            str_output.replace("\t", "").replace("\n", ""), Loader=yaml.SafeLoader
+        )
+        wifi_status = wifi_status[radio]
+        if wifi_status["up"]:
+            interfaces = wifi_status["interfaces"]
+            for config in interfaces:
+                ssid = config["config"]["ssid"]
+                ifname = config["ifname"]
+                ssid_ifname_map[ssid] = ifname
+        return ssid_ifname_map
+
+    def get_bssid(self, ifname):
+        """Get MAC address from an interface.
+
+        Args:
+          ifname: interface name of the corresponding MAC.
+
+        Returns:
+          BSSID of the interface.
+        """
+        ifconfig = self.ssh.run(f"ifconfig {ifname}").stdout.decode("utf-8")
+        mac_addr = ifconfig.split("\n")[0].split()[-1]
+        return mac_addr
+
+    def set_wpa_encryption(self, encryption):
+        """Set different encryptions to wpa or wpa2.
+
+        Args:
+          encryption: ccmp, tkip, or ccmp+tkip.
+        """
+        str_output = self.ssh.run("wifi status").stdout.decode("utf-8")
+        wifi_status = yaml.load(
+            str_output.replace("\t", "").replace("\n", ""), Loader=yaml.SafeLoader
+        )
+
+        # Counting how many interface are enabled.
+        total_interface = 0
+        for radio in self.radios:
+            num_interface = len(wifi_status[radio]["interfaces"])
+            total_interface += num_interface
+
+        # Iterates every interface to get and set wpa encryption.
+        default_extra_interface = 2
+        for i in range(total_interface + default_extra_interface):
+            origin_encryption = self.ssh.run(
+                f"uci get wireless.@wifi-iface[{i}].encryption"
+            ).stdout.decode("utf-8")
+            origin_psk_pattern = re.match(r"psk\b", origin_encryption)
+            target_psk_pattern = re.match(r"psk\b", encryption)
+            origin_psk2_pattern = re.match(r"psk2\b", origin_encryption)
+            target_psk2_pattern = re.match(r"psk2\b", encryption)
+
+            if origin_psk_pattern == target_psk_pattern:
+                self.ssh.run(
+                    f"uci set wireless.@wifi-iface[{i}].encryption={encryption}"
+                )
+
+            if origin_psk2_pattern == target_psk2_pattern:
+                self.ssh.run(
+                    f"uci set wireless.@wifi-iface[{i}].encryption={encryption}"
+                )
+
+        self.ssh.run("uci commit wireless")
+        self.ssh.run("wifi")
+
+    def set_password(self, pwd_5g=None, pwd_2g=None):
+        """Set password for individual interface.
+
+        Args:
+            pwd_5g: 8 ~ 63 chars, ascii letters and digits password for 5g network.
+            pwd_2g: 8 ~ 63 chars, ascii letters and digits password for 2g network.
+        """
+        if pwd_5g:
+            if len(pwd_5g) < 8 or len(pwd_5g) > 63:
+                self.log.error("Password must be 8~63 characters long")
+            # Only accept ascii letters and digits
+            elif not re.match("^[A-Za-z0-9]*$", pwd_5g):
+                self.log.error("Password must only contains ascii letters and digits")
+            else:
+                self.ssh.run(f"uci set wireless.@wifi-iface[{3}].key={pwd_5g}")
+                self.log.info(f"Set 5G password to :{pwd_5g}")
+
+        if pwd_2g:
+            if len(pwd_2g) < 8 or len(pwd_2g) > 63:
+                self.log.error("Password must be 8~63 characters long")
+            # Only accept ascii letters and digits
+            elif not re.match("^[A-Za-z0-9]*$", pwd_2g):
+                self.log.error("Password must only contains ascii letters and digits")
+            else:
+                self.ssh.run(f"uci set wireless.@wifi-iface[{2}].key={pwd_2g}")
+                self.log.info(f"Set 2G password to :{pwd_2g}")
+
+        self.ssh.run("uci commit wireless")
+        self.ssh.run("wifi")
+
+    def set_ssid(self, ssid_5g=None, ssid_2g=None):
+        """Set SSID for individual interface.
+
+        Args:
+            ssid_5g: 8 ~ 63 chars for 5g network.
+            ssid_2g: 8 ~ 63 chars for 2g network.
+        """
+        if ssid_5g:
+            if len(ssid_5g) < 8 or len(ssid_5g) > 63:
+                self.log.error("SSID must be 8~63 characters long")
+            # Only accept ascii letters and digits
+            else:
+                self.ssh.run(f"uci set wireless.@wifi-iface[{3}].ssid={ssid_5g}")
+                self.log.info(f"Set 5G SSID to :{ssid_5g}")
+
+        if ssid_2g:
+            if len(ssid_2g) < 8 or len(ssid_2g) > 63:
+                self.log.error("SSID must be 8~63 characters long")
+            # Only accept ascii letters and digits
+            else:
+                self.ssh.run(f"uci set wireless.@wifi-iface[{2}].ssid={ssid_2g}")
+                self.log.info(f"Set 2G SSID to :{ssid_2g}")
+
+        self.ssh.run("uci commit wireless")
+        self.ssh.run("wifi")
+
+    def generate_mobility_domain(self):
+        """Generate 4-character hexadecimal ID.
+
+        Returns:
+          String; a 4-character hexadecimal ID.
+        """
+        md = f"{random.getrandbits(16):04x}"
+        self.log.info(f"Mobility Domain ID: {md}")
+        return md
+
+    def enable_80211r(self, iface, md):
+        """Enable 802.11r for one single radio.
+
+        Args:
+          iface: index number of wifi-iface.
+                  2: radio1
+                  3: radio0
+          md: mobility domain. a 4-character hexadecimal ID.
+        Raises:
+          TestSkip if 2g or 5g radio is not up or 802.11r is not enabled.
+        """
+        str_output = self.ssh.run("wifi status").stdout.decode("utf-8")
+        wifi_status = yaml.load(
+            str_output.replace("\t", "").replace("\n", ""), Loader=yaml.SafeLoader
+        )
+        # Check if the radio is up.
+        if iface == OpenWrtWifiSetting.IFACE_2G:
+            if wifi_status[self.radios[1]]["up"]:
+                self.log.info("2g network is ENABLED")
+            else:
+                raise signals.TestSkip("2g network is NOT ENABLED")
+        elif iface == OpenWrtWifiSetting.IFACE_5G:
+            if wifi_status[self.radios[0]]["up"]:
+                self.log.info("5g network is ENABLED")
+            else:
+                raise signals.TestSkip("5g network is NOT ENABLED")
+
+        # Setup 802.11r.
+        self.ssh.run(f"uci set wireless.@wifi-iface[{iface}].ieee80211r='1'")
+        self.ssh.run(f"uci set wireless.@wifi-iface[{iface}].ft_psk_generate_local='1'")
+        self.ssh.run(f"uci set wireless.@wifi-iface[{iface}].mobility_domain='{md}'")
+        self.ssh.run("uci commit wireless")
+        self.ssh.run("wifi")
+
+        # Check if 802.11r is enabled.
+        result = self.ssh.run(
+            f"uci get wireless.@wifi-iface[{iface}].ieee80211r"
+        ).stdout.decode("utf-8")
+        if result == "1":
+            self.log.info("802.11r is ENABLED")
+        else:
+            raise signals.TestSkip("802.11r is NOT ENABLED")
+
+    def get_wifi_network(self, security=None, band=None):
+        """Return first match wifi interface's config.
+
+        Args:
+          security: psk2 or none
+          band: '2g' or '5g'
+
+        Returns:
+          A dict contains match wifi interface's config.
+        """
+        if not self.wireless_setting:
+            raise RuntimeError("The AP has not been configured yet; run configure_ap()")
+
+        for wifi_iface in self.wireless_setting.wireless_configs:
+            match_list = []
+            wifi_network = wifi_iface.__dict__
+            if security:
+                match_list.append(security == wifi_network["security"])
+            if band:
+                match_list.append(band == wifi_network["band"])
+
+            if all(match_list):
+                wifi_network["SSID"] = wifi_network["ssid"]
+                if not wifi_network["password"]:
+                    del wifi_network["password"]
+                return wifi_network
+        return None
+
+    def get_wifi_status(self):
+        """Check if radios are up. Default are 2G and 5G bands.
+
+        Returns:
+          True if both radios are up. False if not.
+        """
+        status = True
+        for radio in self.radios:
+            try:
+                str_output = self.ssh.run(f"wifi status {radio}").stdout.decode("utf-8")
+                wifi_status = yaml.load(
+                    str_output.replace("\t", "").replace("\n", ""),
+                    Loader=yaml.SafeLoader,
+                )
+                status = wifi_status[radio]["up"] and status
+            except:
+                self.log.info("Failed to make ssh connection to the OpenWrt")
+                return False
+        return status
+
+    def verify_wifi_status(self, timeout=20):
+        """Ensure wifi interfaces are ready.
+
+        Args:
+          timeout: An integer that is the number of times to try
+                   wait for interface ready.
+        Returns:
+          True if both radios are up. False if not.
+        """
+        start_time = time.time()
+        end_time = start_time + timeout
+        while time.time() < end_time:
+            if self.get_wifi_status():
+                return True
+            time.sleep(1)
+        return False
+
+    def get_model_name(self):
+        """Get Openwrt model name.
+
+        Returns:
+          A string include device brand and model. e.g. NETGEAR_R8000
+        """
+        out = self.ssh.run(SYSTEM_INFO_CMD).stdout.decode("utf-8").split("\n")
+        for line in out:
+            if "board_name" in line:
+                model = line.split()[1].strip('",').split(",")
+                return "_".join(map(lambda i: i.upper(), model))
+        self.log.info("Failed to retrieve OpenWrt model information.")
+        return None
+
+    def close(self):
+        """Reset wireless and network settings to default and stop AP."""
+        # Only clean up the subsystems that were actually configured.
+        if self.network_setting.config:
+            self.network_setting.cleanup_network_settings()
+        if self.wireless_setting:
+            self.wireless_setting.cleanup_wireless_settings()
+
+    def close_ssh(self):
+        """Close SSH connection to AP."""
+        self.ssh.close()
+
+    def reboot(self):
+        """Reboot Openwrt.
+
+        Issues the reboot and returns immediately; the device goes down
+        asynchronously, which may drop this SSH session.
+        """
+        self.ssh.run("reboot")
diff --git a/src/antlion/controllers/openwrt_lib/OWNERS b/packages/antlion/controllers/openwrt_lib/OWNERS
similarity index 100%
rename from src/antlion/controllers/openwrt_lib/OWNERS
rename to packages/antlion/controllers/openwrt_lib/OWNERS
diff --git a/src/antlion/controllers/openwrt_lib/__init__.py b/packages/antlion/controllers/openwrt_lib/__init__.py
similarity index 100%
rename from src/antlion/controllers/openwrt_lib/__init__.py
rename to packages/antlion/controllers/openwrt_lib/__init__.py
diff --git a/src/antlion/controllers/openwrt_lib/network_const.py b/packages/antlion/controllers/openwrt_lib/network_const.py
similarity index 99%
rename from src/antlion/controllers/openwrt_lib/network_const.py
rename to packages/antlion/controllers/openwrt_lib/network_const.py
index 3b05b83..7375ff7 100644
--- a/src/antlion/controllers/openwrt_lib/network_const.py
+++ b/packages/antlion/controllers/openwrt_lib/network_const.py
@@ -127,7 +127,7 @@
         "leftauth": "pubkey",
         "leftsendcert": "always",
         "right": "%any",
-        "rightid": "vpntest@%s" % LOCALHOST,
+        "rightid": f"vpntest@{LOCALHOST}",
         "rightauth": "pubkey",
         "rightcert": "clientCert.pem",
         "auto": "add",
diff --git a/src/antlion/controllers/openwrt_lib/network_settings.py b/packages/antlion/controllers/openwrt_lib/network_settings.py
similarity index 81%
rename from src/antlion/controllers/openwrt_lib/network_settings.py
rename to packages/antlion/controllers/openwrt_lib/network_settings.py
index 5d14360..8a8494c 100644
--- a/src/antlion/controllers/openwrt_lib/network_settings.py
+++ b/packages/antlion/controllers/openwrt_lib/network_settings.py
@@ -15,7 +15,8 @@
 import re
 import time
 
-from antlion import signals
+from mobly import signals
+
 from antlion import utils
 from antlion.controllers.openwrt_lib import network_const
 
@@ -59,7 +60,6 @@
         log: Logging object for AccessPoint.
         config: A list to store changes on network settings.
         firewall_rules_list: A list of firewall rule name list.
-        cleanup_map: A dict for compare oppo functions.
         l2tp: profile for vpn l2tp server.
     """
 
@@ -79,20 +79,6 @@
         self.log = logger
         self.config = set()
         self.firewall_rules_list = []
-        self.cleanup_map = {
-            "setup_dns_server": self.remove_dns_server,
-            "setup_vpn_pptp_server": self.remove_vpn_pptp_server,
-            "setup_vpn_l2tp_server": self.remove_vpn_l2tp_server,
-            "disable_ipv6": self.enable_ipv6,
-            "setup_ipv6_bridge": self.remove_ipv6_bridge,
-            "default_dns": self.del_default_dns,
-            "default_v6_dns": self.del_default_v6_dns,
-            "ipv6_prefer_option": self.remove_ipv6_prefer_option,
-            "block_dns_response": self.unblock_dns_response,
-            "setup_mdns": self.remove_mdns,
-            "add_dhcp_rapid_commit": self.remove_dhcp_rapid_commit,
-            "setup_captive_portal": self.remove_cpative_portal,
-        }
         # This map contains cleanup functions to restore the configuration to
         # its default state. We write these keys to HISTORY_CONFIG_PATH prior to
         # making any changes to that subsystem.
@@ -106,7 +92,7 @@
 
         # Detect if any changes that is not clean up.
         if self.file_exists(HISTORY_CONFIG_PATH):
-            out = self.ssh.run("cat %s" % HISTORY_CONFIG_PATH).stdout
+            out = self.ssh.run(f"cat {HISTORY_CONFIG_PATH}").stdout
             if out:
                 self.config = set(out.split("\n"))
 
@@ -114,16 +100,48 @@
             temp = self.config.copy()
             for change in temp:
                 change_list = change.split()
-                if len(change_list) > 1:
-                    self.cleanup_map[change_list[0]](*change_list[1:])
+
+                command = change_list[0]
+                args = change_list[1:]
+                if command == "setup_dns_server":
+                    self.remove_dns_server()
+                elif command == "setup_vpn_pptp_server":
+                    self.remove_vpn_pptp_server()
+                elif command == "setup_vpn_l2tp_server":
+                    self.remove_vpn_l2tp_server()
+                elif command == "disable_ipv6":
+                    self.enable_ipv6()
+                elif command == "setup_ipv6_bridge":
+                    self.remove_ipv6_bridge()
+                elif command == "default_dns":
+                    addr_list = str(change_list[1])
+                    self.del_default_dns(addr_list)
+                elif command == "default_v6_dns":
+                    addr_list = str(change_list[1])
+                    self.del_default_v6_dns(addr_list)
+                elif command == "ipv6_prefer_option":
+                    self.remove_ipv6_prefer_option()
+                elif command == "block_dns_response":
+                    self.unblock_dns_response()
+                elif command == "setup_mdns":
+                    self.remove_mdns()
+                elif command == "add_dhcp_rapid_commit":
+                    self.remove_dhcp_rapid_commit()
+                elif command == "setup_captive_portal":
+                    try:
+                        fas_port = int(change_list[1])
+                    except IndexError:
+                        fas_port = 1000
+                    self.remove_cpative_portal(fas_port)
                 else:
-                    self.cleanup_map[change]()
+                    raise TypeError(f'Unknown command "{change}"')
+
             self.config = set()
 
         if self.file_exists(HISTORY_CONFIG_PATH):
-            out = self.ssh.run("cat %s" % HISTORY_CONFIG_PATH).stdout
+            out = self.ssh.run(f"cat {HISTORY_CONFIG_PATH}").stdout
             if not out:
-                self.ssh.run("rm %s" % HISTORY_CONFIG_PATH)
+                self.ssh.run(f"rm {HISTORY_CONFIG_PATH}")
 
     def commit_changes(self):
         """Apply changes on Access point."""
@@ -142,12 +160,12 @@
         for package_name in package_list.split(" "):
             if not self._package_installed(package_name):
                 self.ssh.run(
-                    "opkg install %s" % package_name,
+                    f"opkg install {package_name}",
                     timeout=DEFAULT_PACKAGE_INSTALL_TIMEOUT,
                 )
-                self.log.info("Package: %s installed." % package_name)
+                self.log.info(f"Package: {package_name} installed.")
             else:
-                self.log.info("Package: %s skipped (already installed)." % package_name)
+                self.log.info(f"Package: {package_name} skipped (already installed).")
 
     def package_remove(self, package_list):
         """Remove packages on OpenWrtAP via opkg If existed.
@@ -157,10 +175,10 @@
         """
         for package_name in package_list.split(" "):
             if self._package_installed(package_name):
-                self.ssh.run("opkg remove %s" % package_name)
-                self.log.info("Package: %s removed." % package_name)
+                self.ssh.run(f"opkg remove {package_name}")
+                self.log.info(f"Package: {package_name} removed.")
             else:
-                self.log.info("No exist package %s found." % package_name)
+                self.log.info(f"No exist package {package_name} found.")
 
     def _package_installed(self, package_name):
         """Check if target package installed on OpenWrtAP.
@@ -171,7 +189,7 @@
         Returns:
             True if installed.
         """
-        if self.ssh.run("opkg list-installed %s" % package_name).stdout:
+        if self.ssh.run(f"opkg list-installed {package_name}").stdout:
             return True
         return False
 
@@ -185,9 +203,7 @@
             True if Existed.
         """
         path, file_name = abs_file_path.rsplit("/", 1)
-        if self.ssh.run(
-            "ls %s | grep %s" % (path, file_name), ignore_status=True
-        ).stdout:
+        if self.ssh.run(f"ls {path} | grep {file_name}", ignore_status=True).stdout:
             return True
         return False
 
@@ -198,7 +214,7 @@
             abs_path: absolutely path for create folder.
         """
         try:
-            self.ssh.run("ls %s" % abs_path)
+            self.ssh.run(f"ls {abs_path}")
         except:
             return False
         return True
@@ -210,9 +226,9 @@
             abs_path: absolutely path for create folder.
         """
         if not self.path_exists(abs_path):
-            self.ssh.run("mkdir %s" % abs_path)
+            self.ssh.run(f"mkdir {abs_path}")
         else:
-            self.log.info("%s already existed." % abs_path)
+            self.log.info(f"{abs_path} already existed.")
 
     def count(self, config, key):
         """Count in uci config.
@@ -224,7 +240,7 @@
             Numbers of the count.
         """
         count = self.ssh.run(
-            "uci show %s | grep =%s" % (config, key), ignore_status=True
+            f"uci show {config} | grep ={key}", ignore_status=True
         ).stdout
         return len(count.split("\n"))
 
@@ -235,7 +251,7 @@
             config: A string of content of config.
             file_path: Config's abs_path.
         """
-        self.ssh.run('echo -e "%s" > %s' % (config, file_path))
+        self.ssh.run(f'echo -e "{config}" > {file_path}')
 
     def replace_config_option(self, old_option, new_option, file_path):
         """Replace config option if pattern match.
@@ -248,10 +264,10 @@
             new_option: the option to add.
             file_path: Config's abs_path.
         """
-        config = self.ssh.run("cat %s" % file_path).stdout
+        config = self.ssh.run(f"cat {file_path}").stdout
         config, count = re.subn(old_option, new_option, config)
         if not count:
-            config = "\n".join([config, new_option])
+            config = f"{config}\n{new_option}"
         self.create_config_file(config, file_path)
 
     def remove_config_option(self, option, file_path):
@@ -263,7 +279,7 @@
         Returns:
             Boolean for find option to remove.
         """
-        config = self.ssh.run("cat %s" % file_path).stdout.split("\n")
+        config = self.ssh.run(f"cat {file_path}").stdout.split("\n")
         for line in config:
             count = re.subn(option, "", line)[1]
             if count > 0:
@@ -280,9 +296,9 @@
             domain_name: Local dns domain name.
         """
         self.config.add("setup_dns_server")
-        self.log.info("Setup DNS server with domain name %s" % domain_name)
-        self.ssh.run("uci set dhcp.@dnsmasq[0].local='/%s/'" % domain_name)
-        self.ssh.run("uci set dhcp.@dnsmasq[0].domain='%s'" % domain_name)
+        self.log.info(f"Setup DNS server with domain name {domain_name}")
+        self.ssh.run(f"uci set dhcp.@dnsmasq[0].local='/{domain_name}/'")
+        self.ssh.run(f"uci set dhcp.@dnsmasq[0].domain='{domain_name}'")
         self.add_resource_record(domain_name, self.ip)
         self.service_manager.need_restart(SERVICE_DNSMASQ)
         self.commit_changes()
@@ -315,8 +331,8 @@
             domain_ip: A string for domain ip.
         """
         self.ssh.run("uci add dhcp domain")
-        self.ssh.run("uci set dhcp.@domain[-1].name='%s'" % domain_name)
-        self.ssh.run("uci set dhcp.@domain[-1].ip='%s'" % domain_ip)
+        self.ssh.run(f"uci set dhcp.@domain[-1].name='{domain_name}'")
+        self.ssh.run(f"uci set dhcp.@domain[-1].ip='{domain_ip}'")
         self.service_manager.need_restart(SERVICE_DNSMASQ)
 
     def del_resource_record(self):
@@ -401,16 +417,16 @@
         remote_ip = ".".join(remote_ip)
         # Enable pptp service and set ip addr
         self.ssh.run("uci set pptpd.pptpd.enabled=1")
-        self.ssh.run("uci set pptpd.pptpd.localip='%s'" % local_ip)
-        self.ssh.run("uci set pptpd.pptpd.remoteip='%s-250'" % remote_ip)
+        self.ssh.run(f"uci set pptpd.pptpd.localip='{local_ip}'")
+        self.ssh.run(f"uci set pptpd.pptpd.remoteip='{remote_ip}-250'")
 
         # Setup pptp service account
-        self.ssh.run("uci set pptpd.@login[0].username='%s'" % username)
-        self.ssh.run("uci set pptpd.@login[0].password='%s'" % password)
+        self.ssh.run(f"uci set pptpd.@login[0].username='{username}'")
+        self.ssh.run(f"uci set pptpd.@login[0].password='{password}'")
         self.service_manager.need_restart(SERVICE_PPTPD)
 
         self.replace_config_option(
-            r"#*ms-dns \d+.\d+.\d+.\d+", "ms-dns %s" % ms_dns, PPTPD_OPTION_PATH
+            r"#*ms-dns \d+.\d+.\d+.\d+", f"ms-dns {ms_dns}", PPTPD_OPTION_PATH
         )
         self.replace_config_option("(#no)*proxyarp", "proxyarp", PPTPD_OPTION_PATH)
 
@@ -502,7 +518,7 @@
             "   plugins {",
             "       include strongswan.d/charon/*.conf",
             "   }",
-            "   dns1=%s" % dns,
+            f"   dns1={dns}",
             "}",
         ]
         self.create_config_file("\n".join(config), "/etc/strongswan.conf")
@@ -510,19 +526,19 @@
     def setup_ipsec(self):
         """Setup ipsec config."""
 
+        config: list[str] = []
+
         def load_ipsec_config(data, rightsourceip=False):
             for i in data.keys():
                 config.append(i)
                 for j in data[i].keys():
-                    config.append("\t %s=%s" % (j, data[i][j]))
+                    config.append(f"\t {j}={data[i][j]}")
                 if rightsourceip:
                     config.append(
-                        "\t rightsourceip=%s.16/26"
-                        % self.l2tp.address.rsplit(".", 1)[0]
+                        f"\t rightsourceip={self.l2tp.address.rsplit('.', 1)[0]}.16/26"
                     )
                 config.append("")
 
-        config = []
         load_ipsec_config(network_const.IPSEC_IKEV2_MSCHAPV2, True)
         load_ipsec_config(network_const.IPSEC_IKEV2_PSK, True)
         load_ipsec_config(network_const.IPSEC_IKEV2_RSA, True)
@@ -549,25 +565,24 @@
         """Setup xl2tpd config."""
         net_id, host_id = self.l2tp.address.rsplit(".", 1)
         xl2tpd_conf = list(network_const.XL2TPD_CONF_GLOBAL)
-        xl2tpd_conf.append("auth file = %s" % PPP_CHAP_SECRET_PATH)
+        xl2tpd_conf.append(f"auth file = {PPP_CHAP_SECRET_PATH}")
         xl2tpd_conf.extend(network_const.XL2TPD_CONF_INS)
         xl2tpd_conf.append(
-            "ip range = %s.%s-%s.%s"
-            % (net_id, host_id, net_id, str(int(host_id) + ip_range))
+            f"ip range = {net_id}.{host_id}-{net_id}.{str(int(host_id) + ip_range)}"
         )
-        xl2tpd_conf.append("local ip = %s" % self.l2tp.address)
-        xl2tpd_conf.append("name = %s" % self.l2tp.name)
-        xl2tpd_conf.append("pppoptfile = %s" % XL2TPD_OPTION_CONFIG_PATH)
+        xl2tpd_conf.append(f"local ip = {self.l2tp.address}")
+        xl2tpd_conf.append(f"name = {self.l2tp.name}")
+        xl2tpd_conf.append(f"pppoptfile = {XL2TPD_OPTION_CONFIG_PATH}")
 
         self.create_config_file("\n".join(xl2tpd_conf), XL2TPD_CONFIG_PATH)
         xl2tpd_option = list(network_const.XL2TPD_OPTION)
-        xl2tpd_option.append("name %s" % self.l2tp.name)
+        xl2tpd_option.append(f"name {self.l2tp.name}")
         self.create_config_file("\n".join(xl2tpd_option), XL2TPD_OPTION_CONFIG_PATH)
 
     def setup_ppp_secret(self):
         self.replace_config_option(
             r"\S+ %s \S+ \*" % self.l2tp.name,
-            "%s %s %s *" % (self.l2tp.username, self.l2tp.name, self.l2tp.password),
+            f"{self.l2tp.username} {self.l2tp.name} {self.l2tp.password} *",
             PPP_CHAP_SECRET_PATH,
         )
 
@@ -577,15 +592,13 @@
         lifetime = "--lifetime 365"
         size = "--size 4096"
 
-        self.ssh.run("ipsec pki --gen %s %s --outform der > caKey.der" % (rsa, size))
+        self.ssh.run(f"ipsec pki --gen {rsa} {size} --outform der > caKey.der")
         self.ssh.run(
             "ipsec pki --self --ca %s --in caKey.der %s --dn "
             '"C=%s, O=%s, CN=%s" --outform der > caCert.der'
             % (lifetime, rsa, country, org, self.l2tp.hostname)
         )
-        self.ssh.run(
-            "ipsec pki --gen %s %s --outform der > serverKey.der" % (size, rsa)
-        )
+        self.ssh.run(f"ipsec pki --gen {size} {rsa} --outform der > serverKey.der")
         self.ssh.run(
             "ipsec pki --pub --in serverKey.der %s | ipsec pki "
             "--issue %s --cacert caCert.der --cakey caKey.der "
@@ -593,9 +606,7 @@
             " --flag ikeIntermediate --outform der > serverCert.der"
             % (rsa, lifetime, country, org, self.l2tp.hostname, LOCALHOST)
         )
-        self.ssh.run(
-            "ipsec pki --gen %s %s --outform der > clientKey.der" % (size, rsa)
-        )
+        self.ssh.run(f"ipsec pki --gen {size} {rsa} --outform der > clientKey.der")
         self.ssh.run(
             "ipsec pki --pub --in clientKey.der %s | ipsec pki "
             "--issue %s --cacert caCert.der --cakey caKey.der "
@@ -637,11 +648,11 @@
             self.ssh.run("mkdir /www/downloads/")
 
         ikev2_vpn_cert_keys = [
-            "ipsec pki --gen %s %s --outform der > caKey.der" % (rsa, size),
+            f"ipsec pki --gen {rsa} {size} --outform der > caKey.der",
             "ipsec pki --self --ca %s --in caKey.der %s --dn "
             '"C=%s, O=%s, CN=%s" --outform der > caCert.der'
             % (lifetime, rsa, country, org, self.l2tp.hostname),
-            "ipsec pki --gen %s %s --outform der > serverKey.der" % (size, rsa),
+            f"ipsec pki --gen {size} {rsa} --outform der > serverKey.der",
             "ipsec pki --pub --in serverKey.der %s | ipsec pki --issue %s "
             r"--cacert caCert.der --cakey caKey.der --dn \"C=%s, O=%s, CN=%s\" "
             "--san %s --san %s --flag serverAuth --flag ikeIntermediate "
@@ -655,7 +666,7 @@
                 LOCALHOST,
                 self.l2tp.hostname,
             ),
-            "ipsec pki --gen %s %s --outform der > clientKey.der" % (size, rsa),
+            f"ipsec pki --gen {size} {rsa} --outform der > clientKey.der",
             "ipsec pki --pub --in clientKey.der %s | ipsec pki --issue %s "
             r"--cacert caCert.der --cakey caKey.der --dn \"C=%s, O=%s, CN=%s@%s\" "
             r"--san \"%s\" --san \"%s@%s\" --san \"%s@%s\" --outform der "
@@ -689,14 +700,14 @@
         file_string = "\n".join(ikev2_vpn_cert_keys)
         self.create_config_file(file_string, IKEV2_VPN_CERT_KEYS_PATH)
 
-        self.ssh.run("chmod +x %s" % IKEV2_VPN_CERT_KEYS_PATH)
-        self.ssh.run("%s" % IKEV2_VPN_CERT_KEYS_PATH)
+        self.ssh.run(f"chmod +x {IKEV2_VPN_CERT_KEYS_PATH}")
+        self.ssh.run(f"{IKEV2_VPN_CERT_KEYS_PATH}")
 
     def update_firewall_rules_list(self):
         """Update rule list in /etc/config/firewall."""
         new_rules_list = []
         for i in range(self.count("firewall", "rule")):
-            rule = self.ssh.run("uci get firewall.@rule[%s].name" % i).stdout
+            rule = self.ssh.run(f"uci get firewall.@rule[{i}].name").stdout
             new_rules_list.append(rule)
         self.firewall_rules_list = new_rules_list
 
@@ -728,12 +739,12 @@
         self.update_firewall_rules_list()
         if "pptpd" in self.firewall_rules_list:
             self.ssh.run(
-                "uci del firewall.@rule[%s]" % self.firewall_rules_list.index("pptpd")
+                f"uci del firewall.@rule[{self.firewall_rules_list.index('pptpd')}]"
             )
         self.update_firewall_rules_list()
         if "GRP" in self.firewall_rules_list:
             self.ssh.run(
-                "uci del firewall.@rule[%s]" % self.firewall_rules_list.index("GRP")
+                f"uci del firewall.@rule[{self.firewall_rules_list.index('GRP')}]"
             )
         self.remove_custom_firewall_rules()
         self.service_manager.need_restart(SERVICE_FIREWALL)
@@ -765,10 +776,9 @@
 
         net_id = self.l2tp.address.rsplit(".", 1)[0]
         iptable_rules = list(network_const.FIREWALL_RULES_FOR_L2TP)
-        iptable_rules.append("iptables -A FORWARD -s %s.0/24" "  -j ACCEPT" % net_id)
+        iptable_rules.append(f"iptables -A FORWARD -s {net_id}.0/24  -j ACCEPT")
         iptable_rules.append(
-            "iptables -t nat -A POSTROUTING"
-            " -s %s.0/24 -o eth0.2 -j MASQUERADE" % net_id
+            f"iptables -t nat -A POSTROUTING -s {net_id}.0/24 -o eth0.2 -j MASQUERADE"
         )
 
         self.add_custom_firewall_rules(iptable_rules)
@@ -779,8 +789,7 @@
         self.update_firewall_rules_list()
         if "ipsec esp" in self.firewall_rules_list:
             self.ssh.run(
-                "uci del firewall.@rule[%s]"
-                % self.firewall_rules_list.index("ipsec esp")
+                f"uci del firewall.@rule[{self.firewall_rules_list.index('ipsec esp')}]"
             )
         self.update_firewall_rules_list()
         if "ipsec nat-t" in self.firewall_rules_list:
@@ -803,20 +812,20 @@
         Args:
             rules: A list of iptable rules to apply.
         """
-        backup_file_path = FIREWALL_CUSTOM_OPTION_PATH + ".backup"
+        backup_file_path = f"{FIREWALL_CUSTOM_OPTION_PATH}.backup"
         if not self.file_exists(backup_file_path):
-            self.ssh.run("mv %s %s" % (FIREWALL_CUSTOM_OPTION_PATH, backup_file_path))
+            self.ssh.run(f"mv {FIREWALL_CUSTOM_OPTION_PATH} {backup_file_path}")
         for rule in rules:
-            self.ssh.run("echo %s >> %s" % (rule, FIREWALL_CUSTOM_OPTION_PATH))
+            self.ssh.run(f"echo {rule} >> {FIREWALL_CUSTOM_OPTION_PATH}")
 
     def remove_custom_firewall_rules(self):
         """Clean up and recover custom firewall rules."""
-        backup_file_path = FIREWALL_CUSTOM_OPTION_PATH + ".backup"
+        backup_file_path = f"{FIREWALL_CUSTOM_OPTION_PATH}.backup"
         if self.file_exists(backup_file_path):
-            self.ssh.run("mv %s %s" % (backup_file_path, FIREWALL_CUSTOM_OPTION_PATH))
+            self.ssh.run(f"mv {backup_file_path} {FIREWALL_CUSTOM_OPTION_PATH}")
         else:
-            self.log.debug("Did not find %s" % backup_file_path)
-            self.ssh.run("echo " " > %s" % FIREWALL_CUSTOM_OPTION_PATH)
+            self.log.debug(f"Did not find {backup_file_path}")
+            self.ssh.run(f"echo  > {FIREWALL_CUSTOM_OPTION_PATH}")
 
     def disable_pptp_service(self):
         """Disable pptp service."""
@@ -828,7 +837,7 @@
         self.ssh.run("uci set network.lan2.type=bridge")
         self.ssh.run("uci set network.lan2.ifname=eth1.2")
         self.ssh.run("uci set network.lan2.proto=static")
-        self.ssh.run('uci set network.lan2.ipaddr="%s"' % self.l2tp.address)
+        self.ssh.run(f'uci set network.lan2.ipaddr="{self.l2tp.address}"')
         self.ssh.run("uci set network.lan2.netmask=255.255.255.0")
         self.ssh.run("uci set network.lan2=interface")
         self.service_manager.reload(SERVICE_NETWORK)
@@ -892,10 +901,10 @@
             self.commit_changes()
 
     def _add_dhcp_option(self, args):
-        self.ssh.run('uci add_list dhcp.lan.dhcp_option="%s"' % args)
+        self.ssh.run(f'uci add_list dhcp.lan.dhcp_option="{args}"')
 
     def _remove_dhcp_option(self, args):
-        self.ssh.run('uci del_list dhcp.lan.dhcp_option="%s"' % args)
+        self.ssh.run(f'uci del_list dhcp.lan.dhcp_option="{args}"')
 
     def add_default_dns(self, addr_list):
         """Add default dns server for client.
@@ -903,41 +912,41 @@
         Args:
             addr_list: dns ip address for Openwrt client.
         """
-        self._add_dhcp_option("6,%s" % ",".join(addr_list))
-        self.config.add("default_dns %s" % addr_list)
+        self._add_dhcp_option(f'6,{",".join(addr_list)}')
+        self.config.add(f"default_dns {addr_list}")
         self.service_manager.need_restart(SERVICE_DNSMASQ)
         self.commit_changes()
 
-    def del_default_dns(self, addr_list):
+    def del_default_dns(self, addr_list: str):
         """Remove default dns server for client.
 
         Args:
             addr_list: list of dns ip address for Openwrt client.
         """
-        self._remove_dhcp_option("6,%s" % addr_list)
-        self.config.discard("default_dns %s" % addr_list)
+        self._remove_dhcp_option(f"6,{addr_list}")
+        self.config.discard(f"default_dns {addr_list}")
         self.service_manager.need_restart(SERVICE_DNSMASQ)
         self.commit_changes()
 
-    def add_default_v6_dns(self, addr_list):
+    def add_default_v6_dns(self, addr_list: str):
         """Add default v6 dns server for client.
 
         Args:
-            addr_list: dns ip address for Openwrt client.
+            addr_list: list of dns ip address for Openwrt client.
         """
-        self.ssh.run('uci add_list dhcp.lan.dns="%s"' % addr_list)
-        self.config.add("default_v6_dns %s" % addr_list)
+        self.ssh.run(f'uci add_list dhcp.lan.dns="{addr_list}"')
+        self.config.add(f"default_v6_dns {addr_list}")
         self.service_manager.need_restart(SERVICE_ODHCPD)
         self.commit_changes()
 
-    def del_default_v6_dns(self, addr_list):
+    def del_default_v6_dns(self, addr_list: str):
         """Del default v6 dns server for client.
 
         Args:
-            addr_list: dns ip address for Openwrt client.
+            addr_list: list of dns ip address for Openwrt client.
         """
-        self.ssh.run('uci del_list dhcp.lan.dns="%s"' % addr_list)
-        self.config.add("default_v6_dns %s" % addr_list)
+        self.ssh.run(f'uci del_list dhcp.lan.dns="{addr_list}"')
+        self.config.add(f"default_v6_dns {addr_list}")
         self.service_manager.need_restart(SERVICE_ODHCPD)
         self.commit_changes()
 
@@ -978,13 +987,11 @@
         """
         self.package_install("tcpdump")
         if not self.path_exists(TCPDUMP_DIR):
-            self.ssh.run("mkdir %s" % TCPDUMP_DIR)
-        tcpdump_file_name = "openwrt_%s_%s.pcap" % (
-            test_name,
-            time.strftime("%Y-%m-%d_%H-%M-%S", time.localtime(time.time())),
-        )
-        tcpdump_file_path = "".join([TCPDUMP_DIR, tcpdump_file_name])
-        cmd = "tcpdump -i %s -s0 %s -w %s" % (interface, args, tcpdump_file_path)
+            self.ssh.run(f"mkdir {TCPDUMP_DIR}")
+        now = (time.strftime("%Y-%m-%d_%H-%M-%S", time.localtime(time.time())),)
+        tcpdump_file_name = f"openwrt_{test_name}_{now}.pcap"
+        tcpdump_file_path = f"{TCPDUMP_DIR}{tcpdump_file_name}"
+        cmd = f"tcpdump -i {interface} -s0 {args} -w {tcpdump_file_path}"
         self.ssh.run_async(cmd)
         pid = self._get_tcpdump_pid(tcpdump_file_name)
         if not pid:
@@ -1005,17 +1012,17 @@
         # Set delay to prevent tcpdump fail to capture target packet.
         time.sleep(15)
         pid = self._get_tcpdump_pid(tcpdump_file_name)
-        self.ssh.run("kill -9 %s" % pid, ignore_status=True)
+        self.ssh.run(f"kill -9 {pid}", ignore_status=True)
         if self.path_exists(TCPDUMP_DIR) and pull_dir:
-            tcpdump_path = "".join([TCPDUMP_DIR, tcpdump_file_name])
-            tcpdump_remote_path = "/".join([pull_dir, tcpdump_file_name])
-            tcpdump_local_path = "%s@%s:%s" % (self.user, self.ip, tcpdump_path)
-            utils.exe_cmd("scp %s %s" % (tcpdump_local_path, tcpdump_remote_path))
+            tcpdump_path = f"{TCPDUMP_DIR}{tcpdump_file_name}"
+            tcpdump_remote_path = f"{pull_dir}/{tcpdump_file_name}"
+            tcpdump_local_path = f"{self.user}@{self.ip}:{tcpdump_path}"
+            utils.exe_cmd(f"scp {tcpdump_local_path} {tcpdump_remote_path}")
 
         if self._get_tcpdump_pid(tcpdump_file_name):
             raise signals.TestFailure("Failed to stop tcpdump on OpenWrt.")
         if self.file_exists(tcpdump_path):
-            self.ssh.run("rm -f %s" % tcpdump_path)
+            self.ssh.run(f"rm -f {tcpdump_path}")
         return tcpdump_remote_path if pull_dir else None
 
     def clear_tcpdump(self):
@@ -1023,13 +1030,11 @@
         if self.ssh.run("pgrep tcpdump", ignore_status=True).stdout:
             raise signals.TestFailure("Failed to clean up tcpdump process.")
         if self.path_exists(TCPDUMP_DIR):
-            self.ssh.run("rm -f  %s/*" % TCPDUMP_DIR)
+            self.ssh.run(f"rm -f  {TCPDUMP_DIR}/*")
 
     def _get_tcpdump_pid(self, tcpdump_file_name):
         """Check tcpdump process on OpenWrt."""
-        return self.ssh.run(
-            "pgrep -f %s" % (tcpdump_file_name), ignore_status=True
-        ).stdout
+        return self.ssh.run(f"pgrep -f {tcpdump_file_name}", ignore_status=True).stdout
 
     def setup_mdns(self):
         self.config.add("setup_mdns")
@@ -1062,18 +1067,18 @@
              fas_port: Port for captive portal page.
         """
         self.package_install(CAPTIVE_PORTAL_PACKAGE)
-        self.config.add("setup_captive_portal %s" % fas_port)
+        self.config.add(f"setup_captive_portal {fas_port}")
         self.ssh.run("uci set opennds.@opennds[0].fas_secure_enabled=2")
         self.ssh.run("uci set opennds.@opennds[0].gatewayport=2050")
-        self.ssh.run("uci set opennds.@opennds[0].fasport=%s" % fas_port)
-        self.ssh.run("uci set opennds.@opennds[0].fasremotefqdn=%s" % fas_fdqn)
+        self.ssh.run(f"uci set opennds.@opennds[0].fasport={fas_port}")
+        self.ssh.run(f"uci set opennds.@opennds[0].fasremotefqdn={fas_fdqn}")
         self.ssh.run('uci set opennds.@opennds[0].faspath="/nds/fas-aes.php"')
         self.ssh.run("uci set opennds.@opennds[0].faskey=1234567890")
         self.service_manager.need_restart(SERVICE_OPENNDS)
         # Config uhttpd
         self.ssh.run("uci set uhttpd.main.interpreter=.php=/usr/bin/php-cgi")
-        self.ssh.run("uci add_list uhttpd.main.listen_http=0.0.0.0:%s" % fas_port)
-        self.ssh.run("uci add_list uhttpd.main.listen_http=[::]:%s" % fas_port)
+        self.ssh.run(f"uci add_list uhttpd.main.listen_http=0.0.0.0:{fas_port}")
+        self.ssh.run(f"uci add_list uhttpd.main.listen_http=[::]:{fas_port}")
         self.service_manager.need_restart(SERVICE_UHTTPD)
         # cp fas-aes.php
         self.create_folder("/www/nds/")
@@ -1082,7 +1087,7 @@
         self.add_resource_record(fas_fdqn, LOCALHOST)
         self.commit_changes()
 
-    def remove_cpative_portal(self, fas_port=2080):
+    def remove_cpative_portal(self, fas_port: int = 2080):
         """Remove captive portal.
 
         Args:
@@ -1096,12 +1101,12 @@
         self.clear_resource_record()
         # Restore uhttpd
         self.ssh.run("uci del uhttpd.main.interpreter")
-        self.ssh.run("uci del_list uhttpd.main.listen_http='0.0.0.0:%s'" % fas_port)
-        self.ssh.run("uci del_list uhttpd.main.listen_http='[::]:%s'" % fas_port)
+        self.ssh.run(f"uci del_list uhttpd.main.listen_http='0.0.0.0:{fas_port}'")
+        self.ssh.run(f"uci del_list uhttpd.main.listen_http='[::]:{fas_port}'")
         self.service_manager.need_restart(SERVICE_UHTTPD)
         # Clean web root
         self.ssh.run("rm -r /www/nds")
-        self.config.discard("setup_captive_portal %s" % fas_port)
+        self.config.discard(f"setup_captive_portal {fas_port}")
         self.commit_changes()
 
 
@@ -1119,19 +1124,19 @@
 
     def enable(self, service_name):
         """Enable service auto start."""
-        self.ssh.run("/etc/init.d/%s enable" % service_name)
+        self.ssh.run(f"/etc/init.d/{service_name} enable")
 
     def disable(self, service_name):
         """Disable service auto start."""
-        self.ssh.run("/etc/init.d/%s disable" % service_name)
+        self.ssh.run(f"/etc/init.d/{service_name} disable")
 
     def restart(self, service_name):
         """Restart the service."""
-        self.ssh.run("/etc/init.d/%s restart" % service_name)
+        self.ssh.run(f"/etc/init.d/{service_name} restart")
 
     def reload(self, service_name):
         """Restart the service."""
-        self.ssh.run("/etc/init.d/%s reload" % service_name)
+        self.ssh.run(f"/etc/init.d/{service_name} reload")
 
     def restart_services(self):
         """Restart all services need to restart."""
@@ -1143,7 +1148,7 @@
 
     def stop(self, service_name):
         """Stop the service."""
-        self.ssh.run("/etc/init.d/%s stop" % service_name)
+        self.ssh.run(f"/etc/init.d/{service_name} stop")
 
     def need_restart(self, service_name):
         self._need_restart.add(service_name)
diff --git a/src/antlion/controllers/openwrt_lib/openwrt_constants.py b/packages/antlion/controllers/openwrt_lib/openwrt_constants.py
similarity index 100%
rename from src/antlion/controllers/openwrt_lib/openwrt_constants.py
rename to packages/antlion/controllers/openwrt_lib/openwrt_constants.py
diff --git a/src/antlion/controllers/openwrt_lib/wireless_config.py b/packages/antlion/controllers/openwrt_lib/wireless_config.py
similarity index 70%
rename from src/antlion/controllers/openwrt_lib/wireless_config.py
rename to packages/antlion/controllers/openwrt_lib/wireless_config.py
index 9cdb309..d97e197 100644
--- a/src/antlion/controllers/openwrt_lib/wireless_config.py
+++ b/packages/antlion/controllers/openwrt_lib/wireless_config.py
@@ -1,6 +1,6 @@
 """Class for Wireless config."""
 
-NET_IFACE = "lan"
+from antlion.controllers.ap_lib.hostapd_security import OpenWRTEncryptionMode
 
 
 class WirelessConfig(object):
@@ -24,19 +24,19 @@
 
     def __init__(
         self,
-        name,
-        ssid,
-        security,
-        band,
-        iface=NET_IFACE,
-        password=None,
-        wep_key=None,
-        wep_key_num=1,
-        radius_server_ip=None,
-        radius_server_port=None,
-        radius_server_secret=None,
-        hidden=False,
-        ieee80211w=None,
+        name: str,
+        ssid: str,
+        security: OpenWRTEncryptionMode,
+        band: str,
+        iface: str = "lan",
+        password: str | None = None,
+        wep_key: list[str] | None = None,
+        wep_key_num: int = 1,
+        radius_server_ip: str | None = None,
+        radius_server_port: int | None = None,
+        radius_server_secret: str | None = None,
+        hidden: bool = False,
+        ieee80211w: int | None = None,
     ):
         self.name = name
         self.ssid = ssid
diff --git a/src/antlion/controllers/openwrt_lib/wireless_settings_applier.py b/packages/antlion/controllers/openwrt_lib/wireless_settings_applier.py
similarity index 61%
rename from src/antlion/controllers/openwrt_lib/wireless_settings_applier.py
rename to packages/antlion/controllers/openwrt_lib/wireless_settings_applier.py
index d899a30..da0d2d7 100644
--- a/src/antlion/controllers/openwrt_lib/wireless_settings_applier.py
+++ b/packages/antlion/controllers/openwrt_lib/wireless_settings_applier.py
@@ -3,8 +3,11 @@
 import time
 
 from antlion.controllers.ap_lib import hostapd_constants
-from antlion.controllers.openwrt_lib.network_settings import SERVICE_DNSMASQ
-from antlion.controllers.openwrt_lib.network_settings import ServiceManager
+from antlion.controllers.openwrt_lib.network_settings import (
+    SERVICE_DNSMASQ,
+    ServiceManager,
+)
+from antlion.controllers.openwrt_lib.wireless_config import WirelessConfig
 
 LEASE_FILE = "/tmp/dhcp.leases"
 OPEN_SECURITY = "none"
@@ -48,7 +51,7 @@
         """
         self.ssh = ssh
         self.service_manager = ServiceManager(ssh)
-        self.wireless_configs = configs
+        self.wireless_configs: list[WirelessConfig] = configs
         self.channel_2g = channel_2g
         self.channel_5g = channel_5g
         self.radio_2g = radio_2g
@@ -56,52 +59,38 @@
 
     def apply_wireless_settings(self):
         """Configure wireless settings from a list of configs."""
-        default_2g_iface = "default_" + self.radio_2g
-        default_5g_iface = "default_" + self.radio_5g
+        default_2g_iface = f"default_{self.radio_2g}"
+        default_5g_iface = f"default_{self.radio_5g}"
 
         # set channels for 2G and 5G bands
-        self.ssh.run(
-            "uci set wireless.%s.channel='%s'" % (self.radio_2g, self.channel_2g)
-        )
-        self.ssh.run(
-            "uci set wireless.%s.channel='%s'" % (self.radio_5g, self.channel_5g)
-        )
+        self.ssh.run(f"uci set wireless.{self.radio_2g}.channel='{self.channel_2g}'")
+        self.ssh.run(f"uci set wireless.{self.radio_5g}.channel='{self.channel_5g}'")
         if self.channel_5g == 165:
-            self.ssh.run("uci set wireless.%s.htmode='VHT20'" % self.radio_5g)
+            self.ssh.run(f"uci set wireless.{self.radio_5g}.htmode='VHT20'")
         elif self.channel_5g == 132 or self.channel_5g == 136:
             self.ssh.run("iw reg set ZA")
-            self.ssh.run("uci set wireless.%s.htmode='VHT40'" % self.radio_5g)
+            self.ssh.run(f"uci set wireless.{self.radio_5g}.htmode='VHT40'")
 
         if self.channel_2g == 13:
             self.ssh.run("iw reg set AU")
 
         # disable default OpenWrt SSID
-        self.ssh.run(
-            "uci set wireless.%s.disabled='%s'" % (default_2g_iface, DISABLE_RADIO)
-        )
-        self.ssh.run(
-            "uci set wireless.%s.disabled='%s'" % (default_5g_iface, DISABLE_RADIO)
-        )
+        self.ssh.run(f"uci set wireless.{default_2g_iface}.disabled='{DISABLE_RADIO}'")
+        self.ssh.run(f"uci set wireless.{default_5g_iface}.disabled='{DISABLE_RADIO}'")
 
         # Enable radios
-        self.ssh.run(
-            "uci set wireless.%s.disabled='%s'" % (self.radio_2g, ENABLE_RADIO)
-        )
-        self.ssh.run(
-            "uci set wireless.%s.disabled='%s'" % (self.radio_5g, ENABLE_RADIO)
-        )
+        self.ssh.run(f"uci set wireless.{self.radio_2g}.disabled='{ENABLE_RADIO}'")
+        self.ssh.run(f"uci set wireless.{self.radio_5g}.disabled='{ENABLE_RADIO}'")
 
         for config in self.wireless_configs:
             # configure open network
             if config.security == OPEN_SECURITY:
                 if config.band == hostapd_constants.BAND_2G:
                     self.ssh.run(
-                        "uci set wireless.%s.ssid='%s'"
-                        % (default_2g_iface, config.ssid)
+                        f"uci set wireless.{default_2g_iface}.ssid='{config.ssid}'"
                     )
                     self.ssh.run(
-                        "uci set wireless.%s.disabled='%s'"
-                        % (default_2g_iface, ENABLE_RADIO)
+                        f"uci set wireless.{default_2g_iface}.disabled='{ENABLE_RADIO}'"
                     )
                     if config.hidden:
                         self.ssh.run(
@@ -110,12 +99,10 @@
                         )
                 elif config.band == hostapd_constants.BAND_5G:
                     self.ssh.run(
-                        "uci set wireless.%s.ssid='%s'"
-                        % (default_5g_iface, config.ssid)
+                        f"uci set wireless.{default_5g_iface}.ssid='{config.ssid}'"
                     )
                     self.ssh.run(
-                        "uci set wireless.%s.disabled='%s'"
-                        % (default_5g_iface, ENABLE_RADIO)
+                        f"uci set wireless.{default_5g_iface}.disabled='{ENABLE_RADIO}'"
                     )
                     if config.hidden:
                         self.ssh.run(
@@ -124,22 +111,16 @@
                         )
                 continue
 
-            self.ssh.run("uci set wireless.%s='wifi-iface'" % config.name)
+            self.ssh.run(f"uci set wireless.{config.name}='wifi-iface'")
             if config.band == hostapd_constants.BAND_2G:
-                self.ssh.run(
-                    "uci set wireless.%s.device='%s'" % (config.name, self.radio_2g)
-                )
+                self.ssh.run(f"uci set wireless.{config.name}.device='{self.radio_2g}'")
             else:
-                self.ssh.run(
-                    "uci set wireless.%s.device='%s'" % (config.name, self.radio_5g)
-                )
+                self.ssh.run(f"uci set wireless.{config.name}.device='{self.radio_5g}'")
+            self.ssh.run(f"uci set wireless.{config.name}.network='{config.iface}'")
+            self.ssh.run(f"uci set wireless.{config.name}.mode='ap'")
+            self.ssh.run(f"uci set wireless.{config.name}.ssid='{config.ssid}'")
             self.ssh.run(
-                "uci set wireless.%s.network='%s'" % (config.name, config.iface)
-            )
-            self.ssh.run("uci set wireless.%s.mode='ap'" % config.name)
-            self.ssh.run("uci set wireless.%s.ssid='%s'" % (config.name, config.ssid))
-            self.ssh.run(
-                "uci set wireless.%s.encryption='%s'" % (config.name, config.security)
+                f"uci set wireless.{config.name}.encryption='{config.security}'"
             )
             if (
                 config.security == PSK_SECURITY
@@ -147,16 +128,14 @@
                 or config.security == PSK1_SECURITY
                 or config.security == SAEMIXED_SECURITY
             ):
-                self.ssh.run(
-                    "uci set wireless.%s.key='%s'" % (config.name, config.password)
-                )
+                self.ssh.run(f"uci set wireless.{config.name}.key='{config.password}'")
             elif config.security == WEP_SECURITY:
                 self.ssh.run(
                     "uci set wireless.%s.key%s='%s'"
                     % (config.name, config.wep_key_num, config.wep_key)
                 )
                 self.ssh.run(
-                    "uci set wireless.%s.key='%s'" % (config.name, config.wep_key_num)
+                    f"uci set wireless.{config.name}.key='{config.wep_key_num}'"
                 )
             elif config.security == ENT_SECURITY:
                 self.ssh.run(
@@ -173,16 +152,13 @@
                 )
             if config.ieee80211w:
                 self.ssh.run(
-                    "uci set wireless.%s.ieee80211w='%s'"
-                    % (config.name, config.ieee80211w)
+                    f"uci set wireless.{config.name}.ieee80211w='{config.ieee80211w}'"
                 )
             if config.hidden:
-                self.ssh.run(
-                    "uci set wireless.%s.hidden='%s'" % (config.name, ENABLE_HIDDEN)
-                )
+                self.ssh.run(f"uci set wireless.{config.name}.hidden='{ENABLE_HIDDEN}'")
 
         self.ssh.run("uci commit wireless")
-        self.ssh.run("cp %s %s.tmp" % (LEASE_FILE, LEASE_FILE))
+        self.ssh.run(f"cp {LEASE_FILE} {LEASE_FILE}.tmp")
 
     def cleanup_wireless_settings(self):
         """Reset wireless settings to default."""
@@ -191,6 +167,6 @@
         self.ssh.run("wifi config")
         if self.channel_5g == 132:
             self.ssh.run("iw reg set US")
-        self.ssh.run("cp %s.tmp %s" % (LEASE_FILE, LEASE_FILE))
+        self.ssh.run(f"cp {LEASE_FILE}.tmp {LEASE_FILE}")
         self.service_manager.restart(SERVICE_DNSMASQ)
         time.sleep(9)
diff --git a/src/antlion/controllers/packet_capture.py b/packages/antlion/controllers/packet_capture.py
similarity index 66%
rename from src/antlion/controllers/packet_capture.py
rename to packages/antlion/controllers/packet_capture.py
index ce3d8fd..5b753df 100755
--- a/src/antlion/controllers/packet_capture.py
+++ b/packages/antlion/controllers/packet_capture.py
@@ -14,23 +14,27 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+from __future__ import annotations
+
+import io
+import logging
 import os
 import threading
 import time
+from dataclasses import dataclass
 
-from antlion import logger
-from antlion.controllers.ap_lib.hostapd_constants import FREQUENCY_MAP
-from antlion.controllers.ap_lib.hostapd_constants import CENTER_CHANNEL_MAP
-from antlion.controllers.ap_lib.hostapd_constants import VHT_CHANNEL
-from antlion.controllers.utils_lib.ssh import connection
-from antlion.controllers.utils_lib.ssh import formatter
-from antlion.controllers.utils_lib.ssh import settings
+from mobly import asserts, logger
+
+from antlion.controllers.ap_lib.hostapd_constants import (
+    CENTER_CHANNEL_MAP,
+    FREQUENCY_MAP,
+    VHT_CHANNEL,
+)
+from antlion.controllers.utils_lib.ssh import connection, formatter, settings
 from antlion.libs.proc.process import Process
+from antlion.types import ControllerConfig, Json
 
-from mobly import asserts
-
-MOBLY_CONTROLLER_CONFIG_NAME = "PacketCapture"
-ACTS_CONTROLLER_REFERENCE_NAME = "packet_capture"
+MOBLY_CONTROLLER_CONFIG_NAME: str = "PacketCapture"
 BSS = "BSS"
 BSSID = "BSSID"
 FREQ = "freq"
@@ -46,33 +50,31 @@
 SSID = "SSID"
 
 
-def create(configs):
+def create(configs: list[ControllerConfig]) -> list[PacketCapture]:
     return [PacketCapture(c) for c in configs]
 
 
-def destroy(pcaps):
-    for pcap in pcaps:
+def destroy(objects: list[PacketCapture]) -> None:
+    for pcap in objects:
         pcap.close()
 
 
-def get_info(pcaps):
-    return [pcap.ssh_settings.hostname for pcap in pcaps]
+def get_info(objects: list[PacketCapture]) -> list[Json]:
+    return [pcap.ssh_settings.hostname for pcap in objects]
 
 
-class PcapProperties(object):
-    """Class to maintain packet capture properties after starting tcpdump.
+@dataclass(frozen=True)
+class PcapProperties:
+    """Packet capture properties."""
 
-    Attributes:
-        proc: Process object of tcpdump
-        pcap_fname: File name of the tcpdump output file
-        pcap_file: File object for the tcpdump output file
-    """
+    proc: Process
+    """Process object of tcpdump."""
 
-    def __init__(self, proc, pcap_fname, pcap_file):
-        """Initialize object."""
-        self.proc = proc
-        self.pcap_fname = pcap_fname
-        self.pcap_file = pcap_file
+    pcap_fname: str
+    """File name of the tcpdump output file."""
+
+    pcap_file: io.BufferedRandom
+    """File object for the tcpdump output file."""
 
 
 class PacketCaptureError(Exception):
@@ -91,7 +93,7 @@
             band.
     """
 
-    def __init__(self, configs):
+    def __init__(self, configs: ControllerConfig) -> None:
         """Initialize objects.
 
         Args:
@@ -99,48 +101,50 @@
         """
         self.ssh_settings = settings.from_config(configs["ssh_config"])
         self.ssh = connection.SshConnection(self.ssh_settings)
-        self.log = logger.create_logger(
-            lambda msg: "[%s|%s] %s"
-            % (MOBLY_CONTROLLER_CONFIG_NAME, self.ssh_settings.hostname, msg)
+        self.log = logger.PrefixLoggerAdapter(
+            logging.getLogger(),
+            {
+                logger.PrefixLoggerAdapter.EXTRA_KEY_LOG_PREFIX: f"[PacketCapture|{self.ssh_settings.hostname}]",
+            },
         )
 
         self._create_interface(MON_2G, "monitor")
         self._create_interface(MON_5G, "monitor")
         self.managed_mode = True
         result = self.ssh.run("ifconfig -a", ignore_status=True)
-        if result.stderr or SCAN_IFACE not in result.stdout:
+        if result.stderr or SCAN_IFACE not in result.stdout.decode("utf-8"):
             self.managed_mode = False
         if self.managed_mode:
             self._create_interface(SCAN_IFACE, "managed")
 
-        self.pcap_properties = dict()
+        self.pcap_properties: dict[str, PcapProperties] = {}
         self._pcap_stop_lock = threading.Lock()
 
-    def _create_interface(self, iface, mode):
+    def _create_interface(self, iface: str, mode: str) -> None:
         """Create interface of monitor/managed mode.
 
         Create mon0/mon1 for 2G/5G monitor mode and wlan2 for managed mode.
         """
         if mode == "monitor":
-            self.ssh.run("ifconfig wlan%s down" % iface[-1], ignore_status=True)
-        self.ssh.run("iw dev %s del" % iface, ignore_status=True)
+            self.ssh.run(f"ifconfig wlan{iface[-1]} down", ignore_status=True)
+        self.ssh.run(f"iw dev {iface} del", ignore_status=True)
         self.ssh.run(
-            "iw phy%s interface add %s type %s" % (iface[-1], iface, mode),
+            f"iw phy{iface[-1]} interface add {iface} type {mode}",
             ignore_status=True,
         )
-        self.ssh.run("ip link set %s up" % iface, ignore_status=True)
-        result = self.ssh.run("iw dev %s info" % iface, ignore_status=True)
-        if result.stderr or iface not in result.stdout:
-            raise PacketCaptureError("Failed to configure interface %s" % iface)
+        self.ssh.run(f"ip link set {iface} up", ignore_status=True)
+        result = self.ssh.run(f"iw dev {iface} info", ignore_status=True)
+        if result.stderr or iface not in result.stdout.decode("utf-8"):
+            raise PacketCaptureError(f"Failed to configure interface {iface}")
 
-    def _cleanup_interface(self, iface):
+    def _cleanup_interface(self, iface: str) -> None:
         """Clean up monitor mode interfaces."""
-        self.ssh.run("iw dev %s del" % iface, ignore_status=True)
-        result = self.ssh.run("iw dev %s info" % iface, ignore_status=True)
-        if not result.stderr or "No such device" not in result.stderr:
-            raise PacketCaptureError("Failed to cleanup monitor mode for %s" % iface)
+        self.ssh.run(f"iw dev {iface} del", ignore_status=True)
+        result = self.ssh.run(f"iw dev {iface} info", ignore_status=True)
+        if not result.stderr or "No such device" not in result.stderr.decode("utf-8"):
+            raise PacketCaptureError(f"Failed to cleanup monitor mode for {iface}")
 
-    def _parse_scan_results(self, scan_result):
+    def _parse_scan_results(self, scan_result: str) -> list[dict[str, str | int]]:
         """Parses the scan dump output and returns list of dictionaries.
 
         Args:
@@ -154,8 +158,8 @@
                 c.) FREQUENCY - WiFi band the network is on.
                 d.) BSSID - BSSID of the network.
         """
-        scan_networks = []
-        network = {}
+        scan_networks: list[dict[str, str | int]] = []
+        network: dict[str, str | int] = {}
         for line in scan_result.splitlines():
             if SEP not in line:
                 continue
@@ -173,7 +177,7 @@
                 network = {}
         return scan_networks
 
-    def get_wifi_scan_results(self):
+    def get_wifi_scan_results(self) -> list[dict[str, str | int]]:
         """Starts a wifi scan on wlan2 interface.
 
         Returns:
@@ -181,14 +185,14 @@
         """
         if not self.managed_mode:
             raise PacketCaptureError("Managed mode not setup")
-        result = self.ssh.run("iw dev %s scan" % SCAN_IFACE)
+        result = self.ssh.run(f"iw dev {SCAN_IFACE} scan")
         if result.stderr:
             raise PacketCaptureError("Failed to get scan dump")
         if not result.stdout:
             return []
-        return self._parse_scan_results(result.stdout)
+        return self._parse_scan_results(result.stdout.decode("utf-8"))
 
-    def start_scan_and_find_network(self, ssid):
+    def start_scan_and_find_network(self, ssid: str) -> bool:
         """Start a wifi scan on wlan2 interface and find network.
 
         Args:
@@ -206,7 +210,9 @@
             time.sleep(3)  # sleep before next scan
         return False
 
-    def configure_monitor_mode(self, band, channel, bandwidth=20):
+    def configure_monitor_mode(
+        self, band: str, channel: int, bandwidth: int = 20
+    ) -> bool:
         """Configure monitor mode.
 
         Args:
@@ -226,9 +232,7 @@
 
         iface = BAND_IFACE[band]
         if bandwidth == 20:
-            self.ssh.run(
-                "iw dev %s set channel %s" % (iface, channel), ignore_status=True
-            )
+            self.ssh.run(f"iw dev {iface} set channel {channel}", ignore_status=True)
         else:
             center_freq = None
             for i, j in CENTER_CHANNEL_MAP[VHT_CHANNEL[bandwidth]]["channels"]:
@@ -242,13 +246,15 @@
                 ignore_status=True,
             )
 
-        result = self.ssh.run("iw dev %s info" % iface, ignore_status=True)
-        if result.stderr or "channel %s" % channel not in result.stdout:
-            self.log.error("Failed to configure monitor mode for %s" % band)
+        result = self.ssh.run(f"iw dev {iface} info", ignore_status=True)
+        if result.stderr or f"channel {channel}" not in result.stdout.decode("utf-8"):
+            self.log.error(f"Failed to configure monitor mode for {band}")
             return False
         return True
 
-    def start_packet_capture(self, band, log_path, pcap_fname):
+    def start_packet_capture(
+        self, band: str, log_path: str, pcap_fname: str
+    ) -> Process | None:
         """Start packet capture for band.
 
         band = 2G starts tcpdump on 'mon0' interface.
@@ -267,22 +273,28 @@
             self.log.error("Invalid band or packet capture already running")
             return None
 
-        pcap_name = "%s_%s.pcap" % (pcap_fname, band)
+        pcap_name = f"{pcap_fname}_{band}.pcap"
         pcap_fname = os.path.join(log_path, pcap_name)
         pcap_file = open(pcap_fname, "w+b")
 
-        tcpdump_cmd = "tcpdump -i %s -w - -U 2>/dev/null" % (BAND_IFACE[band])
+        tcpdump_cmd = f"tcpdump -i {BAND_IFACE[band]} -w - -U 2>/dev/null"
         cmd = formatter.SshFormatter().format_command(
-            tcpdump_cmd, None, self.ssh_settings, extra_flags={"-q": None}
+            tcpdump_cmd, self.ssh_settings, extra_flags={"-q": None}
         )
         pcap_proc = Process(cmd)
-        pcap_proc.set_on_output_callback(lambda msg: pcap_file.write(msg), binary=True)
+
+        def write_to_pcap(data: bytes | str) -> None:
+            if isinstance(data, str):
+                data = data.encode("utf-8")
+            pcap_file.write(data)
+
+        pcap_proc.set_on_output_callback(write_to_pcap, binary=True)
         pcap_proc.start()
 
         self.pcap_properties[band] = PcapProperties(pcap_proc, pcap_fname, pcap_file)
         return pcap_proc
 
-    def stop_packet_capture(self, proc):
+    def stop_packet_capture(self, proc: Process) -> None:
         """Stop the packet capture.
 
         Args:
@@ -300,7 +312,7 @@
             self.pcap_properties[key].pcap_file.close()
             del self.pcap_properties[key]
 
-    def close(self):
+    def close(self) -> None:
         """Cleanup.
 
         Cleans up all the monitor mode interfaces and closes ssh connections.
diff --git a/packages/antlion/controllers/pdu.py b/packages/antlion/controllers/pdu.py
new file mode 100644
index 0000000..503d964
--- /dev/null
+++ b/packages/antlion/controllers/pdu.py
@@ -0,0 +1,293 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import annotations
+
+import enum
+import logging
+import time
+from enum import IntEnum, unique
+from typing import Protocol
+
+from antlion.types import ControllerConfig, Json
+from antlion.validation import MapValidator
+
+MOBLY_CONTROLLER_CONFIG_NAME: str = "PduDevice"
+
+# Allow time for capacitors to discharge.
+DEFAULT_REBOOT_DELAY_SEC = 5.0
+
+
+class PduType(enum.StrEnum):
+    NP02B = "synaccess.np02b"
+    WEBPOWERSWITCH = "digital_loggers.webpowerswitch"
+
+
+class PduError(Exception):
+    """An exception for use within PduDevice implementations"""
+
+
+def create(configs: list[ControllerConfig]) -> list[PduDevice]:
+    """Creates a PduDevice for each config in configs.
+
+    Args:
+        configs: List of configs from PduDevice field.
+            Fields:
+                device: a string "<brand>.<model>" that corresponds to module
+                    in pdu_lib/
+                host: a string of the device ip address
+                username (optional): a string of the username for device sign-in
+                password (optional): a string of the password for device sign-in
+    Return:
+        A list of PduDevice objects.
+    """
+    pdus: list[PduDevice] = []
+    for config in configs:
+        c = MapValidator(config)
+        device = c.get(str, "device")
+        pduType = PduType(device)
+
+        host = c.get(str, "host")
+        username = c.get(str, "username", None)
+        password = c.get(str, "password", None)
+
+        match pduType:
+            case PduType.NP02B:
+                from antlion.controllers.pdu_lib.synaccess.np02b import (
+                    PduDevice as NP02B,
+                )
+
+                pdus.append(NP02B(host, username, password))
+            case PduType.WEBPOWERSWITCH:
+                from antlion.controllers.pdu_lib.digital_loggers.webpowerswitch import (
+                    PduDevice as WebPowerSwitch,
+                )
+
+                pdus.append(WebPowerSwitch(host, username, password))
+    return pdus
+
+
+def destroy(objects: list[PduDevice]) -> None:
+    """Ensure any connections to devices are closed.
+
+    Args:
+        objects: A list of PduDevice objects.
+    """
+    for pdu in objects:
+        pdu.close()
+
+
+def get_info(objects: list[PduDevice]) -> list[Json]:
+    """Retrieves info from a list of PduDevice objects.
+
+    Args:
+        objects: A list of PduDevice objects.
+    Return:
+        A list containing a dictionary for each PduDevice, with keys:
+            'host': a string of the device ip address
+            'username': a string of the username
+            'password': a string of the password
+    """
+    info: list[Json] = []
+    for pdu in objects:
+        info.append(
+            {"host": pdu.host, "username": pdu.username, "password": pdu.password}
+        )
+    return info
+
+
+def get_pdu_port_for_device(
+    device_pdu_config: dict[str, Json], pdus: list[PduDevice]
+) -> tuple[PduDevice, int]:
+    """Retrieves the pdu object and port of that PDU powering a given device.
+    This is especially necessary when there are multiple devices on a single PDU
+    or multiple PDUs registered.
+
+    Args:
+        device_pdu_config: a dict, representing the config of the device.
+        pdus: a list of registered PduDevice objects.
+
+    Returns:
+        A tuple: (PduDevice powering the device, int port number on that PDU).
+
+    Raises:
+        ValueError, if there is no PDU matching the given host in the config.
+
+    Example ACTS config:
+        ...
+        "testbed": [
+            ...
+            "FuchsiaDevice": [
+                {
+                    "ip": "<device_ip>",
+                    "ssh_config": "/path/to/sshconfig",
+                    "PduDevice": {
+                        "host": "192.168.42.185",
+                        "port": 2
+                    }
+                }
+            ],
+            "AccessPoint": [
+                {
+                    "ssh_config": {
+                        ...
+                    },
+                    "PduDevice": {
+                        "host": "192.168.42.185",
+                        "port" 1
+                    }
+                }
+            ],
+            "PduDevice": [
+                {
+                    "device": "synaccess.np02b",
+                    "host": "192.168.42.185"
+                }
+            ]
+        ],
+        ...
+    """
+    config = MapValidator(device_pdu_config)
+    pdu_ip = config.get(str, "host")
+    port = config.get(int, "port")
+    for pdu in pdus:
+        if pdu.host == pdu_ip:
+            return pdu, port
+    raise ValueError(f"No PduDevice with host: {pdu_ip}")
+
+
+class PDU(Protocol):
+    """Control power delivery to a device with a PDU."""
+
+    def port(self, index: int) -> Port:
+        """Access a single port.
+
+        Args:
+            index: Index of the port, likely the number identifier above the outlet.
+
+        Returns:
+            Controller for the specified port.
+        """
+        ...
+
+    def __len__(self) -> int:
+        """Count the number of ports.
+
+        Returns:
+            Number of ports on this PDU.
+        """
+        ...
+
+
+class Port(Protocol):
+    """Controlling the power delivery to a single port of a PDU."""
+
+    def status(self) -> PowerState:
+        """Return the power state for this port.
+
+        Returns:
+            Power state
+        """
+        ...
+
+    def set(self, state: PowerState) -> None:
+        """Set the power state for this port.
+
+        Args:
+            state: Desired power state
+        """
+        ...
+
+    def reboot(self, delay_sec: float = DEFAULT_REBOOT_DELAY_SEC) -> None:
+        """Set the power state OFF then ON after a delay.
+
+        Args:
+            delay_sec: Length to wait before turning back ON. This is important to allow
+                the device's capacitors to discharge.
+        """
+        self.set(PowerState.OFF)
+        time.sleep(delay_sec)
+        self.set(PowerState.ON)
+
+
+@unique
+class PowerState(IntEnum):
+    OFF = 0
+    ON = 1
+
+
+class PduDevice(object):
+    """An object that defines the basic Pdu functionality and abstracts
+    the actual hardware.
+
+    This is a pure abstract class. Implementations should be of the same
+    class name (eg. class PduDevice(pdu.PduDevice)) and exist in
+    pdu_lib/<brand>/<device_name>.py. PduDevice objects should not be
+    instantiated by users directly.
+
+    TODO(http://b/318877544): Replace PduDevice with PDU
+    """
+
+    def __init__(self, host: str, username: str | None, password: str | None) -> None:
+        if type(self) is PduDevice:
+            raise NotImplementedError("Base class: cannot be instantiated directly")
+        self.host = host
+        self.username = username
+        self.password = password
+        self.log = logging.getLogger()
+
+    def on_all(self) -> None:
+        """Turns on all outlets on the device."""
+        raise NotImplementedError("Base class: cannot be called directly")
+
+    def off_all(self) -> None:
+        """Turns off all outlets on the device."""
+        raise NotImplementedError("Base class: cannot be called directly")
+
+    def on(self, outlet: int) -> None:
+        """Turns on specific outlet on the device.
+        Args:
+            outlet: index of the outlet to turn on.
+        """
+        raise NotImplementedError("Base class: cannot be called directly")
+
+    def off(self, outlet: int) -> None:
+        """Turns off specific outlet on the device.
+        Args:
+            outlet: index of the outlet to turn off.
+        """
+        raise NotImplementedError("Base class: cannot be called directly")
+
+    def reboot(self, outlet: int) -> None:
+        """Toggles a specific outlet on the device to off, then to on.
+        Args:
+            outlet: index of the outlet to reboot.
+        """
+        raise NotImplementedError("Base class: cannot be called directly")
+
+    def status(self) -> dict[str, bool]:
+        """Retrieves the status of the outlets on the device.
+
+        Return:
+            A dictionary matching outlet string to:
+                True: if outlet is On
+                False: if outlet is Off
+        """
+        raise NotImplementedError("Base class: cannot be called directly")
+
+    def close(self) -> None:
+        """Closes connection to the device."""
+        raise NotImplementedError("Base class: cannot be called directly")
diff --git a/src/antlion/controllers/pdu_lib/__init__.py b/packages/antlion/controllers/pdu_lib/__init__.py
similarity index 100%
rename from src/antlion/controllers/pdu_lib/__init__.py
rename to packages/antlion/controllers/pdu_lib/__init__.py
diff --git a/src/antlion/controllers/pdu_lib/digital_loggers/__init__.py b/packages/antlion/controllers/pdu_lib/digital_loggers/__init__.py
similarity index 100%
rename from src/antlion/controllers/pdu_lib/digital_loggers/__init__.py
rename to packages/antlion/controllers/pdu_lib/digital_loggers/__init__.py
diff --git a/src/antlion/controllers/pdu_lib/digital_loggers/webpowerswitch.py b/packages/antlion/controllers/pdu_lib/digital_loggers/webpowerswitch.py
similarity index 94%
rename from src/antlion/controllers/pdu_lib/digital_loggers/webpowerswitch.py
rename to packages/antlion/controllers/pdu_lib/digital_loggers/webpowerswitch.py
index 1154f95..660e965 100644
--- a/src/antlion/controllers/pdu_lib/digital_loggers/webpowerswitch.py
+++ b/packages/antlion/controllers/pdu_lib/digital_loggers/webpowerswitch.py
@@ -14,7 +14,8 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from antlion import signals
+from mobly import signals
+
 from antlion.controllers import pdu
 
 # Create an optional dependency for dlipower since it has a transitive
@@ -43,7 +44,7 @@
         - Ethernet Power Controller III
     """
 
-    def __init__(self, host, username, password):
+    def __init__(self, host: str, username: str | None, password: str | None) -> None:
         """
         Note: This may require allowing plaintext password sign in on the
         power switch, which can be configure in the device's control panel.
@@ -67,7 +68,7 @@
                 "userid, or password?"
             )
         else:
-            self.log.info("Connected to WebPowerSwitch (%s)." % host)
+            self.log.info(f"Connected to WebPowerSwitch ({host}).")
 
     def on_all(self):
         """Turn on power to all outlets."""
@@ -144,9 +145,7 @@
             if actual_state == expected_state:
                 return
             else:
-                self.log.debug(
-                    "Outlet %s not yet in state %s" % (outlet, expected_state)
-                )
+                self.log.debug(f"Outlet {outlet} not yet in state {expected_state}")
         raise pdu.PduError(
             "Outlet %s on WebPowerSwitch (%s) failed to reach expected state. \n"
             "Expected State: %s\n"
diff --git a/src/antlion/controllers/pdu_lib/synaccess/__init__.py b/packages/antlion/controllers/pdu_lib/synaccess/__init__.py
similarity index 100%
rename from src/antlion/controllers/pdu_lib/synaccess/__init__.py
rename to packages/antlion/controllers/pdu_lib/synaccess/__init__.py
diff --git a/packages/antlion/controllers/pdu_lib/synaccess/np02b.py b/packages/antlion/controllers/pdu_lib/synaccess/np02b.py
new file mode 100644
index 0000000..d977a2a
--- /dev/null
+++ b/packages/antlion/controllers/pdu_lib/synaccess/np02b.py
@@ -0,0 +1,268 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import annotations
+
+import logging
+import urllib.parse
+import urllib.request
+from dataclasses import dataclass
+from enum import StrEnum, unique
+from typing import Protocol
+
+from mobly import signals
+from mobly.logger import PrefixLoggerAdapter
+
+from antlion.controllers import pdu
+
+
+class PduDevice(pdu.PduDevice):
+    """Implementation of pure abstract PduDevice object for the Synaccess np02b
+    Pdu.
+
+    TODO(http://b/318877544): Replace with NP02B
+    """
+
+    def __init__(self, host: str, username: str | None, password: str | None) -> None:
+        username = username or "admin"  # default username
+        password = password or "admin"  # default password
+        super().__init__(host, username, password)
+        self.np02b = NP02B(host, username, password)
+
+    def on_all(self) -> None:
+        for i in range(1, len(self.np02b) + 1):  # ports are 1-indexed; port(0) raises
+            self.np02b.port(i).set(pdu.PowerState.ON)
+
+    def off_all(self) -> None:
+        for i in range(1, len(self.np02b) + 1):  # ports are 1-indexed; port(0) raises
+            self.np02b.port(i).set(pdu.PowerState.OFF)
+
+    def on(self, outlet: int) -> None:
+        self.np02b.port(outlet).set(pdu.PowerState.ON)
+
+    def off(self, outlet: int) -> None:
+        self.np02b.port(outlet).set(pdu.PowerState.OFF)
+
+    def reboot(self, outlet: int) -> None:
+        self.np02b.port(outlet).reboot()
+
+    def status(self) -> dict[str, bool]:
+        """Returns the status of the np02b outlets.
+
+        Return:
+            Mapping of outlet index ('1' and '2') to true if ON, otherwise
+            false.
+        """
+        return {
+            "1": self.np02b.port(1).status() is pdu.PowerState.ON,
+            "2": self.np02b.port(2).status() is pdu.PowerState.ON,
+        }
+
+    def close(self) -> None:
+        """Ensure connection to device is closed.
+
+        In this implementation, this shouldn't be necessary, but could be in
+        others that open on creation.
+        """
+        return
+
+
+class NP02B(pdu.PDU):
+    """Controller for a Synaccess netBooter NP-02B.
+
+    See https://www.synaccess-net.com/np-02b
+    """
+
+    def __init__(self, host: str, username: str, password: str) -> None:
+        self.client = Client(host, username, password)
+
+    def port(self, index: int) -> pdu.Port:
+        return Port(self.client, index)
+
+    def __len__(self) -> int:
+        return 2
+
+
+class ParsePDUResponseError(signals.TestError):
+    """Error when the PDU returns an unexpected response."""
+
+
+class Client:
+    def __init__(self, host: str, user: str, password: str) -> None:
+        self._url = f"http://{host}/cmd.cgi"
+
+        password_manager = urllib.request.HTTPPasswordMgrWithDefaultRealm()
+        password_manager.add_password(None, host, user, password)
+        auth_handler = urllib.request.HTTPBasicAuthHandler(password_manager)
+        self._opener = urllib.request.build_opener(auth_handler)
+
+        self.log = PrefixLoggerAdapter(
+            logging.getLogger(),
+            {PrefixLoggerAdapter.EXTRA_KEY_LOG_PREFIX: f"[pdu | {host}]"},
+        )
+
+    def request(self, command: Command) -> Response:
+        cmd = command.code()
+        args = command.args()
+        if args:
+            cmd += f' {" ".join(args)}'
+
+        url = f"{self._url}?{urllib.parse.quote_plus(cmd)}"
+        self.log.debug(f"Sending request {url}")
+
+        with self._opener.open(url) as res:
+            body = res.read().decode("utf-8")
+
+        self.log.debug(f"Received response: {body}")
+
+        # Syntax for the response should be in the form:
+        #    "<StatusCode>[,<PowerStatus>]"
+        # For example, StatusCommand returns "$A0,01" when Port 1 is ON and
+        # Port 2 is OFF. (StatusCode is "$A0" on success, "$AF" on failure.)
+        try:
+            tokens = body.split(",", 1)
+            if len(tokens) == 0:
+                raise ParsePDUResponseError(f'Expected a response, found "{body}"')
+            code = tokens[0]
+            status_code = StatusCode(code)
+            power_status = PowerStatus(tokens[1]) if len(tokens) == 2 else None
+        except Exception as e:
+            raise ParsePDUResponseError(
+                f'Failed to parse response from "{body}"'
+            ) from e
+
+        return Response(status_code, power_status)
+
+
+class Port(pdu.Port):
+    def __init__(self, client: Client, port: int) -> None:
+        if port == 0:
+            raise TypeError("Invalid port index 0: ports are 1-indexed")
+        if port > 2:
+            raise TypeError(f"Invalid port index {port}: NP-02B only has 2 ports")
+
+        self.client = client
+        self.port = port
+
+    def status(self) -> pdu.PowerState:
+        resp = self.client.request(StatusCommand())
+        if resp.status != StatusCode.OK:
+            raise ParsePDUResponseError(
+                f"Expected PDU response to be {StatusCode.OK}, got {resp.status}"
+            )
+        if not resp.power:
+            raise ParsePDUResponseError(
+                "Expected PDU response to contain power, got None"
+            )
+        return resp.power.state(self.port)
+
+    def set(self, state: pdu.PowerState) -> None:
+        """Set the power state for this port on the PDU.
+
+        Args:
+            state: Desired power state
+        """
+        resp = self.client.request(SetCommand(self.port, state))
+        if resp.status != StatusCode.OK:
+            raise ParsePDUResponseError(
+                f"Expected PDU response to be {StatusCode.OK}, got {resp.status}"
+            )
+
+        # Verify the newly set power state.
+        status = self.status()
+        if status is not state:
+            raise ParsePDUResponseError(
+                f"Expected PDU port {self.port} to be {state}, got {status}"
+            )
+
+
+@dataclass
+class Response:
+    status: StatusCode
+    power: PowerStatus | None
+
+
+@unique
+class StatusCode(StrEnum):
+    OK = "$A0"
+    FAILED = "$AF"
+
+
+class Command(Protocol):
+    def code(self) -> str:
+        """Return the cmdCode for this command."""
+        ...
+
+    def args(self) -> list[str]:
+        """Return the list of arguments for this command."""
+        ...
+
+
+class PowerStatus:
+    """State of all ports"""
+
+    def __init__(self, states: str) -> None:
+        self.states: list[pdu.PowerState] = []
+        for state in states:
+            self.states.insert(0, pdu.PowerState(int(state)))
+
+    def ports(self) -> int:
+        return len(self.states)
+
+    def state(self, port: int) -> pdu.PowerState:
+        return self.states[port - 1]
+
+
+class SetCommand(Command):
+    def __init__(self, port: int, state: pdu.PowerState) -> None:
+        self.port = port
+        self.state = state
+
+    def code(self) -> str:
+        return "$A3"
+
+    def args(self) -> list[str]:
+        return [str(self.port), str(self.state)]
+
+
+class RebootCommand(Command):
+    def __init__(self, port: int) -> None:
+        self.port = port
+
+    def code(self) -> str:
+        return "$A4"
+
+    def args(self) -> list[str]:
+        return [str(self.port)]
+
+
+class StatusCommand(Command):
+    def code(self) -> str:
+        return "$A5"
+
+    def args(self) -> list[str]:
+        return []
+
+
+class SetAllCommand(Command):
+    def __init__(self, state: pdu.PowerState) -> None:
+        self.state = state
+
+    def code(self) -> str:
+        return "$A7"
+
+    def args(self) -> list[str]:
+        return [str(self.state)]
diff --git a/src/antlion/controllers/sl4a_lib/__init__.py b/packages/antlion/controllers/sl4a_lib/__init__.py
similarity index 100%
rename from src/antlion/controllers/sl4a_lib/__init__.py
rename to packages/antlion/controllers/sl4a_lib/__init__.py
diff --git a/src/antlion/controllers/sl4a_lib/error_reporter.py b/packages/antlion/controllers/sl4a_lib/error_reporter.py
similarity index 96%
rename from src/antlion/controllers/sl4a_lib/error_reporter.py
rename to packages/antlion/controllers/sl4a_lib/error_reporter.py
index e560567..0829d01 100644
--- a/src/antlion/controllers/sl4a_lib/error_reporter.py
+++ b/packages/antlion/controllers/sl4a_lib/error_reporter.py
@@ -31,7 +31,7 @@
 
     def process(self, msg, kwargs):
         """Transforms a log message to be in a given format."""
-        return "[Error Report|%s] %s" % (self.label, msg), kwargs
+        return f"[Error Report|{self.label}] {msg}", kwargs
 
 
 class ErrorReporter(object):
@@ -74,7 +74,7 @@
             if not ticket:
                 return False
 
-            report = ErrorLogger("%s|%s" % (self.name, ticket))
+            report = ErrorLogger(f"{self.name}|{ticket}")
             report.info("Creating error report.")
 
             (
@@ -93,7 +93,7 @@
         adb_uptime = utils.get_command_uptime('"adb .* server"')
         if adb_uptime:
             report.info(
-                "The adb daemon has an uptime of %s " "([[dd-]hh:]mm:ss)." % adb_uptime
+                f"The adb daemon has an uptime of {adb_uptime} ([[dd-]hh:]mm:ss)."
             )
         else:
             report.warning(
@@ -164,7 +164,7 @@
 
     def report_sl4a_state(self, rpc_connection, adb, report):
         """Creates an error report for the state of SL4A."""
-        report.info("Diagnosing Failure over connection %s." % rpc_connection.ports)
+        report.info(f"Diagnosing Failure over connection {rpc_connection.ports}.")
 
         ports = rpc_connection.ports
         forwarded_ports_output = adb.forward("--list")
diff --git a/src/antlion/controllers/sl4a_lib/event_dispatcher.py b/packages/antlion/controllers/sl4a_lib/event_dispatcher.py
similarity index 91%
rename from src/antlion/controllers/sl4a_lib/event_dispatcher.py
rename to packages/antlion/controllers/sl4a_lib/event_dispatcher.py
index 503923c..826bf1d 100644
--- a/src/antlion/controllers/sl4a_lib/event_dispatcher.py
+++ b/packages/antlion/controllers/sl4a_lib/event_dispatcher.py
@@ -14,13 +14,15 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from concurrent.futures import ThreadPoolExecutor
+import logging
 import queue
 import re
 import threading
 import time
+from concurrent.futures import ThreadPoolExecutor
 
-from antlion import logger
+from mobly import logger
+
 from antlion.controllers.sl4a_lib import rpc_client
 
 
@@ -63,15 +65,12 @@
         self._handlers = {}
         self._lock = threading.RLock()
 
-        def _log_formatter(message):
-            """Defines the formatting used in the logger."""
-            return "[E Dispatcher|%s|%s] %s" % (
-                self._serial,
-                self._rpc_client.uid,
-                message,
-            )
-
-        self.log = logger.create_logger(_log_formatter)
+        self.log = logger.PrefixLoggerAdapter(
+            logging.getLogger(),
+            {
+                logger.PrefixLoggerAdapter.EXTRA_KEY_LOG_PREFIX: f"[E Dispatcher|{self._serial}|{self._rpc_client.uid}]",
+            },
+        )
 
     def poll_events(self):
         """Continuously polls all types of events from sl4a.
@@ -90,13 +89,13 @@
                     self.log.warning("Closing due to closed session.")
                     break
                 else:
-                    self.log.warning("Closing due to error: %s." % e)
+                    self.log.warning(f"Closing due to error: {e}.")
                     self.close()
                     raise e
             if not event_obj:
                 continue
             elif "name" not in event_obj:
-                self.log.error("Received Malformed event {}".format(event_obj))
+                self.log.error(f"Received Malformed event {event_obj}")
                 continue
             else:
                 event_name = event_obj["name"]
@@ -113,7 +112,7 @@
                 )
                 self.handle_subscribed_event(event_obj, event_name)
             else:
-                self.log.debug("Queuing event: %r" % event_obj)
+                self.log.debug(f"Queuing event: {event_obj!r}")
                 self._lock.acquire()
                 if event_name in self._event_dict:  # otherwise, cache event
                     self._event_dict[event_name].put(event_obj)
@@ -146,9 +145,7 @@
         self._lock.acquire()
         try:
             if event_name in self._handlers:
-                raise DuplicateError(
-                    "A handler for {} already exists".format(event_name)
-                )
+                raise DuplicateError(f"A handler for {event_name} already exists")
             self._handlers[event_name] = (handler, args)
         finally:
             self._lock.release()
@@ -206,9 +203,7 @@
         e_queue = self.get_event_q(event_name)
 
         if not e_queue:
-            raise IllegalStateError(
-                "Failed to get an event queue for {}".format(event_name)
-            )
+            raise IllegalStateError(f"Failed to get an event queue for {event_name}")
 
         try:
             # Block for timeout
@@ -221,7 +216,7 @@
                 # Block forever on event wait
                 return e_queue.get(True)
         except queue.Empty:
-            msg = "Timeout after {}s waiting for event: {}".format(timeout, event_name)
+            msg = f"Timeout after {timeout}s waiting for event: {event_name}"
             self.log.info(msg)
             raise queue.Empty(msg)
 
@@ -260,9 +255,9 @@
             try:
                 event = self.pop_event(event_name, 1)
                 if consume_events:
-                    self.log.debug("Consuming event: %r" % event)
+                    self.log.debug(f"Consuming event: {event!r}")
                 else:
-                    self.log.debug("Peeking at event: %r" % event)
+                    self.log.debug(f"Peeking at event: {event!r}")
                     ignored_events.append(event)
             except queue.Empty:
                 pass
@@ -270,17 +265,13 @@
             if event and predicate(event, *args, **kwargs):
                 for ignored_event in ignored_events:
                     self.get_event_q(event_name).put(ignored_event)
-                self.log.debug(
-                    "Matched event: %r with %s" % (event, predicate.__name__)
-                )
+                self.log.debug(f"Matched event: {event!r} with {predicate.__name__}")
                 return event
 
             if time.time() > deadline:
                 for ignored_event in ignored_events:
                     self.get_event_q(event_name).put(ignored_event)
-                msg = "Timeout after {}s waiting for event: {}".format(
-                    timeout, event_name
-                )
+                msg = f"Timeout after {timeout}s waiting for event: {event_name}"
                 self.log.info(msg)
                 raise queue.Empty(msg)
 
@@ -318,9 +309,7 @@
                 break
             time.sleep(freq)
         if len(results) == 0:
-            msg = "Timeout after {}s waiting for event: {}".format(
-                timeout, regex_pattern
-            )
+            msg = f"Timeout after {timeout}s waiting for event: {regex_pattern}"
             self.log.error(msg)
             raise queue.Empty(msg)
 
diff --git a/src/antlion/controllers/sl4a_lib/rpc_client.py b/packages/antlion/controllers/sl4a_lib/rpc_client.py
similarity index 96%
rename from src/antlion/controllers/sl4a_lib/rpc_client.py
rename to packages/antlion/controllers/sl4a_lib/rpc_client.py
index cc2cee2..8dce0ae 100644
--- a/src/antlion/controllers/sl4a_lib/rpc_client.py
+++ b/packages/antlion/controllers/sl4a_lib/rpc_client.py
@@ -14,13 +14,15 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 import json
+import logging
 import socket
 import threading
 import time
 from concurrent import futures
 
+from mobly import logger
+
 from antlion import error
-from antlion import logger
 
 # The default timeout value when no timeout is set.
 SOCKET_TIMEOUT = 60
@@ -68,7 +70,7 @@
                 self.data,
             )
         else:
-            return "Error in RPC %s %s:%s" % (self.rpc_name, self.code, self.message)
+            return f"Error in RPC {self.rpc_name} {self.code}:{self.message}"
 
 
 class Sl4aConnectionError(Sl4aException):
@@ -163,11 +165,12 @@
         self.uid = self._free_connections[0].uid
         self._lock = threading.Lock()
 
-        def _log_formatter(message):
-            """Formats the message to be logged."""
-            return "[RPC Service|%s|%s] %s" % (self._serial, self.uid, message)
-
-        self._log = logger.create_logger(_log_formatter)
+        self._log = logger.PrefixLoggerAdapter(
+            logging.getLogger(),
+            {
+                logger.PrefixLoggerAdapter.EXTRA_KEY_LOG_PREFIX: f"[RPC Service|{self._serial}|{self._uid}]",
+            },
+        )
 
         self._working_connections = []
         if max_connections is None:
@@ -188,7 +191,7 @@
             )
         connections = self._free_connections + self._working_connections
         for connection in connections:
-            self._log.debug("Closing connection over ports %s" % connection.ports)
+            self._log.debug(f"Closing connection over ports {connection.ports}")
             connection.close()
         self._free_connections = []
         self._working_connections = []
@@ -307,7 +310,7 @@
                 ticket,
                 timeout or SOCKET_TIMEOUT,
             )
-            self._log.debug("Closing timed out connection over %s" % connection.ports)
+            self._log.debug(f"Closing timed out connection over {connection.ports}")
             connection.close()
             self._working_connections.remove(connection)
             # Re-raise the error as an SL4A Error so end users can process it.
diff --git a/src/antlion/controllers/sl4a_lib/rpc_connection.py b/packages/antlion/controllers/sl4a_lib/rpc_connection.py
similarity index 83%
rename from src/antlion/controllers/sl4a_lib/rpc_connection.py
rename to packages/antlion/controllers/sl4a_lib/rpc_connection.py
index 6b9bf25..cfabcad 100644
--- a/src/antlion/controllers/sl4a_lib/rpc_connection.py
+++ b/packages/antlion/controllers/sl4a_lib/rpc_connection.py
@@ -14,11 +14,15 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 import json
+import logging
 import socket
 import threading
 
-from antlion import logger
+from mobly import logger
+
+from antlion.controllers.adb import AdbProxy
 from antlion.controllers.sl4a_lib import rpc_client
+from antlion.controllers.sl4a_lib.sl4a_ports import Sl4aPorts
 
 # The Session UID when a UID has not been received yet.
 UNKNOWN_UID = -1
@@ -49,7 +53,9 @@
         uid: The SL4A session ID.
     """
 
-    def __init__(self, adb, ports, client_socket, socket_fd, uid=UNKNOWN_UID):
+    def __init__(
+        self, adb: AdbProxy, ports: Sl4aPorts, client_socket, socket_fd, uid=UNKNOWN_UID
+    ):
         self._client_socket = client_socket
         self._socket_file = socket_fd
         self._ticket_counter = 0
@@ -57,16 +63,12 @@
         self.adb = adb
         self.uid = uid
 
-        def _log_formatter(message):
-            """Defines the formatting used in the logger."""
-            return "[SL4A Client|%s|%s|%s] %s" % (
-                self.adb.serial,
-                self.ports.client_port,
-                self.uid,
-                message,
-            )
-
-        self.log = logger.create_logger(_log_formatter)
+        self.log = logger.PrefixLoggerAdapter(
+            logging.getLogger(),
+            {
+                logger.PrefixLoggerAdapter.EXTRA_KEY_LOG_PREFIX: f"[SL4A Client|{self.adb.serial}|{ports.client_port}|{self.uid}]",
+            },
+        )
 
         self.ports = ports
         self.set_timeout(rpc_client.SOCKET_TIMEOUT)
@@ -98,9 +100,9 @@
         if result["status"]:
             self.uid = result["uid"]
         else:
-            self.log.warning("UID not received for connection %s." % self.ports)
+            self.log.warning(f"UID not received for connection {self.ports}.")
             self.uid = UNKNOWN_UID
-        self.log.debug("Created connection over: %s." % self.ports)
+        self.log.debug(f"Created connection over: {self.ports}.")
 
     def _cmd(self, command):
         """Sends an session protocol command to SL4A to establish communication.
@@ -129,12 +131,13 @@
         """Sends a request over the connection."""
         self._socket_file.write(request.encode("utf8") + b"\n")
         self._socket_file.flush()
-        self.log.debug("Sent: " + request)
+        self.log.debug(f"Sent: {request}")
 
     def get_response(self):
         """Returns the first response sent back to the client."""
         data = self._socket_file.readline()
-        self.log.debug("Received: " + data.decode("utf8", errors="replace"))
+        bytes = data.decode("utf8", errors="replace")
+        self.log.debug(f"Received: {bytes}")
         return data
 
     def close(self):
diff --git a/src/antlion/controllers/sl4a_lib/sl4a_manager.py b/packages/antlion/controllers/sl4a_lib/sl4a_manager.py
similarity index 92%
rename from src/antlion/controllers/sl4a_lib/sl4a_manager.py
rename to packages/antlion/controllers/sl4a_lib/sl4a_manager.py
index f3d7047..d093b5a 100644
--- a/src/antlion/controllers/sl4a_lib/sl4a_manager.py
+++ b/packages/antlion/controllers/sl4a_lib/sl4a_manager.py
@@ -13,14 +13,13 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+import logging
 import threading
-
 import time
 
-from antlion import logger
-from antlion.controllers.sl4a_lib import rpc_client
-from antlion.controllers.sl4a_lib import sl4a_session
-from antlion.controllers.sl4a_lib import error_reporter
+from mobly import logger
+
+from antlion.controllers.sl4a_lib import error_reporter, rpc_client, sl4a_session
 
 ATTEMPT_INTERVAL = 0.25
 MAX_WAIT_ON_SERVER_SECONDS = 5
@@ -116,12 +115,16 @@
         self._listen_for_port_lock = threading.Lock()
         self._sl4a_ports = set()
         self.adb = adb
-        self.log = logger.create_logger(
-            lambda msg: "[SL4A Manager|%s] %s" % (adb.serial, msg)
+        self.log = logger.PrefixLoggerAdapter(
+            logging.getLogger(),
+            {
+                logger.PrefixLoggerAdapter.EXTRA_KEY_LOG_PREFIX: f"[SL4A Manager|{adb.serial}]",
+            },
         )
+
         self.sessions = {}
         self._started = False
-        self.error_reporter = error_reporter.ErrorReporter("SL4A %s" % adb.serial)
+        self.error_reporter = error_reporter.ErrorReporter(f"SL4A {adb.serial}")
 
     @property
     def sl4a_ports_in_use(self):
@@ -197,7 +200,7 @@
         Will return none if no port is found.
         """
         possible_ports = self._get_all_ports()
-        self.log.debug("SL4A Ports found: %s" % possible_ports)
+        self.log.debug(f"SL4A Ports found: {possible_ports}")
 
         # Acquire the lock. We lock this method because if multiple threads
         # attempt to get a server at the same time, they can potentially find
@@ -211,7 +214,7 @@
 
     def is_sl4a_installed(self):
         """Returns True if SL4A is installed on the AndroidDevice."""
-        return bool(self.adb.shell("pm path %s" % SL4A_PKG_NAME, ignore_status=True))
+        return bool(self.adb.shell(f"pm path {SL4A_PKG_NAME}", ignore_status=True))
 
     def start_sl4a_service(self):
         """Starts the SL4A Service on the device.
@@ -223,13 +226,13 @@
             self._started = True
             if not self.is_sl4a_installed():
                 raise rpc_client.Sl4aNotInstalledError(
-                    "SL4A is not installed on device %s" % self.adb.serial
+                    f"SL4A is not installed on device {self.adb.serial}"
                 )
-            if self.adb.shell('(ps | grep "S %s") || true' % SL4A_PKG_NAME):
+            if self.adb.shell(f'(ps | grep "S {SL4A_PKG_NAME}") || true'):
                 # Close all SL4A servers not opened by this manager.
                 # TODO(markdr): revert back to closing all ports after
                 # b/76147680 is resolved.
-                self.adb.shell("kill -9 $(pidof %s)" % SL4A_PKG_NAME)
+                self.adb.shell(f"kill -9 $(pidof {SL4A_PKG_NAME})")
             self.adb.shell('settings put global hidden_api_blacklist_exemptions "*"')
             # Start the service if it is not up already.
             self.adb.shell(_SL4A_START_SERVICE_CMD)
@@ -293,7 +296,7 @@
     def stop_service(self):
         """Stops The SL4A Service. Force-stops the SL4A apk."""
         try:
-            self.adb.shell("am force-stop %s" % SL4A_PKG_NAME, ignore_status=True)
+            self.adb.shell(f"am force-stop {SL4A_PKG_NAME}", ignore_status=True)
         except Exception as e:
             self.log.warning("Fail to stop package %s: %s", SL4A_PKG_NAME, e)
         self._started = False
diff --git a/src/antlion/controllers/sl4a_lib/sl4a_ports.py b/packages/antlion/controllers/sl4a_lib/sl4a_ports.py
similarity index 82%
rename from src/antlion/controllers/sl4a_lib/sl4a_ports.py
rename to packages/antlion/controllers/sl4a_lib/sl4a_ports.py
index db9917e..66306f2 100644
--- a/src/antlion/controllers/sl4a_lib/sl4a_ports.py
+++ b/packages/antlion/controllers/sl4a_lib/sl4a_ports.py
@@ -24,14 +24,12 @@
         server_port: The port on the device associated with the SL4A server.
     """
 
-    def __init__(self, client_port=0, forwarded_port=0, server_port=0):
+    def __init__(
+        self, client_port: int = 0, forwarded_port: int = 0, server_port: int = 0
+    ) -> None:
         self.client_port = client_port
         self.forwarded_port = forwarded_port
         self.server_port = server_port
 
-    def __str__(self):
-        return "(%s, %s, %s)" % (
-            self.client_port,
-            self.forwarded_port,
-            self.server_port,
-        )
+    def __str__(self) -> str:
+        return f"({self.client_port}, {self.forwarded_port}, {self.server_port})"
diff --git a/src/antlion/controllers/sl4a_lib/sl4a_session.py b/packages/antlion/controllers/sl4a_lib/sl4a_session.py
similarity index 95%
rename from src/antlion/controllers/sl4a_lib/sl4a_session.py
rename to packages/antlion/controllers/sl4a_lib/sl4a_session.py
index 27edc48..6ecf2fd 100644
--- a/src/antlion/controllers/sl4a_lib/sl4a_session.py
+++ b/packages/antlion/controllers/sl4a_lib/sl4a_session.py
@@ -13,17 +13,20 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+import errno
+import logging
 import socket
 import threading
 
-import errno
+from mobly import logger
 
-from antlion import logger
 from antlion.controllers.adb_lib.error import AdbError
-from antlion.controllers.sl4a_lib import event_dispatcher
-from antlion.controllers.sl4a_lib import rpc_connection
-from antlion.controllers.sl4a_lib import rpc_client
-from antlion.controllers.sl4a_lib import sl4a_ports
+from antlion.controllers.sl4a_lib import (
+    event_dispatcher,
+    rpc_client,
+    rpc_connection,
+    sl4a_ports,
+)
 from antlion.controllers.sl4a_lib.rpc_client import Sl4aStartError
 
 SOCKET_TIMEOUT = 60
@@ -77,15 +80,17 @@
         self._terminate_lock = threading.Lock()
         self._terminated = False
         self.adb = adb
+        self.uid = UNKNOWN_UID
 
-        def _log_formatter(message):
-            return "[SL4A Session|%s|%s] %s" % (self.adb.serial, self.uid, message)
-
-        self.log = logger.create_logger(_log_formatter)
+        self.log = logger.PrefixLoggerAdapter(
+            logging.getLogger(),
+            {
+                logger.PrefixLoggerAdapter.EXTRA_KEY_LOG_PREFIX: f"[SL4A Session|{self.adb.serial}|{self.uid}]",
+            },
+        )
 
         self.forwarded_port = forwarded_port
         self.server_port = device_port
-        self.uid = UNKNOWN_UID
         self.obtain_server_port = get_server_port_func
         self._on_error_callback = on_error_callback
 
diff --git a/src/antlion/controllers/sniffer.py b/packages/antlion/controllers/sniffer.py
similarity index 100%
rename from src/antlion/controllers/sniffer.py
rename to packages/antlion/controllers/sniffer.py
diff --git a/src/antlion/controllers/sniffer_lib/__init__.py b/packages/antlion/controllers/sniffer_lib/__init__.py
similarity index 100%
rename from src/antlion/controllers/sniffer_lib/__init__.py
rename to packages/antlion/controllers/sniffer_lib/__init__.py
diff --git a/src/antlion/controllers/sniffer_lib/local/__init__.py b/packages/antlion/controllers/sniffer_lib/local/__init__.py
similarity index 100%
rename from src/antlion/controllers/sniffer_lib/local/__init__.py
rename to packages/antlion/controllers/sniffer_lib/local/__init__.py
diff --git a/src/antlion/controllers/sniffer_lib/local/local_base.py b/packages/antlion/controllers/sniffer_lib/local/local_base.py
similarity index 95%
rename from src/antlion/controllers/sniffer_lib/local/local_base.py
rename to packages/antlion/controllers/sniffer_lib/local/local_base.py
index 8873350..2b3709c 100644
--- a/src/antlion/controllers/sniffer_lib/local/local_base.py
+++ b/packages/antlion/controllers/sniffer_lib/local/local_base.py
@@ -25,7 +25,9 @@
 import signal
 import subprocess
 import tempfile
-from antlion import logger
+
+from mobly import logger
+
 from antlion import utils
 from antlion.controllers import sniffer
 
@@ -120,12 +122,10 @@
             raise sniffer.InvalidOperationError(
                 "Trying to start a sniff while another is still running!"
             )
-        capture_dir = os.path.join(
-            self._logger.log_path, "Sniffer-{}".format(self._interface)
-        )
+        capture_dir = os.path.join(self._logger.log_path, f"Sniffer-{self._interface}")
         os.makedirs(capture_dir, exist_ok=True)
         self._capture_file_path = os.path.join(
-            capture_dir, "capture_{}.pcap".format(logger.get_log_file_timestamp())
+            capture_dir, f"capture_{logger.get_log_file_timestamp()}.pcap"
         )
 
         self._pre_capture_config(override_configs)
diff --git a/src/antlion/controllers/sniffer_lib/local/tcpdump.py b/packages/antlion/controllers/sniffer_lib/local/tcpdump.py
similarity index 91%
rename from src/antlion/controllers/sniffer_lib/local/tcpdump.py
rename to packages/antlion/controllers/sniffer_lib/local/tcpdump.py
index 85622dc..326adc1 100644
--- a/src/antlion/controllers/sniffer_lib/local/tcpdump.py
+++ b/packages/antlion/controllers/sniffer_lib/local/tcpdump.py
@@ -15,6 +15,7 @@
 # limitations under the License.
 
 import shutil
+
 from antlion.controllers import sniffer
 from antlion.controllers.sniffer_lib.local import local_base
 
@@ -36,7 +37,7 @@
 
     def get_descriptor(self):
         """See base class documentation"""
-        return "local-tcpdump-{}".format(self._interface)
+        return f"local-tcpdump-{self._interface}"
 
     def get_subtype(self):
         """See base class documentation"""
@@ -47,7 +48,7 @@
             self._executable_path, self._interface, self._temp_capture_file_path
         )
         if packet_count is not None:
-            cmd = "{} -c {}".format(cmd, packet_count)
+            cmd = f"{cmd} -c {packet_count}"
         if additional_args is not None:
-            cmd = "{} {}".format(cmd, additional_args)
+            cmd = f"{cmd} {additional_args}"
         return cmd
diff --git a/src/antlion/controllers/sniffer_lib/local/tshark.py b/packages/antlion/controllers/sniffer_lib/local/tshark.py
similarity index 91%
rename from src/antlion/controllers/sniffer_lib/local/tshark.py
rename to packages/antlion/controllers/sniffer_lib/local/tshark.py
index dd79eed..b873fe8 100644
--- a/src/antlion/controllers/sniffer_lib/local/tshark.py
+++ b/packages/antlion/controllers/sniffer_lib/local/tshark.py
@@ -15,6 +15,7 @@
 # limitations under the License.
 
 import shutil
+
 from antlion.controllers import sniffer
 from antlion.controllers.sniffer_lib.local import local_base
 
@@ -50,9 +51,9 @@
             self._executable_path, self._interface, self._temp_capture_file_path
         )
         if duration is not None:
-            cmd = "{} -a duration:{}".format(cmd, duration)
+            cmd = f"{cmd} -a duration:{duration}"
         if packet_count is not None:
-            cmd = "{} -c {}".format(cmd, packet_count)
+            cmd = f"{cmd} -c {packet_count}"
         if additional_args is not None:
-            cmd = "{} {}".format(cmd, additional_args)
+            cmd = f"{cmd} {additional_args}"
         return cmd
diff --git a/src/antlion/controllers/utils_lib/__init__.py b/packages/antlion/controllers/utils_lib/__init__.py
similarity index 100%
rename from src/antlion/controllers/utils_lib/__init__.py
rename to packages/antlion/controllers/utils_lib/__init__.py
diff --git a/src/antlion/controllers/utils_lib/commands/__init__.py b/packages/antlion/controllers/utils_lib/commands/__init__.py
similarity index 100%
rename from src/antlion/controllers/utils_lib/commands/__init__.py
rename to packages/antlion/controllers/utils_lib/commands/__init__.py
diff --git a/packages/antlion/controllers/utils_lib/commands/command.py b/packages/antlion/controllers/utils_lib/commands/command.py
new file mode 100644
index 0000000..93d6f59
--- /dev/null
+++ b/packages/antlion/controllers/utils_lib/commands/command.py
@@ -0,0 +1,115 @@
+# Copyright 2024 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import subprocess
+from typing import IO, Protocol, TypeVar
+
+from mobly import signals
+
+from antlion.runner import CalledProcessError, Runner
+
+
+class Command(Protocol):
+    """A runnable binary."""
+
+    def binary(self) -> str:
+        """Return the binary used for this command."""
+        ...
+
+    def available(self) -> bool:
+        """Return true if this command is available to run."""
+        ...
+
+
+_C = TypeVar("_C", bound=Command)
+
+
+def require(command: _C) -> _C:
+    """Require a command to be available."""
+    if command.available():
+        return command
+    raise signals.TestAbortClass(f"Required command not found: {command.binary()}")
+
+
+def optional(command: _C) -> _C | None:
+    """Optionally require a command to be available."""
+    if command.available():
+        return command
+    return None
+
+
+class LinuxCommand(Command):
+    """A command running on a Linux machine."""
+
+    def __init__(self, runner: Runner, binary: str) -> None:
+        self._runner = runner
+        self._binary = binary
+        self._can_sudo = self._available("sudo")
+
+    def binary(self) -> str:
+        """Return the binary used for this command."""
+        return self._binary
+
+    def available(self) -> bool:
+        """Return true if this command is available to run."""
+        return self._available(self._binary)
+
+    def _available(self, binary: str) -> bool:
+        """Check if binary is available to run."""
+        try:
+            self._runner.run(["command", "-v", binary])
+        except CalledProcessError:
+            return False
+        return True
+
+    def _run(
+        self,
+        args: list[str],
+        sudo: bool = False,
+        timeout_sec: float | None = None,
+        log_output: bool = True,
+    ) -> subprocess.CompletedProcess[bytes]:
+        """Run the command without having to specify the binary.
+
+        Args:
+            args: List of arguments to pass to the binary
+            sudo: Use sudo to execute the binary, if available
+            timeout_sec: Seconds to wait for command to finish
+            log_output: If true, print stdout and stderr to the debug log.
+        """
+        if sudo and self._can_sudo:
+            cmd = ["sudo", self._binary]
+        else:
+            cmd = [self._binary]
+        return self._runner.run(
+            cmd + args, timeout_sec=timeout_sec, log_output=log_output
+        )
+
+    def _start(
+        self,
+        args: list[str],
+        sudo: bool = False,
+        stdout: IO[bytes] | int = subprocess.PIPE,
+    ) -> subprocess.Popen[bytes]:
+        """Start the command without having to specify the binary.
+
+        Args:
+            args: List of arguments to pass to the binary
+            sudo: Use sudo to execute the binary, if available
+        """
+        if sudo and self._can_sudo:
+            cmd = ["sudo", self._binary]
+        else:
+            cmd = [self._binary]
+        return self._runner.start(cmd + args, stdout)
diff --git a/packages/antlion/controllers/utils_lib/commands/date.py b/packages/antlion/controllers/utils_lib/commands/date.py
new file mode 100644
index 0000000..84e628a
--- /dev/null
+++ b/packages/antlion/controllers/utils_lib/commands/date.py
@@ -0,0 +1,34 @@
+# Copyright 2024 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import datetime
+
+from antlion.controllers.utils_lib.commands.command import LinuxCommand
+from antlion.runner import Runner
+
+
+class LinuxDateCommand(LinuxCommand):
+    """Look through current running processes."""
+
+    def __init__(self, runner: Runner, binary: str = "date") -> None:
+        super().__init__(runner, binary)
+
+    def sync(self) -> None:
+        """Synchronize system time.
+
+        Allows for better synchronization between antlion host logs and device
+        logs. Useful for when the device does not have an internet connection.
+        """
+        now = datetime.datetime.now().astimezone().isoformat()
+        self._run(["-s", now], sudo=True)
diff --git a/packages/antlion/controllers/utils_lib/commands/ip.py b/packages/antlion/controllers/utils_lib/commands/ip.py
new file mode 100644
index 0000000..e3de2d5
--- /dev/null
+++ b/packages/antlion/controllers/utils_lib/commands/ip.py
@@ -0,0 +1,187 @@
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import ipaddress
+import re
+import subprocess
+from typing import Iterator
+
+from mobly import signals
+
+from antlion.controllers.utils_lib.commands.command import LinuxCommand
+from antlion.runner import Runner
+
+
class LinuxIpCommand(LinuxCommand):
    """Interface for doing standard IP commands on a linux system.

    Wraps standard shell commands used for ip into a python object that can
    be interacted with more easily.
    """

    def __init__(self, runner: Runner, binary: str = "ip"):
        """Create a LinuxIpCommand.

        Args:
            runner: Runner to use to execute this command.
            binary: Path to binary to use. Defaults to "ip".
        """
        super().__init__(runner, binary)

    @staticmethod
    def _text(output: str | bytes | None) -> str:
        """Coerce subprocess output, which may be str, bytes, or None, to str.

        CalledProcessError.stdout/stderr are bytes when the command was run in
        binary mode (as elsewhere in this package, where stdout is decoded);
        membership tests like `"..." in e.stdout` would raise TypeError on
        bytes without this coercion.
        """
        if output is None:
            return ""
        if isinstance(output, bytes):
            return output.decode("utf-8", errors="replace")
        return output

    def get_ipv4_addresses(
        self, net_interface: str
    ) -> Iterator[tuple[ipaddress.IPv4Interface, ipaddress.IPv4Address | None]]:
        """Gets all ipv4 addresses of a network interface.

        Args:
            net_interface: The network interface to get info on (eg. wlan0).

        Yields:
            Tuples of (address, broadcast) where address is an
            ipaddress.IPv4Interface and broadcast is an ipaddress.IPv4Address,
            or None when the entry lists no broadcast address.
        """
        results = self._run(["addr", "show", "dev", net_interface])
        lines = results.stdout.splitlines()

        # Example stdout:
        # 2: eth0: <BROADCAST,MULTICAST,UP,LOWER_UP> mtu 1500 qdisc mq state UP group default qlen 1000
        #   link/ether 48:0f:cf:3c:9d:89 brd ff:ff:ff:ff:ff:ff
        #   inet 192.168.1.1/24 brd 192.168.1.255 scope global eth0
        #       valid_lft forever preferred_lft forever
        #   inet6 2620:0:1000:1500:a968:a776:2d80:a8b3/64 scope global temporary dynamic
        #       valid_lft 599919sec preferred_lft 80919sec

        for line_bytes in lines:
            line = line_bytes.decode("utf-8").strip()
            match = re.search(r"inet (?P<address>[^\s]*) brd (?P<bcast>[^\s]*)", line)
            if match:
                d = match.groupdict()
                address = ipaddress.IPv4Interface(d["address"])
                bcast = ipaddress.IPv4Address(d["bcast"])
                yield (address, bcast)
                # Skip the broadcast-less pattern below; without this, lines
                # containing "brd" matched both patterns and the same address
                # was yielded twice.
                continue

            match = re.search(r"inet (?P<address>[^\s]*)", line)
            if match:
                d = match.groupdict()
                address = ipaddress.IPv4Interface(d["address"])
                yield (address, None)

    def add_ipv4_address(
        self,
        net_interface: str,
        address: ipaddress.IPv4Interface,
        broadcast: ipaddress.IPv4Address | None = None,
    ) -> None:
        """Adds an ipv4 address to a net_interface.

        Args:
            net_interface: The network interface to get the new ipv4 (eg. wlan0).
            address: The new ipaddress and netmask to add to an interface.
            broadcast: The broadcast address to use for this net_interfaces subnet.
        """
        args = ["addr", "add", str(address)]
        if broadcast:
            args += ["broadcast", str(broadcast)]
        args += ["dev", net_interface]
        self._run(args, sudo=True)

    def remove_ipv4_address(
        self,
        net_interface: str,
        address: ipaddress.IPv4Interface | ipaddress.IPv4Address,
        ignore_status: bool = False,
    ) -> None:
        """Removes an ipv4 address from a network interface.

        Does nothing if the address was already deleted or could not be found.

        Args:
            net_interface: The network interface to remove the ipv4 address
                from (eg. wlan0).
            address: The ip address to remove from the net_interface.
            ignore_status: Unused; kept for backward compatibility with
                existing callers.

        Raises:
            subprocess.CalledProcessError: If the command fails for any reason
                other than the address being absent.
        """
        try:
            self._run(
                ["addr", "del", str(address), "dev", net_interface],
                sudo=True,
            )
        except subprocess.CalledProcessError as e:
            if e.returncode == 2 or "Address not found" in self._text(e.stdout):
                # Do not fail if the address was already deleted or couldn't be
                # found.
                return
            raise

    def set_ipv4_address(
        self,
        net_interface: str,
        address: ipaddress.IPv4Interface,
        broadcast: ipaddress.IPv4Address | None = None,
    ) -> None:
        """Set the ipv4 address.

        Sets the ipv4 address of a network interface. If the network interface
        has any other ipv4 addresses these will be cleared.

        Args:
            net_interface: The network interface to set the ip address on (eg. wlan0).
            address: The ip address and subnet to give the net_interface.
            broadcast: The broadcast address to use for the subnet.
        """
        self.clear_ipv4_addresses(net_interface)
        self.add_ipv4_address(net_interface, address, broadcast)

    def clear_ipv4_addresses(self, net_interface: str) -> None:
        """Clears all ipv4 addresses registered to a net_interface.

        Args:
            net_interface: The network interface to clear addresses from
                (eg. wlan0).

        Raises:
            signals.TestError: If an address could not be removed and is still
                registered to the interface.
        """
        for address, _ in self.get_ipv4_addresses(net_interface):
            try:
                self.remove_ipv4_address(net_interface, address)
            except subprocess.CalledProcessError as e:
                stderr = self._text(e.stderr)
                stdout = self._text(e.stdout)

                if "RTNETLINK answers: Cannot assign requested address" not in stderr:
                    raise signals.TestError(
                        f"Unable to remove address {address}: {stderr}",
                        extras={
                            "stdout": stdout,
                            "returncode": e.returncode,
                        },
                    ) from e

                # It is possible that the address has already been removed by
                # the time this command was called.
                remaining = [a for a, _ in self.get_ipv4_addresses(net_interface)]
                if address in remaining:
                    raise signals.TestError(
                        f"Unable to remove address {address}. The address is still "
                        f"registered to {net_interface}, despite call for removal.",
                        extras={
                            "stderr": stderr,
                            "stdout": stdout,
                            "returncode": e.returncode,
                        },
                    ) from e

                # Benign race: another process removed the address first. Log
                # and keep clearing the remaining addresses. (The previous
                # implementation fell through to an unconditional raise here,
                # so even this benign case aborted the clear.)
                self._runner.log.warning(
                    "Unable to remove address %s. The address was "
                    "removed by another process.",
                    address,
                )
diff --git a/packages/antlion/controllers/utils_lib/commands/journalctl.py b/packages/antlion/controllers/utils_lib/commands/journalctl.py
new file mode 100644
index 0000000..c1a21f7
--- /dev/null
+++ b/packages/antlion/controllers/utils_lib/commands/journalctl.py
@@ -0,0 +1,85 @@
+# Copyright 2024 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import shlex
+from datetime import datetime
+
+from antlion.controllers.utils_lib.commands import pgrep
+from antlion.controllers.utils_lib.commands.command import LinuxCommand, require
+from antlion.runner import Runner
+
# Timestamp format accepted by systemd, produced via datetime.strftime.
# See https://man7.org/linux/man-pages/man7/systemd.time.7.html#PARSING_TIMESTAMPS
SYSTEMD_TIMESTAMP_FORMAT = "%Y-%m-%d %H:%M:%S UTC"

# Wait a maximum of 5 minutes for journalctl to output all systemd journal logs
# since boot.
JOURNALCTL_TIMEOUT_SEC = 60 * 5
+
+
class LinuxJournalctlCommand(LinuxCommand):
    """Print log entries from the systemd journal.

    Only supported on Linux distributions using systemd.
    """

    def __init__(self, runner: Runner, binary: str = "journalctl") -> None:
        super().__init__(runner, binary)
        # Used by available() to verify systemd-journald is actually running.
        self._pgrep = require(pgrep.LinuxPgrepCommand(runner))
        # Timestamp of the last logs() call; None means logs() never ran.
        self._last_ran: datetime | None = None
        # Logs captured by save_and_reset(), prepended to the next logs() call.
        self._logs_before_reset: str | None = None

    def available(self) -> bool:
        """Return True if journalctl exists and systemd-journald is running."""
        if not super().available():
            return False
        return self._pgrep.find("systemd-journal") is not None

    def logs(self) -> str:
        """Return log entries since the last run or current boot, in that order."""
        if self._last_ran:
            args = [
                "--since",
                shlex.quote(self._last_ran.strftime(SYSTEMD_TIMESTAMP_FORMAT)),
            ]
        else:
            args = ["--boot"]

        # Local import: this module's header imports only the datetime class.
        from datetime import timezone

        # datetime.utcnow() is deprecated since Python 3.12; an aware UTC
        # timestamp renders identically through SYSTEMD_TIMESTAMP_FORMAT.
        # Recorded *before* fetching so the next call overlaps with this one
        # rather than missing entries logged while journalctl runs.
        self._last_ran = datetime.now(timezone.utc)

        self._runner.log.debug("Running journalctl")
        logs = self._run(
            args,
            sudo=True,
            log_output=False,
            timeout_sec=JOURNALCTL_TIMEOUT_SEC,
        ).stdout.decode("utf-8")

        if self._logs_before_reset:
            return f"{self._logs_before_reset}\n{logs}"
        return logs

    def set_runner(self, runner: Runner) -> None:
        """Set a new runner.

        Use when underlying connection to the device refreshes.
        """
        self._runner = runner

    def save_and_reset(self) -> None:
        """Save logs and reset the last known run time.

        Run before every reboot!
        """
        self._logs_before_reset = self.logs()
        self._last_ran = None
diff --git a/packages/antlion/controllers/utils_lib/commands/nmcli.py b/packages/antlion/controllers/utils_lib/commands/nmcli.py
new file mode 100644
index 0000000..c773573
--- /dev/null
+++ b/packages/antlion/controllers/utils_lib/commands/nmcli.py
@@ -0,0 +1,232 @@
+# Copyright 2024 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import annotations
+
+import enum
+from dataclasses import dataclass
+
+from antlion.controllers.utils_lib.commands import pgrep
+from antlion.controllers.utils_lib.commands.command import LinuxCommand, require
+from antlion.runner import Runner
+
+
class LinuxNmcliCommand(LinuxCommand):
    """Control the Linux NetworkManager.

    The NetworkManager daemon attempts to make networking configuration and
    operation as painless and automatic as possible by managing the primary
    network connection and other network interfaces, like Ethernet, Wi-Fi, and
    Mobile Broadband devices. NetworkManager will connect any network device
    when a connection for that device becomes available, unless that behavior is
    disabled.
    """

    def __init__(self, runner: Runner, binary: str = "nmcli") -> None:
        super().__init__(runner, binary)
        # Used by available() to check that the NetworkManager daemon is running.
        self._pgrep = require(pgrep.LinuxPgrepCommand(runner))

    def available(self) -> bool:
        """Return True if nmcli exists and NetworkManager is running."""
        if not super().available():
            return False
        return self._pgrep.find("NetworkManager") is not None

    def setup_device(self, device: str) -> None:
        """Create a device connection suitable for antlion testing.

        Disables IPv4 DHCP so that tests can manage IP addresses manually, but
        still enables automatic IPv6 link-local address assignment.

        Args:
            device: Name of the device to set up.
        """
        # Remove existing connections associated with device.
        for conn in self._get_connections():
            if conn.device == device:
                self._delete_connection(conn)

        # IPv4Method/IPv6Method are StrEnums, so members are valid str args.
        self._run(
            [
                "connection",
                "add",
                "ifname",
                device,
                "type",
                "ethernet",
                "ipv4.method",
                IPv4Method.DISABLED,
                "ipv6.method",
                IPv6Method.LINK_LOCAL,
            ],
            sudo=True,
        )

    def _get_connections(self) -> list[Connection]:
        """List NetworkManager connection profiles.

        Returns:
            One Connection per profile reported by nmcli.
        """
        res = self._run(
            [
                "--get-values",
                "name,uuid,type,device",
                "connection",
            ],
            sudo=True,
        )
        connections: list[Connection] = []
        for raw_line in res.stdout.splitlines():
            line = raw_line.decode("utf-8")
            # NOTE(review): nmcli escapes literal ":" in values as "\:"; this
            # simple split does not unescape them — confirm profile names and
            # device names never contain colons in practice.
            tokens = line.split(":", 3)
            if len(tokens) < 4:
                # Skip blank or malformed lines; unconditional indexing here
                # previously raised IndexError on a trailing empty line.
                continue
            connections.append(
                Connection(
                    name=tokens[0],
                    uuid=tokens[1],
                    type=tokens[2],
                    device=tokens[3],
                )
            )
        return connections

    def _delete_connection(self, conn: Connection) -> None:
        """Delete a connection profile, identified by its name."""
        self._run(
            [
                "connection",
                "delete",
                "id",
                conn.name,
            ],
            sudo=True,
        )

    def _down_device(self, device: str) -> None:
        """Bring the device down via `nmcli device down`."""
        self._run(
            [
                "device",
                "down",
                device,
            ],
            sudo=True,
        )

    def _up_device(self, device: str) -> None:
        """Bring the device up via `nmcli device up`."""
        self._run(
            [
                "device",
                "up",
                device,
            ],
            sudo=True,
        )

    def set_ipv4_method(self, device: str, method: IPv4Method) -> None:
        """Set the IPv4 connection method.

        Args:
            device: Name of the device to modify.
            method: Connection method to use.
        """
        self._run(
            [
                "device",
                "modify",
                device,
                "ipv4.method",
                method,
            ],
            sudo=True,
        )
+
+
@dataclass(frozen=True)
class Connection:
    """A NetworkManager connection profile as reported by nmcli.

    Fields mirror nmcli's "name,uuid,type,device" get-values columns.
    """

    # Human-readable profile name.
    name: str
    # Unique identifier assigned by NetworkManager.
    uuid: str
    # Connection type string (e.g. an ethernet type).
    type: str
    # Network interface the profile is associated with; presumably empty when
    # not attached to a device — verify against nmcli output.
    device: str
+
+
class IPv4Method(enum.StrEnum):
    """Values accepted by nmcli's "ipv4.method" connection setting."""

    AUTO = "auto"
    """Enables automatic IPv4 address assignment from DHCP, PPP, or similar services."""

    MANUAL = "manual"
    """Enables the configuration of static IPv4 addresses on the interface.

    Note that you must set at least one IP address and subnet mask in the
    "ipv4.addresses" property.
    """

    DISABLED = "disabled"
    """Disables the IPv4 protocol in this connection profile."""

    SHARED = "shared"
    """Provides network access to other computers.

    If you do not specify an IP address and subnet mask in "ipv4.addresses",
    NetworkManager assigns 10.42.x.1/24 to the interface. Additionally,
    NetworkManager starts a DHCP server and DNS forwarder. Hosts that connect to
    this interface will then receive an IP address from the configured range,
    and NetworkManager configures NAT to map client addresses to the one of the
    current default network connection.
    """

    LINK_LOCAL = "link-local"
    """Enables link-local addresses according to RFC 3927.

    NetworkManager assigns a random link-local address from the 169.254.0.0/16
    subnet to the interface.
    """
+
+
class IPv6Method(enum.StrEnum):
    """Values accepted by nmcli's "ipv6.method" connection setting."""

    AUTO = "auto"
    """Enables IPv6 auto-configuration.

    By default, NetworkManager uses Router Advertisements and, if the router
    announces the "managed" flag, NetworkManager requests an IPv6 address and
    prefix from a DHCPv6 server.
    """

    DHCP = "dhcp"
    """Requests an IPv6 address and prefix from a DHCPv6 server.

    Note that DHCPv6 does not have options to provide routes and the default
    gateway. As a consequence, by using the "dhcp" method, connections are
    limited to their own subnet.
    """

    MANUAL = "manual"
    """Enables the configuration of static IPv6 addresses on the interface.

    Note that you must set at least one IP address and prefix in the
    "ipv6.addresses" property.
    """

    DISABLED = "disabled"
    """Disables the IPv6 protocol in this connection profile."""

    IGNORE = "ignore"
    """Make no changes to the IPv6 configuration on the interface.

    For example, you can then use the "accept_ra" feature of the kernel to
    accept Router Advertisements.
    """

    SHARED = "shared"
    """Provides network access to other computers.

    NetworkManager requests a prefix from an upstream DHCPv6 server, assigns an
    address to the interface, and announces the prefix to clients that connect
    to this interface.
    """

    LINK_LOCAL = "link-local"
    """Enables link-local addresses according to RFC 3927.

    Assigns a random link-local address from the fe80::/64 subnet to the
    interface.
    """
new file mode 100644
index 0000000..cf6f271
--- /dev/null
+++ b/packages/antlion/controllers/utils_lib/commands/pgrep.py
@@ -0,0 +1,43 @@
+# Copyright 2024 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import subprocess
+
+from antlion.controllers.utils_lib.commands.command import LinuxCommand
+from antlion.runner import Runner
+
+
class LinuxPgrepCommand(LinuxCommand):
    """Look through current running processes."""

    def __init__(self, runner: Runner, binary: str = "pgrep") -> None:
        super().__init__(runner, binary)

    def find(self, process: str) -> list[int] | None:
        """Find a process by name.

        Args:
            process: Name of the process to query

        Returns:
            List of process IDs if running, otherwise None.
        """
        try:
            # -x: match the process name exactly.
            result = self._run(["-x", process])
        except subprocess.CalledProcessError as e:
            if e.stdout or e.stderr:
                # pgrep should not output anything to stdout or stderr
                raise
            # A failing exit status with no output means no matches were found.
            return None
        return [int(pid_line) for pid_line in result.stdout.splitlines()]
diff --git a/packages/antlion/controllers/utils_lib/commands/route.py b/packages/antlion/controllers/utils_lib/commands/route.py
new file mode 100644
index 0000000..f7bb5e0
--- /dev/null
+++ b/packages/antlion/controllers/utils_lib/commands/route.py
@@ -0,0 +1,200 @@
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import ipaddress
+import re
+import subprocess
+from typing import Iterator, Literal
+
+from mobly import signals
+
+from antlion.controllers.utils_lib.commands.command import LinuxCommand
+from antlion.runner import Runner
+
+
class Error(Exception):
    """Exception thrown when a valid ip command experiences errors."""

    # NOTE(review): currently not raised anywhere within this module.
+
+
class LinuxRouteCommand(LinuxCommand):
    """Interface for doing standard ip route commands on a linux system."""

    def __init__(self, runner: Runner, binary: str = "ip"):
        """Create a LinuxRouteCommand.

        Args:
            runner: Runner to use to execute this command.
            binary: Path to binary to use. Defaults to "ip".
        """
        super().__init__(runner, binary)

    @staticmethod
    def _text(output: str | bytes | None) -> str:
        """Coerce subprocess output, which may be str, bytes, or None, to str.

        Membership tests like `"..." in e.stderr` raise TypeError when stderr
        is bytes (the runner's stdout is decoded elsewhere in this package).
        """
        if output is None:
            return ""
        if isinstance(output, bytes):
            return output.decode("utf-8", errors="replace")
        return output

    def add_route(
        self,
        net_interface: str,
        address: ipaddress.IPv4Interface | ipaddress.IPv6Interface | Literal["default"],
        proto: str = "static",
    ) -> None:
        """Add an entry to the ip routing table.

        Will add a route for either a specific ip address, or a network.

        Args:
            net_interface: Any packet that sends through this route will be sent
                using this network interface (eg. wlan0).
            address: The address to use. If a network is given then the entire
                subnet will be routed. If "default" is given then this will set
                the default route.
            proto: Routing protocol identifier of this route (e.g. kernel,
                redirect, boot, static, ra). See `man ip-route(8)` for details.

        Raises:
            signals.TestError: If the route already exists or the device is
                down. (The previous docstring referenced a NetworkInterfaceDown
                exception that is not raised here.)
        """
        try:
            self._run(
                [
                    "route",
                    "add",
                    str(address),
                    "dev",
                    net_interface,
                    "proto",
                    proto,
                ],
                sudo=True,
            )
        except subprocess.CalledProcessError as e:
            stderr = self._text(e.stderr)
            stdout = self._text(e.stdout)
            if "File exists" in stderr:
                raise signals.TestError(
                    "Route already exists",
                    extras={
                        "stderr": stderr,
                        "stdout": stdout,
                        "returncode": e.returncode,
                    },
                ) from e
            if "Network is down" in stderr:
                raise signals.TestError(
                    "Device must be up for adding a route.",
                    extras={
                        "stderr": stderr,
                        "stdout": stdout,
                        "returncode": e.returncode,
                    },
                ) from e
            raise

    def get_routes(
        self, net_interface: str | None = None
    ) -> Iterator[
        tuple[
            ipaddress.IPv4Interface | ipaddress.IPv6Interface | Literal["default"], str
        ]
    ]:
        """Get the routes in the ip routing table.

        Args:
            net_interface: If given, only retrieve routes that have been
                registered to go through this network interface (eg. wlan0).

        Yields:
            Tuples of (address, net_interface). For the default route, address
            is the literal string "default"; otherwise it is an
            ipaddress.IPv4Interface or ipaddress.IPv6Interface.
        """
        result_ipv4 = self._run(["-4", "route", "show"])
        result_ipv6 = self._run(["-6", "route", "show"])

        lines = result_ipv4.stdout.splitlines() + result_ipv6.stdout.splitlines()

        # Scan through each line for valid route entries.
        # Example output:
        # default via 192.168.1.254 dev eth0  proto static
        # 192.168.1.0/24 dev eth0  proto kernel  scope link  src 172.22.100.19  metric 1
        # 192.168.2.1 dev eth2 proto kernel scope link metric 1
        # fe80::/64 dev wlan0 proto static metric 1024
        for line_bytes in lines:
            line = line_bytes.decode("utf-8")
            if "dev" not in line:
                continue

            if line.startswith("default"):
                # The default route entry is formatted differently.
                match = re.search(r"dev (?P<net_interface>\S+)", line)
                if not match:
                    continue

                iface = match.groupdict()["net_interface"]
                assert isinstance(iface, str)

                if net_interface and iface != net_interface:
                    continue

                yield ("default", iface)
            else:
                # Normal route entry: "<address> dev <interface> ...".
                match = re.search(
                    r"(?P<address>[0-9A-Fa-f\.\:/]+) dev (?P<net_interface>\S+)", line
                )
                if not match:
                    continue

                d = match.groupdict()

                address_raw = d["address"]
                assert isinstance(address_raw, str)

                iface = d["net_interface"]
                assert isinstance(iface, str)

                if net_interface and iface != net_interface:
                    continue

                yield (ipaddress.ip_interface(address_raw), iface)

    def remove_route(
        self,
        address: ipaddress.IPv4Interface | ipaddress.IPv6Interface | Literal["default"],
        net_interface: str | None = None,
    ) -> None:
        """Removes a route from the ip routing table.

        Removes a route from the ip routing table. If the route does not exist
        nothing is done.

        Args:
            address: The address of the route to remove.
            net_interface: If specified the route being removed is registered to
                go through this network interface (eg. wlan0)

        Raises:
            signals.TestError: If the route exists but could not be deleted.
        """
        try:
            args = ["route", "del", str(address)]
            if net_interface:
                args += ["dev", net_interface]
            # Deleting a route modifies the kernel routing table, which needs
            # root privileges just like add_route does; the previous
            # implementation inconsistently ran without sudo.
            self._run(args, sudo=True)
        except subprocess.CalledProcessError as e:
            if "RTNETLINK answers: No such process" in self._text(e.stderr):
                # The route didn't exist.
                return
            raise signals.TestError(f"Failed to delete route {address}: {e}") from e

    def clear_routes(self, net_interface: str) -> None:
        """Clears all routes.

        Args:
            net_interface: The network interface to clear routes on.
        """
        for address, device in self.get_routes(net_interface):
            self.remove_route(address, device)
diff --git a/src/antlion/controllers/utils_lib/commands/shell.py b/packages/antlion/controllers/utils_lib/commands/shell.py
similarity index 60%
rename from src/antlion/controllers/utils_lib/commands/shell.py
rename to packages/antlion/controllers/utils_lib/commands/shell.py
index ed105af..cebd166 100644
--- a/src/antlion/controllers/utils_lib/commands/shell.py
+++ b/packages/antlion/controllers/utils_lib/commands/shell.py
@@ -12,11 +12,15 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+from __future__ import annotations
+
+import re
 import shlex
 import signal
 import time
+from typing import Iterator
 
-from antlion.libs.proc import job
+from antlion.runner import CalledProcessError, Runner
 
 
 class ShellCommand(object):
@@ -29,68 +33,40 @@
     Note: At the moment this only works with the ssh runner.
     """
 
-    def __init__(self, runner, working_dir=None):
    def __init__(self, runner: Runner) -> None:
        """Creates a new shell command invoker.

        Args:
            runner: The object that will run the shell commands.
        """
        self._runner = runner
-        self._working_dir = working_dir
 
-    def run(self, command, timeout=60):
-        """Runs a generic command through the runner.
-
-        Takes the command and prepares it to be run in the target shell using
-        this objects settings.
-
-        Args:
-            command: The command to run.
-            timeout: How long to wait for the command (in seconds).
-
-        Returns:
-            A CmdResult object containing the results of the shell command.
-
-        Raises:
-            job.Error: When the command executed but had an error.
-        """
-        if self._working_dir:
-            command_str = "cd %s; %s" % (self._working_dir, command)
-        else:
-            command_str = command
-
-        return self._runner.run(command_str, timeout=timeout)
-
-    def is_alive(self, identifier):
+    def is_alive(self, identifier: str | int) -> bool:
         """Checks to see if a program is alive.
 
-        Checks to see if a program is alive on the shells enviroment. This can
-        be used to check on generic programs, or a specific program using
-        a pid.
+        Checks to see if a program is alive on the shells environment. This can
+        be used to check on generic programs, or a specific program using a pid.
 
         Args:
-            identifier: string or int, Used to identify the program to check.
-                        if given an int then it is assumed to be a pid. If
-                        given a string then it will be used as a search key
-                        to compare on the running processes.
+            identifier: Used to identify the program to check. if given an int
+                then it is assumed to be a pid. If given a string then it will
+                be used as a search key to compare on the running processes.
         Returns:
-            True if a process was found running, false otherwise.
+            True if a process was found running, otherwise False.
         """
         try:
             if isinstance(identifier, str):
-                self.run("ps aux | grep -v grep | grep %s" % identifier)
-            elif isinstance(identifier, int):
-                self.signal(identifier, 0)
+                ps = self._runner.run(["ps", "aux"])
+                if re.search(identifier, ps.stdout.decode("utf-8")):
+                    return True
+                return False
             else:
-                raise ValueError("Bad type was given for identifier")
-
-            return True
-        except job.Error:
+                self.signal(identifier, 0)
+                return True
+        except CalledProcessError:
             return False
 
-    def get_pids(self, identifier):
+    def get_pids(self, identifier: str) -> Iterator[int]:
         """Gets the pids of a program.
 
         Searches for a program with a specific name and grabs the pids for all
@@ -103,15 +79,15 @@
                   if no pids were found.
         """
         try:
-            result = self.run("ps aux | grep -v grep | grep %s" % identifier)
-        except job.Error as e:
-            if e.result.exit_status == 1:
+            ps = self._runner.run(["ps", "aux"])
+        except CalledProcessError as e:
+            if e.returncode == 1:
                 # Grep returns exit status 1 when no lines are selected. This is
                 # an expected return code.
                 return
             raise e
 
-        lines = result.stdout.splitlines()
+        lines = ps.stdout.decode("utf-8").splitlines()
 
         # The expected output of the above command is like so:
         # bob    14349  0.0  0.0  34788  5552 pts/2    Ss   Oct10   0:03 bash
@@ -119,13 +95,16 @@
         # Where the format is:
         # USER    PID  ...
         for line in lines:
+            if re.search(identifier, line) is None:
+                continue
+
             pieces = line.split()
             try:
                 yield int(pieces[1])
             except StopIteration:
                 return
 
-    def search_file(self, search_string, file_name):
+    def search_file(self, search_string: str, file_name: str) -> bool:
         """Searches through a file for a string.
 
         Args:
@@ -136,12 +115,12 @@
             True if the string or pattern was found, False otherwise.
         """
         try:
-            self.run("grep %s %s" % (shlex.quote(search_string), file_name))
+            self._runner.run(["grep", shlex.quote(search_string), file_name])
             return True
-        except job.Error:
+        except CalledProcessError:
             return False
 
-    def read_file(self, file_name):
+    def read_file(self, file_name: str) -> str:
         """Reads a file through the shell.
 
         Args:
@@ -150,49 +129,41 @@
         Returns:
             A string of the files contents.
         """
-        return self.run("cat %s" % file_name).stdout
+        return self._runner.run(["cat", file_name]).stdout.decode("utf-8")
 
-    def write_file(self, file_name, data):
+    def write_file(self, file_name: str, data: str) -> None:
         """Writes a block of data to a file through the shell.
 
         Args:
             file_name: The name of the file to write to.
             data: The string of data to write.
         """
-        return self.run("echo %s > %s" % (shlex.quote(data), file_name))
+        # Intentionally not passed through shlex.escape() to allow stdin
+        # redirection to a remote file.
+        self._runner.run(["cat", "-", ">", file_name], stdin=data.encode("utf-8"))
 
-    def append_file(self, file_name, data):
-        """Appends a block of data to a file through the shell.
-
-        Args:
-            file_name: The name of the file to write to.
-            data: The string of data to write.
-        """
-        return self.run("echo %s >> %s" % (shlex.quote(data), file_name))
-
-    def touch_file(self, file_name):
+    def touch_file(self, file_name: str) -> None:
         """Creates a file through the shell.
 
         Args:
             file_name: The name of the file to create.
         """
-        self.write_file(file_name, "")
+        self._runner.run(["touch", file_name])
 
-    def delete_file(self, file_name):
+    def delete_file(self, file_name: str) -> None:
         """Deletes a file through the shell.
 
         Args:
             file_name: The name of the file to delete.
         """
         try:
-            self.run("rm -r %s" % file_name)
-        except job.Error as e:
-            if "No such file or directory" in e.result.stderr:
+            self._runner.run(["rm", "-r", file_name])
+        except CalledProcessError as e:
+            if b"No such file or directory" in e.stderr:
                 return
+            raise e
 
-            raise
-
-    def kill(self, identifier, timeout=10):
+    def kill(self, identifier: str | int, timeout_sec: int = 10) -> None:
         """Kills a program or group of programs through the shell.
 
         Kills all programs that match an identifier through the shell. This
@@ -204,8 +175,8 @@
 
         Args:
             identifier: A string used to identify the program.
-            timeout: The time to wait for all programs to die. Each signal will
-                     take an equal portion of this time.
+            timeout_sec: The time to wait for all programs to die. Each signal
+                will take an equal portion of this time.
         """
         if isinstance(identifier, int):
             pids = [identifier]
@@ -214,12 +185,12 @@
 
         signal_queue = [signal.SIGINT, signal.SIGTERM, signal.SIGKILL]
 
-        signal_duration = timeout / len(signal_queue)
+        signal_duration = timeout_sec / len(signal_queue)
         for sig in signal_queue:
             for pid in pids:
                 try:
                     self.signal(pid, sig)
-                except job.Error:
+                except CalledProcessError:
                     pass
 
             start_time = time.time()
@@ -230,7 +201,7 @@
             if not pids:
                 break
 
-    def signal(self, pid, sig):
+    def signal(self, pid: int, sig: int) -> None:
         """Sends a specific signal to a program.
 
         Args:
@@ -238,7 +209,7 @@
             sig: The signal to send.
 
         Raises:
-            job.Error: Raised when the signal fail to reach
+            CalledProcessError: Raised when the signal fails to reach
                        the specified program.
         """
-        self.run("kill -%d %d" % (sig, pid))
+        self._runner.run(["kill", f"-{sig}", str(pid)])
diff --git a/packages/antlion/controllers/utils_lib/commands/tcpdump.py b/packages/antlion/controllers/utils_lib/commands/tcpdump.py
new file mode 100644
index 0000000..f15e8cd
--- /dev/null
+++ b/packages/antlion/controllers/utils_lib/commands/tcpdump.py
@@ -0,0 +1,124 @@
+# Copyright 2024 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import annotations
+
+import time
+from io import BufferedRandom
+from pathlib import Path
+from subprocess import Popen
+from types import TracebackType
+
+from mobly.logger import epoch_to_log_line_timestamp, normalize_log_line_timestamp
+
+from antlion import utils
+from antlion.controllers.utils_lib.commands.command import LinuxCommand
+from antlion.runner import Runner
+
+# Max time to wait for tcpdump to terminate after sending SIGTERM.
+TERMINATE_TIMEOUT_SEC: float = 5.0
+
+
+class LinuxTcpdumpCommand(LinuxCommand):
+    """Dump traffic on a network."""
+
+    def __init__(self, runner: Runner, binary: str = "tcpdump") -> None:
+        super().__init__(runner, binary)
+
+    def start(self, interface: str, output_dir: Path) -> TcpdumpProcess:
+        """Start tcpdump.
+
+        Args:
+            interface: Listen on this interface.
+            output_dir: Path to the output directory.
+
+        Returns:
+            A context manager to run tcpdump. Must be used in a with statement
+            for the process to start and exit correctly.
+        """
+        time_stamp = normalize_log_line_timestamp(
+            epoch_to_log_line_timestamp(utils.get_current_epoch_time())
+        )
+        return TcpdumpProcess(
+            self, interface, pcap=Path(output_dir, f"tcpdump_{time_stamp}.pcap")
+        )
+
+
+class TcpdumpProcess:
+    """Process running tcpdump."""
+
+    def __init__(
+        self,
+        tcpdump: LinuxTcpdumpCommand,
+        interface: str,
+        pcap: Path,
+    ) -> None:
+        self._tcpdump = tcpdump
+        self._log = tcpdump._runner.log
+        self._interface = interface
+        self._pcap_path = pcap
+        self._pcap_file: BufferedRandom | None = None
+        self._process: Popen[bytes] | None = None
+
+    def __enter__(self) -> None:
+        self._log.info(
+            "Streaming %s packet capture to %s", self._interface, self._pcap_path
+        )
+        self._pcap_file = self._pcap_path.open("w+b")
+        self._process = self._tcpdump._start(
+            [
+                "-i",
+                self._interface,
+                # Stream pcap as bytes to stdout
+                "-w",
+                "-",
+            ],
+            sudo=True,
+            stdout=self._pcap_file,
+        )
+
+    def __exit__(
+        self,
+        _exit_type: type[BaseException] | None,
+        _exit_value: BaseException | None,
+        _exit_traceback: TracebackType | None,
+    ) -> None:
+        if self._pcap_file is None or self._process is None:
+            # tcpdump is not running.
+            return
+
+        self._process.terminate()
+        timeout = time.time() + TERMINATE_TIMEOUT_SEC
+        while time.time() < timeout:
+            exit_code = self._process.poll()
+            if exit_code is not None:
+                self._pcap_file.close()
+                self._pcap_file = None
+                break
+        else:
+            self._process.kill()
+            self._pcap_file.close()
+            self._pcap_file = None
+            raise TimeoutError("tcpdump did not terminate after sending SIGTERM")
+
+        self._log.info(
+            "%s packet capture wrote to %s", self._interface, self._pcap_path
+        )
+
+        _, stderr = self._process.communicate()
+        self._log.debug(
+            "tcpdump returned with status %i\nstderr: %s",
+            exit_code,
+            stderr.decode("utf-8", errors="replace"),
+        )
diff --git a/src/antlion/controllers/utils_lib/ssh/__init__.py b/packages/antlion/controllers/utils_lib/ssh/__init__.py
similarity index 100%
rename from src/antlion/controllers/utils_lib/ssh/__init__.py
rename to packages/antlion/controllers/utils_lib/ssh/__init__.py
diff --git a/packages/antlion/controllers/utils_lib/ssh/connection.py b/packages/antlion/controllers/utils_lib/ssh/connection.py
new file mode 100644
index 0000000..0344444
--- /dev/null
+++ b/packages/antlion/controllers/utils_lib/ssh/connection.py
@@ -0,0 +1,336 @@
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+import os
+import re
+import shutil
+import subprocess
+import tempfile
+import threading
+import time
+from typing import IO
+
+from mobly import logger
+
+from antlion.controllers.utils_lib.ssh import formatter
+from antlion.libs.proc import job
+from antlion.runner import CalledProcessError, CalledProcessTransportError, Runner
+
+
+class SshConnection(Runner):
+    """Provides a connection to a remote machine through ssh.
+
+    Provides the ability to connect to a remote machine and execute a command
+    on it. The connection will try to establish a persistent connection When
+    a command is run. If the persistent connection fails it will attempt
+    to connect normally.
+    """
+
+    @property
+    def socket_path(self):
+        """Returns: The os path to the master socket file."""
+        if self._master_ssh_tempdir is None:
+            raise AttributeError(
+                "socket_path is not available yet; run setup_master_ssh() first"
+            )
+        return os.path.join(self._master_ssh_tempdir, "socket")
+
+    def __init__(self, settings):
+        """
+        Args:
+            settings: The ssh settings to use for this connection.
+            formatter: The object that will handle formatting ssh command
+                       for use with the background job.
+        """
+        self._settings = settings
+        self._formatter = formatter.SshFormatter()
+        self._lock = threading.Lock()
+        self._master_ssh_proc = None
+        self._master_ssh_tempdir: str | None = None
+
+        self.log = logger.PrefixLoggerAdapter(
+            logging.getLogger(),
+            {
+                logger.PrefixLoggerAdapter.EXTRA_KEY_LOG_PREFIX: f"[SshConnection | {self._settings.hostname}]",
+            },
+        )
+
+    def __enter__(self):
+        return self
+
+    def __exit__(self, _, __, ___):
+        self.close()
+
+    def __del__(self):
+        self.close()
+
+    def setup_master_ssh(self, timeout_sec: int = 5):
+        """Sets up the master ssh connection.
+
+        Sets up the initial master ssh connection if it has not already been
+        started.
+
+        Args:
+            timeout_sec: The time to wait for the master ssh connection to
+                be made.
+
+        Raises:
+            Error: When setting up the master ssh connection fails.
+        """
+        with self._lock:
+            if self._master_ssh_proc is not None:
+                socket_path = self.socket_path
+                if (
+                    not os.path.exists(socket_path)
+                    or self._master_ssh_proc.poll() is not None
+                ):
+                    self.log.debug(
+                        "Master ssh connection to %s is down.", self._settings.hostname
+                    )
+                    self._cleanup_master_ssh()
+
+            if self._master_ssh_proc is None:
+                # Create a shared socket in a temp location.
+                self._master_ssh_tempdir = tempfile.mkdtemp(prefix="ssh-master")
+
+                # Setup flags and options for running the master ssh
+                # -N: Do not execute a remote command.
+                # ControlMaster: Spawn a master connection.
+                # ControlPath: The master connection socket path.
+                extra_flags: dict[str, str | int | None] = {"-N": None}
+                extra_options: dict[str, str | int | bool] = {
+                    "ControlMaster": True,
+                    "ControlPath": self.socket_path,
+                    "BatchMode": True,
+                }
+
+                # Construct the command and start it.
+                master_cmd = self._formatter.format_ssh_local_command(
+                    self._settings, extra_flags=extra_flags, extra_options=extra_options
+                )
+                self.log.info("Starting master ssh connection.")
+                self._master_ssh_proc = job.run_async(master_cmd)
+
+                end_time = time.time() + timeout_sec
+
+                while time.time() < end_time:
+                    if os.path.exists(self.socket_path):
+                        break
+                    time.sleep(0.2)
+                else:
+                    self._cleanup_master_ssh()
+                    raise CalledProcessTransportError(
+                        "Master ssh connection timed out."
+                    )
+
+    def run(
+        self,
+        command: str | list[str],
+        stdin: bytes | None = None,
+        timeout_sec: float | None = 60.0,
+        log_output: bool = True,
+        ignore_status: bool = False,
+        attempts: int = 2,
+    ) -> subprocess.CompletedProcess[bytes]:
+        """Runs a remote command over ssh.
+
+        Will ssh to a remote host and run a command. This method will
+        block until the remote command is finished.
+
+        Args:
+            command: The command to execute over ssh.
+            stdin: Standard input to command.
+            timeout_sec: seconds to wait for command to finish.
+            log_output: If true, print stdout and stderr to the debug log.
+            ignore_status: True to ignore the exit code of the remote
+                           subprocess.  Note that if you do ignore status codes,
+                           you should handle non-zero exit codes explicitly.
+            attempts: Number of attempts before giving up on command failures.
+
+        Returns:
+            Results of the ssh command.
+
+        Raises:
+            CalledProcessError: when the process exits with a non-zero status
+                and ignore_status is False.
+            subprocess.TimeoutExpired: When the remote command took to long to
+                execute.
+            CalledProcessTransportError: when the underlying transport fails
+        """
+        if attempts < 1:
+            raise TypeError("attempts must be a positive, non-zero integer")
+
+        try:
+            self.setup_master_ssh(self._settings.connect_timeout)
+        except CalledProcessTransportError:
+            self.log.warning(
+                "Failed to create master ssh connection, using "
+                "normal ssh connection."
+            )
+
+        extra_options: dict[str, str | int | bool] = {"BatchMode": True}
+        if self._master_ssh_proc:
+            extra_options["ControlPath"] = self.socket_path
+
+        if isinstance(command, list):
+            full_command = " ".join(command)
+        else:
+            full_command = command
+
+        terminal_command = self._formatter.format_command(
+            full_command, self._settings, extra_options=extra_options
+        )
+
+        dns_retry_count = 2
+        while True:
+            try:
+                result = job.run(
+                    terminal_command,
+                    stdin=stdin,
+                    log_output=log_output,
+                    timeout_sec=timeout_sec,
+                )
+
+                return subprocess.CompletedProcess(
+                    terminal_command,
+                    result.returncode,
+                    result.stdout,
+                    result.stderr,
+                )
+            except CalledProcessError as e:
+                # Check for SSH errors.
+                if e.returncode == 255:
+                    stderr = e.stderr.decode("utf-8", errors="replace")
+
+                    had_dns_failure = re.search(
+                        r"^ssh: .*: Name or service not known",
+                        stderr,
+                        flags=re.MULTILINE,
+                    )
+                    if had_dns_failure:
+                        dns_retry_count -= 1
+                        if not dns_retry_count:
+                            raise CalledProcessTransportError(
+                                "DNS failed to find host"
+                            ) from e
+                        self.log.debug("Failed to connect to host, retrying...")
+                        continue
+
+                    had_timeout = re.search(
+                        r"^ssh: connect to host .* port .*: "
+                        r"Connection timed out\r$",
+                        stderr,
+                        flags=re.MULTILINE,
+                    )
+                    if had_timeout:
+                        raise CalledProcessTransportError("Ssh timed out") from e
+
+                    permission_denied = "Permission denied" in stderr
+                    if permission_denied:
+                        raise CalledProcessTransportError("Permission denied") from e
+
+                    unknown_host = re.search(
+                        r"ssh: Could not resolve hostname .*: "
+                        r"Name or service not known",
+                        stderr,
+                        flags=re.MULTILINE,
+                    )
+                    if unknown_host:
+                        raise CalledProcessTransportError("Unknown host") from e
+
+                    # Retry unknown SSH errors.
+                    self.log.error(f"An unknown error has occurred. Job result: {e}")
+                    ping_output = job.run(
+                        ["ping", self._settings.hostname, "-c", "3", "-w", "1"],
+                        ignore_status=True,
+                    )
+                    self.log.error(f"Ping result: {ping_output}")
+                    if attempts > 1:
+                        self._cleanup_master_ssh()
+                        return self.run(
+                            command,
+                            stdin,
+                            timeout_sec,
+                            log_output,
+                            ignore_status,
+                            attempts - 1,
+                        )
+                    raise CalledProcessTransportError(
+                        "The job failed for unknown reasons"
+                    ) from e
+
+                if not ignore_status: raise e
+                return subprocess.CompletedProcess(terminal_command, e.returncode, e.stdout, e.stderr)
+
+    def run_async(self, command: str) -> subprocess.CompletedProcess[bytes]:
+        """Starts up a background command over ssh.
+
+        Will ssh to a remote host and startup a command. This method will
+        block until there is confirmation that the remote command has started.
+
+        Args:
+            command: The command to execute over ssh, given as a single
+                     string.
+
+        Returns:
+            The result of the command to launch the background job.
+
+        Raises:
+            CalledProcessError: when the process fails to start
+            subprocess.TimeoutExpired: when the timeout expires while waiting
+                for a child process
+            CalledProcessTransportError: when the underlying transport fails
+        """
+        return self.run(f"({command}) < /dev/null > /dev/null 2>&1 & echo -n $!")
+
+    def start(
+        self,
+        command: list[str],
+        stdout: IO[bytes] | int = subprocess.PIPE,
+        stdin: IO[bytes] | int = subprocess.PIPE,
+    ) -> subprocess.Popen[bytes]:
+        """Execute a child program in a new process."""
+        extra_options: dict[str, str | int | bool] = {"BatchMode": True}
+        if self._master_ssh_proc:
+            extra_options["ControlPath"] = self.socket_path
+
+        terminal_command = self._formatter.format_command(
+            " ".join(command),
+            self._settings,
+            extra_options=extra_options,
+        )
+        return subprocess.Popen(terminal_command, stdout=stdout, stdin=stdin)
+
+    def close(self) -> None:
+        """Clean up open connections to remote host."""
+        self._cleanup_master_ssh()
+
+    def _cleanup_master_ssh(self) -> None:
+        """
+        Release all resources (process, temporary directory) used by an active
+        master SSH connection.
+        """
+        # If a master SSH connection is running, kill it.
+        if self._master_ssh_proc is not None:
+            self.log.debug("Nuking master_ssh_job.")
+            self._master_ssh_proc.kill()
+            self._master_ssh_proc.wait()
+            self._master_ssh_proc = None
+
+        # Remove the temporary directory for the master SSH socket.
+        if self._master_ssh_tempdir is not None:
+            self.log.debug("Cleaning master_ssh_tempdir.")
+            shutil.rmtree(self._master_ssh_tempdir)
+            self._master_ssh_tempdir = None
diff --git a/src/antlion/controllers/utils_lib/ssh/formatter.py b/packages/antlion/controllers/utils_lib/ssh/formatter.py
similarity index 65%
rename from src/antlion/controllers/utils_lib/ssh/formatter.py
rename to packages/antlion/controllers/utils_lib/ssh/formatter.py
index 41450ca..c753c6d 100644
--- a/src/antlion/controllers/utils_lib/ssh/formatter.py
+++ b/packages/antlion/controllers/utils_lib/ssh/formatter.py
@@ -13,13 +13,18 @@
 # limitations under the License.
 
 
+from typing import Iterator
+
+from antlion.controllers.utils_lib.ssh.settings import SshSettings
+
+
 class SshFormatter(object):
     """Handles formatting ssh commands.
 
     Handler for formatting chunks of the ssh command to run.
     """
 
-    def format_ssh_executable(self, settings):
+    def format_ssh_executable(self, settings: SshSettings) -> str:
         """Format the executable name.
 
         Formats the executable name as a string.
@@ -32,7 +37,7 @@
         """
         return settings.executable
 
-    def format_host_name(self, settings):
+    def format_host_name(self, settings: SshSettings) -> str:
         """Format hostname.
 
         Formats the hostname to connect to.
@@ -43,9 +48,9 @@
         Returns:
             A string of the connection host name to connect to.
         """
-        return "%s@%s" % (settings.username, settings.hostname)
+        return f"{settings.username}@{settings.hostname}"
 
-    def format_value(self, value):
+    def format_value(self, value: object) -> str:
         """Formats a command line value.
 
         Takes in a value and formats it so it can be safely used in the
@@ -62,7 +67,9 @@
 
         return str(value)
 
-    def format_options_list(self, options):
+    def format_options_list(
+        self, options: dict[str, str | int | bool]
+    ) -> Iterator[str]:
         """Format the option list.
 
         Formats a dictionary of options into a list of strings to be used
@@ -78,16 +85,16 @@
             option = options[option_name]
 
             yield "-o"
-            yield "%s=%s" % (option_name, self.format_value(option))
+            yield f"{option_name}={self.format_value(option)}"
 
-    def format_flag_list(self, flags):
+    def format_flag_list(self, flags: dict[str, str | int | None]) -> Iterator[str]:
         """Format the flags list.
 
         Formats a dictionary of flags into a list of strings to be used
         on the command line.
 
         Args:
-            flags: A dictonary of options.
+            flags: A dictionary of options.
 
         Returns:
             An iterator of strings that should be used on the command line.
@@ -99,7 +106,12 @@
             if flag is not None:
                 yield self.format_value(flag)
 
-    def format_ssh_local_command(self, settings, extra_flags={}, extra_options={}):
+    def format_ssh_local_command(
+        self,
+        settings: SshSettings,
+        extra_flags: dict[str, str | int | None] | None = None,
+        extra_options: dict[str, str | int | bool] | None = None,
+    ) -> list[str]:
         """Formats the local part of the ssh command.
 
         Formats the local section of the ssh command. This is the part of the
@@ -108,13 +120,18 @@
 
         Args:
             settings: The ssh settings.
-            extra_flags: Extra flags to inlcude.
+            extra_flags: Extra flags to include.
             extra_options: Extra options to include.
 
         Returns:
             An array of strings that make up the command and its local
             arguments.
         """
+        if extra_flags is None:
+            extra_flags = {}
+        if extra_options is None:
+            extra_options = {}
+
         options = settings.construct_ssh_options()
         for extra_option_name in extra_options:
             options[extra_option_name] = extra_options[extra_option_name]
@@ -133,56 +150,13 @@
 
         return base_command
 
-    def format_ssh_command(
-        self, remote_command, settings, extra_flags={}, extra_options={}
-    ):
-        """Formats the full ssh command.
-
-        Creates the full format for an ssh command.
-
-        Args:
-            remote_command: A string that represents the remote command to
-                            execute.
-            settings: The ssh settings to use.
-            extra_flags: Extra flags to include in the settings.
-            extra_options: Extra options to include in the settings.
-
-        Returns:
-            A list of strings that make up the total ssh command.
-        """
-        local_command = self.format_ssh_local_command(
-            settings, extra_flags, extra_options
-        )
-
-        local_command.append(remote_command)
-        return local_command
-
-    def format_remote_command(self, command, env):
-        """Formats the remote part of the ssh command.
-
-        Formatts the command that will run on the remote machine.
-
-        Args:
-            command: string, The command to be executed.
-            env: Enviroment variables to add to the remote envirment.
-
-        Returns:
-            A string that represents the command line to execute on the remote
-            machine.
-        """
-        if not env:
-            env_str = ""
-        else:
-            env_str = "export "
-            for name in env:
-                value = env[name]
-                env_str += "%s=%s " % (name, str(value))
-            env_str += ";"
-
-        execution_line = "%s %s;" % (env_str, command)
-        return execution_line
-
-    def format_command(self, command, env, settings, extra_flags={}, extra_options={}):
+    def format_command(
+        self,
+        command: str,
+        settings: SshSettings,
+        extra_flags: dict[str, str | int | None] | None = None,
+        extra_options: dict[str, str | int | bool] | None = None,
+    ) -> list[str]:
         """Formats a full command.
 
         Formats the full command to run in order to run a command on a remote
@@ -190,13 +164,21 @@
 
         Args:
             command: The command to run on the remote machine. Can either be
-                     a string or a list.
-            env: The enviroment variables to include on the remote machine.
+                     a string or a list of strings.
+
             settings: The ssh settings to use.
             extra_flags: Extra flags to include with the settings.
             extra_options: Extra options to include with the settings.
+
+        Returns:
+            A list of strings that make up the total ssh command.
         """
-        remote_command = self.format_remote_command(command, env)
-        return self.format_ssh_command(
-            remote_command, settings, extra_flags, extra_options
+        if extra_flags is None:
+            extra_flags = {}
+        if extra_options is None:
+            extra_options = {}
+
+        local_command = self.format_ssh_local_command(
+            settings, extra_flags, extra_options
         )
+        return local_command + [command]
diff --git a/src/antlion/controllers/utils_lib/ssh/settings.py b/packages/antlion/controllers/utils_lib/ssh/settings.py
similarity index 72%
rename from src/antlion/controllers/utils_lib/ssh/settings.py
rename to packages/antlion/controllers/utils_lib/ssh/settings.py
index ead5844..725ade7 100644
--- a/src/antlion/controllers/utils_lib/ssh/settings.py
+++ b/packages/antlion/controllers/utils_lib/ssh/settings.py
@@ -20,7 +20,8 @@
     An instance of SshSettings or None
 """
 
-from typing import Dict, Optional, Union
+from antlion.types import Json
+from antlion.validation import MapValidator
 
 
 class SshSettings(object):
@@ -44,13 +45,13 @@
         self,
         hostname: str,
         username: str,
+        identity_file: str,
         port: int = 22,
         host_file: str = "/dev/null",
         connect_timeout: int = 30,
         alive_interval: int = 300,
         executable: str = "/usr/bin/ssh",
-        identity_file: Optional[str] = None,
-        ssh_config: Optional[str] = None,
+        ssh_config: str | None = None,
     ):
         self.username = username
         self.hostname = hostname
@@ -62,7 +63,7 @@
         self.identity_file = identity_file
         self.ssh_config = ssh_config
 
-    def construct_ssh_options(self) -> Dict[str, Union[str, int, bool]]:
+    def construct_ssh_options(self) -> dict[str, str | int | bool]:
         """Construct the ssh options.
 
         Constructs a dictionary of option that should be used with the ssh
@@ -71,14 +72,14 @@
         Returns:
             A dictionary of option name to value.
         """
-        current_options = {}
+        current_options: dict[str, str | int | bool] = {}
         current_options["StrictHostKeyChecking"] = False
         current_options["UserKnownHostsFile"] = self.host_file
         current_options["ConnectTimeout"] = self.connect_timeout
         current_options["ServerAliveInterval"] = self.alive_interval
         return current_options
 
-    def construct_ssh_flags(self) -> Dict[str, Union[None, str, int]]:
+    def construct_ssh_flags(self) -> dict[str, None | str | int]:
         """Construct the ssh flags.
 
         Constructs what flags should be used in the ssh connection.
@@ -87,7 +88,7 @@
             A dictionary of flag name to value. If value is none then it is
             treated as a binary flag.
         """
-        current_flags = {}
+        current_flags: dict[str, None | str | int] = {}
         current_flags["-a"] = None
         current_flags["-x"] = None
         current_flags["-p"] = self.port
@@ -98,28 +99,19 @@
         return current_flags
 
 
-def from_config(config: Dict[str, Union[str, int]]) -> Optional[SshSettings]:
-    if config is None:
-        return None  # Having no settings is not an error
+def from_config(config: Json) -> SshSettings:
+    """Parse SSH settings from config JSON."""
 
-    ssh_binary_path = config.get("ssh_binary_path", "/usr/bin/ssh")
-    user = config.get("user", None)
-    host = config.get("host", None)
-    port = config.get("port", 22)
-    identity_file = config.get("identity_file", None)
-    ssh_config = config.get("ssh_config", None)
-    connect_timeout = config.get("connect_timeout", 30)
-    if user is None or host is None:
-        raise ValueError(
-            f"Malformed SSH config did not include user and host keys: {config}"
-        )
+    if not isinstance(config, dict):
+        raise ValueError(f"config must be a dict, got {type(config)}")
 
+    c = MapValidator(config)
     return SshSettings(
-        host,
-        user,
-        port=port,
-        identity_file=identity_file,
-        ssh_config=ssh_config,
-        connect_timeout=connect_timeout,
-        executable=ssh_binary_path,
+        hostname=c.get(str, "host"),
+        username=c.get(str, "user"),
+        identity_file=c.get(str, "identity_file"),
+        port=c.get(int, "port", 22),
+        ssh_config=c.get(str, "ssh_config", None),
+        connect_timeout=c.get(int, "connect_timeout", 30),
+        executable=c.get(str, "ssh_binary_path", "/usr/bin/ssh"),
     )
diff --git a/packages/antlion/decorators.py b/packages/antlion/decorators.py
new file mode 100644
index 0000000..4564978
--- /dev/null
+++ b/packages/antlion/decorators.py
@@ -0,0 +1,126 @@
+#!/usr/bin/env python3
+#
+# Copyright 2023 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import annotations
+
+import typing
+from threading import RLock
+from typing import Callable, Generic, TypeVar
+
+S = TypeVar("S")
+T = TypeVar("T")
+O = TypeVar("O")
+
+
+_NOT_FOUND = object()
+
+
+class cached_property(Generic[T, S]):  # pylint: disable=invalid-name
+    """A property whose value is computed then cached; deleter can be overridden.
+
+    Similar to functools.cached_property(), with the addition of a deleter function that
+    can be overridden to provide custom clean up. The deleter function doesn't throw an
+    AttributeError if the value doesn't already exist.
+
+    Useful for properties that are tied to the lifetime of a device and need to be
+    recomputed upon reboot of said device.
+
+    Example:
+
+    ```
+    class LinuxDevice:
+        @cached_property
+        def ssh(self) -> SSH:
+            return SSH(self.ip)
+
+        @ssh.deleter
+        def ssh(self, ssh: SSH) -> None:
+            ssh.terminate_connections()
+    ```
+    """
+
+    def __init__(
+        self, func: Callable[[S], T], deleter: Callable[[S, T], None] | None = None
+    ) -> None:
+        self.func = func
+        self._deleter = deleter
+        self.name: str | None = None
+        self.__doc__ = func.__doc__
+        self.lock = RLock()
+
+    def __set_name__(self, owner: O, name: str) -> None:
+        if self.name is None:
+            self.name = name
+        elif name != self.name:
+            raise TypeError(
+                "Cannot assign the same cached_property to two different names "
+                f"({self.name!r} and {name!r})."
+            )
+
+    def _cache(self, instance: S) -> dict[str, object]:
+        if self.name is None:
+            raise TypeError(
+                "Cannot use cached_property instance without calling __set_name__ on it."
+            )
+        try:
+            return instance.__dict__
+        except (
+            AttributeError
+        ):  # not all objects have __dict__ (e.g. class defines slots)
+            msg = (
+                f"No '__dict__' attribute on {type(instance).__name__!r} "
+                f"instance to cache {self.name!r} property."
+            )
+            raise TypeError(msg) from None
+
+    def __get__(self, instance: S, owner: O | None = None) -> T:
+        cache = self._cache(instance)
+        assert self.name is not None
+        val = cache.get(self.name, _NOT_FOUND)
+        if val is _NOT_FOUND:
+            with self.lock:
+                # check if another thread filled cache while we awaited lock
+                val = cache.get(self.name, _NOT_FOUND)
+                if val is _NOT_FOUND:
+                    val = self.func(instance)
+                    try:
+                        cache[self.name] = val
+                    except TypeError:
+                        msg = (
+                            f"The '__dict__' attribute on {type(instance).__name__!r} instance "
+                            f"does not support item assignment for caching {self.name!r} property."
+                        )
+                        raise TypeError(msg) from None
+                    return val
+        return typing.cast(T, val)
+
+    def __delete__(self, instance: S) -> None:
+        cache = self._cache(instance)
+        assert self.name is not None
+        with self.lock:
+            val = cache.pop(self.name, _NOT_FOUND)
+            if val is _NOT_FOUND:
+                return
+            if self._deleter:
+                self._deleter(instance, typing.cast(T, val))
+
+    def deleter(self, deleter: Callable[[S, T], None]) -> cached_property:
+        self._deleter = deleter
+        prop = type(self)(self.func, deleter)
+        prop.name = self.name
+        prop.__doc__ = self.__doc__
+        prop.lock = self.lock
+        return prop
diff --git a/src/antlion/error.py b/packages/antlion/error.py
similarity index 93%
rename from src/antlion/error.py
rename to packages/antlion/error.py
index bf69b7d..e4f0a3c 100644
--- a/src/antlion/error.py
+++ b/packages/antlion/error.py
@@ -1,10 +1,10 @@
 """This class is where error information will be stored.
 """
 
-from antlion.signals import TestError
+from mobly import signals
 
 
-class ActsError(TestError):
+class ActsError(signals.TestError):
     """Base Acts Error"""
 
     def __init__(self, *args, **kwargs):
diff --git a/src/antlion/event/__init__.py b/packages/antlion/event/__init__.py
similarity index 100%
rename from src/antlion/event/__init__.py
rename to packages/antlion/event/__init__.py
diff --git a/packages/antlion/event/decorators.py b/packages/antlion/event/decorators.py
new file mode 100644
index 0000000..42b6dca
--- /dev/null
+++ b/packages/antlion/event/decorators.py
@@ -0,0 +1,29 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from antlion.event.subscription_handle import StaticSubscriptionHandle
+
+
+def subscribe_static(event_type, event_filter=None, order=0):
+    """A decorator that subscribes a static or module-level function.
+
+    This function must be registered manually.
+    """
+
+    class InnerSubscriptionHandle(StaticSubscriptionHandle):
+        def __init__(self, func):
+            super().__init__(event_type, func, event_filter=event_filter, order=order)
+
+    return InnerSubscriptionHandle
diff --git a/src/antlion/event/event.py b/packages/antlion/event/event.py
similarity index 100%
rename from src/antlion/event/event.py
rename to packages/antlion/event/event.py
diff --git a/src/antlion/event/event_bus.py b/packages/antlion/event/event_bus.py
similarity index 99%
rename from src/antlion/event/event_bus.py
rename to packages/antlion/event/event_bus.py
index 5488b80..c9ec9f0 100644
--- a/src/antlion/event/event_bus.py
+++ b/packages/antlion/event/event_bus.py
@@ -14,8 +14,8 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 import bisect
-import logging
 import inspect
+import logging
 from threading import RLock
 
 from antlion.event.event_subscription import EventSubscription
@@ -292,5 +292,5 @@
             self.event_type, self.func, filter_fn=self.filter_fn, order=self.order
         )
 
-    def __exit__(self, *unused):
+    def __exit__(self, *_):
         _event_bus.unregister(self.registration_id)
diff --git a/src/antlion/event/event_subscription.py b/packages/antlion/event/event_subscription.py
similarity index 100%
rename from src/antlion/event/event_subscription.py
rename to packages/antlion/event/event_subscription.py
diff --git a/src/antlion/event/subscription_handle.py b/packages/antlion/event/subscription_handle.py
similarity index 95%
rename from src/antlion/event/subscription_handle.py
rename to packages/antlion/event/subscription_handle.py
index 6aa9c3c..3c6a0cc 100644
--- a/src/antlion/event/subscription_handle.py
+++ b/packages/antlion/event/subscription_handle.py
@@ -71,9 +71,5 @@
         return self._func(*args, **kwargs)
 
 
-class InstanceSubscriptionHandle(SubscriptionHandle):
-    """A SubscriptionHandle for instance methods."""
-
-
 class StaticSubscriptionHandle(SubscriptionHandle):
     """A SubscriptionHandle for static methods."""
diff --git a/src/antlion/keys.py b/packages/antlion/keys.py
similarity index 94%
rename from src/antlion/keys.py
rename to packages/antlion/keys.py
index b545d44..c41d1e1 100644
--- a/src/antlion/keys.py
+++ b/packages/antlion/keys.py
@@ -13,13 +13,12 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-
-import enum
-
 """This module has the global key values that are used across framework
 modules.
 """
 
+import enum
+
 
 class Config(enum.Enum):
     """Enum values for test config related lookups."""
@@ -49,7 +48,6 @@
     key_iperf_server = "IPerfServer"
     key_openwrt_ap = "OpenWrtAP"
     key_packet_capture = "PacketCapture"
-    key_packet_sender = "PacketSender"
     key_pdu = "PduDevice"
     key_sniffer = "Sniffer"
     # Internal keys, used internally, not exposed to user's config files.
@@ -68,7 +66,6 @@
     m_key_iperf_server = "iperf_server"
     m_key_openwrt_ap = "openwrt_ap"
     m_key_packet_capture = "packet_capture"
-    m_key_packet_sender = "packet_sender"
     m_key_pdu = "pdu"
     m_key_sniffer = "sniffer"
 
@@ -87,25 +84,24 @@
         key_iperf_server,
         key_openwrt_ap,
         key_packet_capture,
-        key_packet_sender,
         key_pdu,
         key_sniffer,
     ]
 
 
-def get_name_by_value(value):
+def get_name_by_value(value: str) -> str | None:
     for name, member in Config.__members__.items():
         if member.value == value:
             return name
     return None
 
 
-def get_module_name(name_in_config):
+def get_module_name(name_in_config: str) -> str | None:
     """Translates the name of a controller in config file to its module name."""
     return value_to_value(name_in_config, "m_%s")
 
 
-def value_to_value(ref_value, pattern):
+def value_to_value(ref_value: str, pattern: str) -> str | None:
     """Translates the value of a key to the value of its corresponding key. The
     corresponding key is chosen based on the variable name pattern.
     """
diff --git a/src/antlion/libs/__init__.py b/packages/antlion/libs/__init__.py
similarity index 100%
rename from src/antlion/libs/__init__.py
rename to packages/antlion/libs/__init__.py
diff --git a/src/antlion/libs/logging/__init__.py b/packages/antlion/libs/logging/__init__.py
similarity index 100%
rename from src/antlion/libs/logging/__init__.py
rename to packages/antlion/libs/logging/__init__.py
diff --git a/src/antlion/libs/logging/log_stream.py b/packages/antlion/libs/logging/log_stream.py
similarity index 90%
rename from src/antlion/libs/logging/log_stream.py
rename to packages/antlion/libs/logging/log_stream.py
index 27aa077..47c33d0 100644
--- a/src/antlion/libs/logging/log_stream.py
+++ b/packages/antlion/libs/logging/log_stream.py
@@ -16,9 +16,7 @@
 import logging
 import os
 import sys
-from logging import FileHandler
-from logging import Handler
-from logging import StreamHandler
+from logging import FileHandler, Handler, StreamHandler
 from logging.handlers import RotatingFileHandler
 
 from antlion import context
@@ -323,12 +321,10 @@
                             "TESTCASE_LOG is set for log level %s." % level
                         )
             if log_style & LogStyles.ALL_LEVELS == 0:
-                invalid_style_error(
-                    "LogStyle %s needs to set a log " "level." % log_style
-                )
+                invalid_style_error(f"LogStyle {log_style} needs to set a log level.")
             if log_style & ~LogStyles.ALL_LEVELS == 0:
                 invalid_style_error(
-                    "LogStyle %s needs to set a log " "location." % log_style
+                    f"LogStyle {log_style} needs to set a log location."
                 )
             if log_style & LogStyles.ROTATE_LOGS and not log_style & (
                 LogStyles.MONOLITH_LOG
@@ -389,7 +385,7 @@
         directory = self.__get_current_output_dir(
             LogStyles.LOCATION_TO_CONTEXT_LEVEL[location]
         )
-        base_name = "%s_%s.txt" % (self.name, LogStyles.LEVEL_NAMES[level])
+        base_name = f"{self.name}_{LogStyles.LEVEL_NAMES[level]}.txt"
         handler = creator(os.path.join(directory, base_name))
         handler.setLevel(LogStyles.LEVEL_TO_NO[level])
         if self.file_format:
@@ -427,34 +423,3 @@
                 self._testclass_handlers.append(handler)
             if log_style & LogStyles.TESTCASE_LOG:
                 self._testcase_handlers.append(handler)
-
-    def __remove_handler(self, handler):
-        """Removes a handler from the logger, unless it's a NullHandler."""
-        if handler is not _null_handler:
-            handler.close()
-            self.logger.removeHandler(handler)
-
-    def update_handlers(self, event):
-        """Update the output file paths for log handlers upon a change in
-        the test context.
-
-        Args:
-            event: An instance of NewContextEvent.
-        """
-        handlers = []
-        if isinstance(event, context.NewTestClassContextEvent):
-            handlers = self._testclass_handlers + self._testcase_handlers
-        if isinstance(event, context.NewTestCaseContextEvent):
-            handlers = self._testcase_handlers
-
-        if not handlers:
-            return
-        new_dir = self.__get_current_output_dir()
-        for handler in handlers:
-            filename = os.path.basename(handler.baseFilename)
-            handler.set_file(os.path.join(new_dir, filename))
-
-    def cleanup(self):
-        """Removes all LogHandlers from the logger."""
-        for handler in self.logger.handlers:
-            self.__remove_handler(handler)
diff --git a/src/antlion/libs/ota/__init__.py b/packages/antlion/libs/ota/__init__.py
similarity index 100%
rename from src/antlion/libs/ota/__init__.py
rename to packages/antlion/libs/ota/__init__.py
diff --git a/src/antlion/libs/ota/ota_runners/__init__.py b/packages/antlion/libs/ota/ota_runners/__init__.py
similarity index 100%
rename from src/antlion/libs/ota/ota_runners/__init__.py
rename to packages/antlion/libs/ota/ota_runners/__init__.py
diff --git a/src/antlion/libs/ota/ota_runners/ota_runner.py b/packages/antlion/libs/ota/ota_runners/ota_runner.py
similarity index 98%
rename from src/antlion/libs/ota/ota_runners/ota_runner.py
rename to packages/antlion/libs/ota/ota_runners/ota_runner.py
index 05dba4c..848290a 100644
--- a/src/antlion/libs/ota/ota_runners/ota_runner.py
+++ b/packages/antlion/libs/ota/ota_runners/ota_runner.py
@@ -68,7 +68,7 @@
                     'Re-installing SL4A from "%s".', self.get_sl4a_apk()
                 )
                 self.android_device.adb.install(
-                    "-r -g %s" % self.get_sl4a_apk(), ignore_status=True
+                    f"-r -g {self.get_sl4a_apk()}", ignore_status=True
                 )
                 time.sleep(SL4A_SERVICE_SETUP_TIME)
                 if self.android_device.is_sl4a_installed():
diff --git a/src/antlion/libs/ota/ota_runners/ota_runner_factory.py b/packages/antlion/libs/ota/ota_runners/ota_runner_factory.py
similarity index 85%
rename from src/antlion/libs/ota/ota_runners/ota_runner_factory.py
rename to packages/antlion/libs/ota/ota_runners/ota_runner_factory.py
index a5622da..f5b09f4 100644
--- a/src/antlion/libs/ota/ota_runners/ota_runner_factory.py
+++ b/packages/antlion/libs/ota/ota_runners/ota_runner_factory.py
@@ -16,10 +16,8 @@
 
 import logging
 
-from antlion.config_parser import ActsConfigError
 from antlion.libs.ota.ota_runners import ota_runner
-from antlion.libs.ota.ota_tools import ota_tool_factory
-from antlion.libs.ota.ota_tools import adb_sideload_ota_tool
+from antlion.libs.ota.ota_tools import adb_sideload_ota_tool, ota_tool_factory
 
 _bound_devices = {}
 
@@ -27,23 +25,8 @@
 DEFAULT_OTA_COMMAND = "adb"
 
 
-def create_all_from_configs(config, android_devices):
-    """Creates a new OtaTool for each given AndroidDevice.
-
-    After an OtaTool is assigned to a device, another OtaTool cannot be created
-    for that device. This will prevent OTA Update tests that accidentally flash
-    the same build onto a device more than once.
-
-    Args:
-        config: the ACTS config user_params.
-        android_devices: The devices to run an OTA Update on.
-
-    Returns:
-        A list of OtaRunners responsible for updating the given devices. The
-        indexes match the indexes of the corresponding AndroidDevice in
-        android_devices.
-    """
-    return [create_from_configs(config, ad) for ad in android_devices]
+class OtaConfigError(Exception):
+    """Raised when there is a problem in test configuration file."""
 
 
 def create_from_configs(config, android_device):
@@ -65,12 +48,12 @@
         ota_tool_class_name = get_ota_value_from_config(
             config, "ota_tool", android_device
         )
-    except ActsConfigError:
+    except OtaConfigError:
         ota_tool_class_name = DEFAULT_OTA_TOOL
 
     if ota_tool_class_name not in config:
         if ota_tool_class_name is not DEFAULT_OTA_TOOL:
-            raise ActsConfigError(
+            raise OtaConfigError(
                 "If the ota_tool is overloaded, the path to the tool must be "
                 'added to the ACTS config file under {"OtaToolName": '
                 '"path/to/tool"} (in this case, {"%s": "path/to/tool"}.'
@@ -85,7 +68,7 @@
             if len(command) == 1:
                 command = command[0]
             else:
-                raise ActsConfigError(
+                raise OtaConfigError(
                     'Config value for "%s" must be either a string or a list '
                     "of exactly one element" % ota_tool_class_name
                 )
@@ -93,7 +76,7 @@
     ota_package = get_ota_value_from_config(config, "ota_package", android_device)
     ota_sl4a = get_ota_value_from_config(config, "ota_sl4a", android_device)
     if type(ota_sl4a) != type(ota_package):
-        raise ActsConfigError(
+        raise OtaConfigError(
             "The ota_package and ota_sl4a must either both be strings, or "
             'both be lists. Device with serial "%s" has mismatched types.'
             % android_device.serial
@@ -193,19 +176,19 @@
     suffix = ""
     if "ota_map" in config:
         if android_device.serial in config["ota_map"]:
-            suffix = "_%s" % config["ota_map"][android_device.serial]
+            suffix = f"_{config['ota_map'][android_device.serial]}"
 
-    ota_package_key = "%s%s" % (key, suffix)
+    ota_package_key = f"{key}{suffix}"
     if ota_package_key not in config:
         if suffix != "":
-            raise ActsConfigError(
+            raise OtaConfigError(
                 "Asked for an OTA Update without specifying a required value. "
                 '"ota_map" has entry {"%s": "%s"}, but there is no '
                 'corresponding entry {"%s":"/path/to/file"} found within the '
                 "ACTS config." % (android_device.serial, suffix[1:], ota_package_key)
             )
         else:
-            raise ActsConfigError(
+            raise OtaConfigError(
                 "Asked for an OTA Update without specifying a required value. "
                 '"ota_map" does not exist or have a key for serial "%s", and '
                 'the default value entry "%s" cannot be found within the ACTS '
diff --git a/src/antlion/libs/ota/ota_tools/__init__.py b/packages/antlion/libs/ota/ota_tools/__init__.py
similarity index 100%
rename from src/antlion/libs/ota/ota_tools/__init__.py
rename to packages/antlion/libs/ota/ota_tools/__init__.py
diff --git a/src/antlion/libs/ota/ota_tools/adb_sideload_ota_tool.py b/packages/antlion/libs/ota/ota_tools/adb_sideload_ota_tool.py
similarity index 95%
rename from src/antlion/libs/ota/ota_tools/adb_sideload_ota_tool.py
rename to packages/antlion/libs/ota/ota_tools/adb_sideload_ota_tool.py
index f097f45..ad9e883 100644
--- a/src/antlion/libs/ota/ota_tools/adb_sideload_ota_tool.py
+++ b/packages/antlion/libs/ota/ota_tools/adb_sideload_ota_tool.py
@@ -39,7 +39,7 @@
         ota_runner.android_device.adb.wait_for_sideload()
         logging.info("Sideloading ota package")
         package_path = ota_runner.get_ota_package()
-        logging.info('Running adb sideload with package "%s"' % package_path)
+        logging.info(f'Running adb sideload with package "{package_path}"')
         ota_runner.android_device.adb.sideload(package_path, timeout=PUSH_TIMEOUT)
         logging.info("Sideload complete. Waiting for device to come back up.")
         ota_runner.android_device.adb.wait_for_recovery()
diff --git a/src/antlion/libs/ota/ota_tools/ota_tool.py b/packages/antlion/libs/ota/ota_tools/ota_tool.py
similarity index 100%
rename from src/antlion/libs/ota/ota_tools/ota_tool.py
rename to packages/antlion/libs/ota/ota_tools/ota_tool.py
diff --git a/src/antlion/libs/ota/ota_tools/ota_tool_factory.py b/packages/antlion/libs/ota/ota_tools/ota_tool_factory.py
similarity index 100%
rename from src/antlion/libs/ota/ota_tools/ota_tool_factory.py
rename to packages/antlion/libs/ota/ota_tools/ota_tool_factory.py
diff --git a/src/antlion/libs/ota/ota_tools/update_device_ota_tool.py b/packages/antlion/libs/ota/ota_tools/update_device_ota_tool.py
similarity index 91%
rename from src/antlion/libs/ota/ota_tools/update_device_ota_tool.py
rename to packages/antlion/libs/ota/ota_tools/update_device_ota_tool.py
index 4bdde99..1ae0dcf 100644
--- a/src/antlion/libs/ota/ota_tools/update_device_ota_tool.py
+++ b/packages/antlion/libs/ota/ota_tools/update_device_ota_tool.py
@@ -19,9 +19,9 @@
 import shutil
 import tempfile
 
+from antlion import utils
 from antlion.libs.ota.ota_tools import ota_tool
 from antlion.libs.proc import job
-from antlion import utils
 
 # OTA Packages can be upwards of 1 GB. This may take some time to transfer over
 # USB 2.0. A/B devices must also complete the update in the background.
@@ -48,9 +48,9 @@
             ota_runner.serial,
             ota_runner.get_ota_package(),
         )
-        logging.info("Running %s" % update_command)
-        result = job.run(update_command, timeout=UPDATE_TIMEOUT)
-        logging.info("Output: %s" % result.stdout)
+        logging.info(f"Running {update_command}")
+        result = job.run(update_command, timeout_sec=UPDATE_TIMEOUT)
+        logging.info(f'Output: {result.stdout.decode("utf-8")}')
 
         logging.info("Rebooting device for update to go live.")
         ota_runner.android_device.reboot(stop_at_lock_screen=True)
diff --git a/src/antlion/libs/ota/ota_updater.py b/packages/antlion/libs/ota/ota_updater.py
similarity index 100%
rename from src/antlion/libs/ota/ota_updater.py
rename to packages/antlion/libs/ota/ota_updater.py
diff --git a/src/antlion/libs/proc/__init__.py b/packages/antlion/libs/proc/__init__.py
similarity index 100%
rename from src/antlion/libs/proc/__init__.py
rename to packages/antlion/libs/proc/__init__.py
diff --git a/packages/antlion/libs/proc/job.py b/packages/antlion/libs/proc/job.py
new file mode 100644
index 0000000..333f5b6
--- /dev/null
+++ b/packages/antlion/libs/proc/job.py
@@ -0,0 +1,220 @@
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+import os
+import shlex
+import subprocess
+import time
+
+from antlion.runner import CalledProcessError, CompletedProcess
+
+
+class Result(CompletedProcess):
+    """Command execution result.
+
+    Contains information on subprocess execution after it has exited.
+
+    Attributes:
+        command: An array containing the command and all arguments that
+                 were executed.
+        exit_status: Integer exit code of the process.
+        stdout_raw: The raw bytes output from standard out.
+        stderr_raw: The raw bytes output from standard error
+        duration: How long the process ran for.
+        did_timeout: True if the program timed out and was killed.
+    """
+
+    def __init__(
+        self,
+        command: str | list[str],
+        stdout: bytes,
+        stderr: bytes,
+        exit_status: int,
+        duration: float = 0,
+        did_timeout: bool = False,
+        encoding: str = "utf-8",
+    ) -> None:
+        """
+        Args:
+            command: The command that was run. This will be a list containing
+                     the executed command and all args.
+            stdout: The raw bytes that standard output gave.
+            stderr: The raw bytes that standard error gave.
+            exit_status: The exit status of the command.
+            duration: How long the command ran.
+            did_timeout: True if the command timed out.
+            encoding: The encoding standard that the program uses.
+        """
+        self.command = command
+        self.exit_status = exit_status
+        self._raw_stdout = stdout
+        self._raw_stderr = stderr
+        self._stdout_str: str | None = None
+        self._stderr_str: str | None = None
+        self._encoding = encoding
+        self.duration = duration
+        self.did_timeout = did_timeout
+
+    @property
+    def stdout(self) -> str:
+        """String representation of standard output."""
+        if not self._stdout_str:
+            self._stdout_str = self._raw_stdout.decode(
+                encoding=self._encoding, errors="replace"
+            )
+            self._stdout_str = self._stdout_str.strip()
+        return self._stdout_str
+
+    @property
+    def stderr(self) -> str:
+        """String representation of standard error."""
+        if not self._stderr_str:
+            self._stderr_str = self._raw_stderr.decode(
+                encoding=self._encoding, errors="replace"
+            )
+            self._stderr_str = self._stderr_str.strip()
+        return self._stderr_str
+
+    @property
+    def returncode(self) -> int:
+        return self.exit_status
+
+    def __repr__(self) -> str:
+        if self.did_timeout:
+            prefix = f"Command timed out"
+        else:
+            prefix = f"Command exited with {self.exit_status}"
+
+        command = (
+            " ".join(self.command) if isinstance(self.command, list) else self.command
+        )
+
+        return (
+            f"{prefix} after {self.duration}s: {command}\n"
+            f"stdout: {self._raw_stdout.decode('utf-8', errors='replace')}\n"
+            f"stderr: {self._raw_stderr.decode('utf-8', errors='replace')}"
+        )
+
+
+def run(
+    command: str | list[str],
+    stdin: bytes | None = None,
+    timeout_sec: float | None = 60,
+    log_output: bool = True,
+    ignore_status: bool = False,
+    env: dict[str, str] | None = None,
+) -> subprocess.CompletedProcess[bytes]:
+    """Execute a command in a subprocess and return its output.
+
+    Commands can be either shell commands (given as strings) or the
+    path and arguments to an executable (given as a list).  This function
+    will block until the subprocess finishes or times out.
+
+    Args:
+        command: The command to execute.
+        timeout_sec: Number of seconds to wait for the command to finish.
+        log_output: If true, print stdout and stderr to the debug log.
+        ignore_status: True to ignore the exit code of the
+                       subprocess.  Note that if you do ignore status codes,
+                       you should handle non-zero exit codes explicitly.
+        env: Environment variables to set for the subprocess.
+
+    Returns:
+        Result of the command as a subprocess.CompletedProcess.
+
+    Raises:
+        CalledProcessError: when the process exits with a non-zero status
+            and ignore_status is False.
+        subprocess.TimeoutExpired: When the command took too long to
+            execute.
+    """
+    start = time.perf_counter()
+    proc = subprocess.Popen(
+        command,
+        env=env,
+        stdout=subprocess.PIPE,
+        stderr=subprocess.PIPE,
+        stdin=subprocess.PIPE,
+        shell=not isinstance(command, list),
+    )
+    # Wait on the process terminating
+    timed_out = False
+    stdout = bytes()
+    stderr = bytes()
+    try:
+        (stdout, stderr) = proc.communicate(stdin, timeout_sec)
+    except subprocess.TimeoutExpired:
+        timed_out = True
+        proc.kill()
+        proc.wait()
+
+    elapsed = time.perf_counter() - start
+    exit_code = proc.poll()
+    if log_output:
+        logging.debug(
+            "Command %s exited with %d after %.2fs\nstdout: %s\nstderr: %s",
+            shlex.join(command),
+            exit_code,
+            elapsed,
+            stdout.decode("utf-8", errors="replace"),
+            stderr.decode("utf-8", errors="replace"),
+        )
+    else:
+        logging.debug(
+            "Command %s exited with %d after %.2fs",
+            shlex.join(command),
+            exit_code,
+            elapsed,
+        )
+
+    if timed_out:
+        raise subprocess.TimeoutExpired(command, elapsed, stdout, stderr)
+
+    if not ignore_status and exit_code != 0:
+        raise CalledProcessError(proc.returncode, command, stdout, stderr)
+
+    return subprocess.CompletedProcess(command, proc.returncode, stdout, stderr)
+
+
+def run_async(
+    command: str | list[str], env: dict[str, str] | None = None
+) -> subprocess.Popen[bytes]:
+    """Execute a command in a subprocess asynchronously.
+
+    It is the caller's responsibility to kill/wait on the resulting
+    subprocess.Popen object.
+
+    Commands can be either shell commands (given as strings) or the
+    path and arguments to an executable (given as a list).  This function
+    will not block.
+
+    Args:
+        command: The command to execute. Can be either a string or a list.
+        env: Environment variables to set for the subprocess.
+
+    Returns:
+        A subprocess.Popen object representing the created subprocess.
+
+    """
+    proc = subprocess.Popen(
+        command,
+        env=env,
+        preexec_fn=os.setpgrp,
+        shell=not isinstance(command, list),
+        stdout=subprocess.PIPE,
+        stderr=subprocess.STDOUT,
+    )
+    logging.debug("command %s started with pid %s", command, proc.pid)
+    return proc
diff --git a/src/antlion/libs/proc/process.py b/packages/antlion/libs/proc/process.py
similarity index 72%
rename from src/antlion/libs/proc/process.py
rename to packages/antlion/libs/proc/process.py
index 9a3bbcd..47b8842 100644
--- a/src/antlion/libs/proc/process.py
+++ b/packages/antlion/libs/proc/process.py
@@ -14,6 +14,8 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+from __future__ import annotations
+
 import logging
 import os
 import shlex
@@ -21,9 +23,9 @@
 import subprocess
 import sys
 import time
+from collections.abc import Callable
 from threading import Thread
-
-_on_windows = sys.platform == "win32"
+from typing import Self
 
 
 class ProcessError(Exception):
@@ -47,36 +49,33 @@
         _stopped: Whether or not stop() was called.
     """
 
-    def __init__(self, command, **kwargs):
+    def __init__(self, command: list[str] | str) -> None:
         """Creates a Process object.
 
         Note that this constructor does not begin the process. To start the
         process, use Process.start().
         """
-        # Split command string into list if shell=True is not specified
-        self._use_shell = kwargs.get("shell", False)
-        if not self._use_shell and isinstance(command, str):
+        if isinstance(command, str):
+            # Split command string into list
             command = shlex.split(command)
         self._command = command
-        self._subprocess_kwargs = kwargs
-        if _on_windows:
-            self._subprocess_kwargs[
-                "creationflags"
-            ] = subprocess.CREATE_NEW_PROCESS_GROUP
-        else:
-            self._subprocess_kwargs["start_new_session"] = True
-        self._process = None
 
-        self._listening_thread = None
-        self._redirection_thread = None
-        self._on_output_callback = lambda *args, **kw: None
-        self._binary_output = False
-        self._on_terminate_callback = lambda *args, **kw: ""
+        self._process: subprocess.Popen[bytes] | None = None
 
-        self._started = False
-        self._stopped = False
+        self._listening_thread: Thread | None = None
+        self._redirection_thread: Thread | None = None
+        self._on_output_callback: Callable[[str | bytes], None] = lambda _: None
+        self._binary_output: bool = False
+        self._on_terminate_callback: Callable[
+            [subprocess.Popen[bytes]], list[str] | str
+        ] = lambda _: ""
 
-    def set_on_output_callback(self, on_output_callback, binary=False):
+        self._started: bool = False
+        self._stopped: bool = False
+
+    def set_on_output_callback(
+        self, on_output_callback: Callable[[str | bytes], None], binary: bool = False
+    ) -> Self:
         """Sets the on_output_callback function.
 
         Args:
@@ -94,7 +93,10 @@
         self._binary_output = binary
         return self
 
-    def set_on_terminate_callback(self, on_terminate_callback):
+    def set_on_terminate_callback(
+        self,
+        on_terminate_callback: Callable[[subprocess.Popen[bytes]], list[str] | str],
+    ) -> Self:
         """Sets the on_self_terminate callback function.
 
         Args:
@@ -115,7 +117,7 @@
         self._on_terminate_callback = on_terminate_callback
         return self
 
-    def start(self):
+    def start(self) -> None:
         """Starts the process's execution."""
         if self._started:
             raise ProcessError("Process has already started.")
@@ -134,10 +136,10 @@
         self._stopped = False
 
     @staticmethod
-    def _get_timeout_left(timeout, start_time):
+    def _get_timeout_left(timeout, start_time) -> float:
         return max(0.1, timeout - (time.time() - start_time))
 
-    def is_running(self):
+    def is_running(self) -> bool:
         """Checks that the underlying Popen process is still running
 
         Returns:
@@ -145,7 +147,7 @@
         """
         return self._process is not None and self._process.poll() is None
 
-    def _join_threads(self):
+    def _join_threads(self) -> None:
         """Waits for the threads associated with the process to terminate."""
         if self._listening_thread is not None:
             self._listening_thread.join()
@@ -155,15 +157,15 @@
             self._redirection_thread.join()
             self._redirection_thread = None
 
-    def _kill_process(self):
+    def _kill_process(self) -> None:
         """Kills the underlying process/process group. Implementation is
         platform-dependent."""
-        if _on_windows:
-            subprocess.check_call("taskkill /F /T /PID %s" % self._process.pid)
+        if sys.platform == "win32":
+            subprocess.check_call(f"taskkill /F /T /PID {self._process.pid}")
         else:
             self.signal(signal.SIGKILL)
 
-    def wait(self, kill_timeout=60.0):
+    def wait(self, kill_timeout: float = 60.0) -> None:
         """Waits for the process to finish execution.
 
         If the process has reached the kill_timeout, the process will be killed
@@ -175,7 +177,7 @@
         Args:
             kill_timeout: The amount of time to wait until killing the process.
         """
-        if self._stopped:
+        if self._stopped or self._process is None:
             raise ProcessError("Process is already being stopped.")
         self._stopped = True
 
@@ -187,19 +189,21 @@
             self._join_threads()
             self._started = False
 
-    def signal(self, sig):
+    def signal(self, sig) -> None:
         """Sends a signal to the process.
 
         Args:
             sig: The signal to be sent.
         """
-        if _on_windows:
+        if sys.platform == "win32":
             raise ProcessError("Unable to call Process.signal on windows.")
+        if self._process is None:
+            raise ProcessError("No process is running")
 
         pgid = os.getpgid(self._process.pid)
         os.killpg(pgid, sig)
 
-    def stop(self):
+    def stop(self) -> None:
         """Stops the process.
 
         This command is effectively equivalent to kill, but gives time to clean
@@ -210,34 +214,30 @@
         """
         self.wait(0)
 
-    def _redirect_output(self):
+    def _redirect_output(self) -> None:
         """Redirects the output from the command into the on_output_callback."""
-        if self._binary_output:
-            while True:
+        if self._process is None:
+            raise ProcessError("No process is running")
+        if self._process.stdout is None:
+            raise ProcessError("Process stdout is not PIPE")
+
+        while True:
+            data: str | bytes
+            if self._binary_output:
                 data = self._process.stdout.read(1024)
+            else:
+                data = (
+                    self._process.stdout.readline()
+                    .decode("utf-8", errors="replace")
+                    .rstrip()
+                )
 
-                if not data:
-                    return
-                else:
-                    self._on_output_callback(data)
-        else:
-            while True:
-                line = self._process.stdout.readline().decode("utf-8", errors="replace")
+            if not data:
+                return
+            else:
+                self._on_output_callback(data)
 
-                if not line:
-                    return
-                else:
-                    # Output the line without trailing \n and whitespace.
-                    self._on_output_callback(line.rstrip())
-
-    @staticmethod
-    def __start_process(command, **kwargs):
-        """A convenient wrapper function for starting the process."""
-        acts_logger = logging.getLogger()
-        acts_logger.debug('Starting command "%s" with kwargs %s', command, kwargs)
-        return subprocess.Popen(command, **kwargs)
-
-    def _exec_loop(self):
+    def _exec_loop(self) -> None:
         """Executes Popen in a loop.
 
         When Popen terminates without stop() being called,
@@ -248,12 +248,20 @@
         """
         command = self._command
         while True:
-            self._process = self.__start_process(
+            acts_logger = logging.getLogger()
+            acts_logger.debug('Starting command "%s"', command)
+
+            creationflags: int = 0
+            if sys.platform == "win32":
+                creationflags = subprocess.CREATE_NEW_PROCESS_GROUP
+
+            self._process = subprocess.Popen(
                 command,
                 stdout=subprocess.PIPE,
                 stderr=subprocess.STDOUT,
+                creationflags=creationflags,
+                start_new_session=False if sys.platform == "win32" else True,
                 bufsize=1,
-                **self._subprocess_kwargs,
             )
             self._redirection_thread = Thread(target=self._redirect_output)
             self._redirection_thread.start()
@@ -270,7 +278,7 @@
                 logging.debug("Beginning on_terminate_callback for %s.", command)
                 retry_value = self._on_terminate_callback(self._process)
                 if retry_value:
-                    if not self._use_shell and isinstance(retry_value, str):
+                    if isinstance(retry_value, str):
                         retry_value = shlex.split(retry_value)
                     command = retry_value
                 else:
diff --git a/packages/antlion/logger.py b/packages/antlion/logger.py
new file mode 100755
index 0000000..91268c9
--- /dev/null
+++ b/packages/antlion/logger.py
@@ -0,0 +1,49 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+from types import TracebackType
+
+
+class LogLevel:
+    """Sets the logging level threshold for logger within this context.
+
+    Logging messages which are less severe than level will be ignored.
+    See https://docs.python.org/3/library/logging.html#levels for a list of
+    levels.
+    """
+
+    def __init__(
+        self, logger: logging.Logger | logging.LoggerAdapter, level: int
+    ) -> None:
+        self._logger = logger
+        if isinstance(logger, logging.Logger):
+            self._old_level = logger.level
+        else:
+            self._old_level = logger.logger.level
+        self._new_level = level
+
+    def __enter__(self) -> logging.Logger | logging.LoggerAdapter:
+        self._logger.setLevel(self._new_level)
+        return self._logger
+
+    def __exit__(
+        self,
+        _exit_type: type[BaseException] | None,
+        _exit_value: BaseException | None,
+        _exit_traceback: TracebackType | None,
+    ) -> None:
+        self._logger.setLevel(self._old_level)
diff --git a/src/antlion/net.py b/packages/antlion/net.py
similarity index 92%
rename from src/antlion/net.py
rename to packages/antlion/net.py
index 6f56703..e4a1851 100644
--- a/src/antlion/net.py
+++ b/packages/antlion/net.py
@@ -15,10 +15,8 @@
 # limitations under the License.
 
 import errno
-import time
 import socket
-
-from typing import Optional
+import time
 
 
 def wait_for_port(host: str, port: int, timeout_sec: int = 5) -> None:
@@ -36,7 +34,7 @@
         TimeoutError: when timeout_sec has expired without a successful
             connection to the service
     """
-    last_error: Optional[OSError] = None
+    last_error: OSError | None = None
     timeout = time.perf_counter() + timeout_sec
 
     while True:
@@ -48,8 +46,7 @@
             # Occurs when the host is online but not ready to accept connections
             # yet; wait to see if the host becomes ready.
             last_error = e
-        except socket.timeout as e:
-            # socket.timeout was aliased to TimeoutError in Python 3.10.
+        except TimeoutError as e:
             last_error = e
         except OSError as e:
             if e.errno == errno.EHOSTUNREACH:
diff --git a/src/antlion/__init__.py b/packages/antlion/py.typed
similarity index 100%
copy from src/antlion/__init__.py
copy to packages/antlion/py.typed
diff --git a/packages/antlion/runner.py b/packages/antlion/runner.py
new file mode 100644
index 0000000..962a46d
--- /dev/null
+++ b/packages/antlion/runner.py
@@ -0,0 +1,141 @@
+# Copyright 2023 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import annotations
+
+import logging
+import subprocess
+from os import PathLike
+from typing import IO, Protocol, Sequence, TypeAlias
+
+from mobly import signals
+
+StrOrBytesPath: TypeAlias = str | bytes | PathLike[str] | PathLike[bytes]
+_CMD: TypeAlias = StrOrBytesPath | Sequence[StrOrBytesPath]
+
+
+class Runner(Protocol):
+    """A command runner."""
+
+    log: logging.LoggerAdapter
+
+    def run(
+        self,
+        command: str | list[str],
+        stdin: bytes | None = None,
+        timeout_sec: float | None = None,
+        log_output: bool = True,
+    ) -> subprocess.CompletedProcess[bytes]:
+        """Run command with arguments.
+
+        Args:
+            command: Command to execute
+            stdin: Standard input to command.
+            timeout_sec: Seconds to wait for command to finish
+            log_output: If true, print stdout and stderr to the debug log.
+
+        Returns:
+            Result of the completed command.
+
+        Raises:
+            CalledProcessError: when the process exits with a non-zero status
+            subprocess.TimeoutExpired: when the timeout expires while waiting
+                for a child process
+            CalledProcessTransportError: when the underlying transport fails
+        """
+        ...
+
+    def run_async(self, command: str) -> subprocess.CompletedProcess[bytes]:
+        """Run command asynchronously.
+
+        Args:
+            command: Command to execute
+
+        Returns:
+            Results of the dispatched command.
+
+        Raises:
+            CalledProcessError: when the process fails to start
+            subprocess.TimeoutExpired: when the timeout expires while waiting
+                for a child process
+            CalledProcessTransportError: when the underlying transport fails
+        """
+        ...
+
+    def start(
+        self,
+        command: list[str],
+        stdout: IO[bytes] | int = subprocess.PIPE,
+        stdin: IO[bytes] | int = subprocess.PIPE,
+    ) -> subprocess.Popen[bytes]:
+        """Execute a child program in a new process."""
+        ...
+
+
+class CompletedProcess(Protocol):
+    @property
+    def returncode(self) -> int:
+        """Exit status."""
+        ...
+
+    @property
+    def stdout(self) -> str:
+        """Output stream."""
+        ...
+
+    @property
+    def stderr(self) -> str:
+        """Error output stream."""
+        ...
+
+
+class CalledProcessError(subprocess.CalledProcessError):
+    """Wrapper over subprocess.CalledProcessError to guarantee stdout and stderr
+    are bytes and not None."""
+
+    returncode: int
+    cmd: _CMD
+    output: bytes
+
+    stdout: bytes
+    stderr: bytes
+
+    def __init__(
+        self: CalledProcessError,
+        returncode: int,
+        cmd: _CMD,
+        output: str | bytes | None = None,
+        stderr: str | bytes | None = None,
+    ) -> None:
+        # For usability, guarantee stdout and stderr are bytes and not None.
+        if isinstance(output, str):
+            output = output.encode("utf-8")
+        if isinstance(stderr, str):
+            stderr = stderr.encode("utf-8")
+        if output is None:
+            output = bytes()
+        if stderr is None:
+            stderr = bytes()
+
+        super().__init__(returncode, cmd, output, stderr)
+
+    def __str__(self):
+        out = super().__str__()
+        out += f'\nstderr: {self.stderr.decode("utf-8", errors="replace")}'
+        out += f'\nstdout: {self.stdout.decode("utf-8", errors="replace")}'
+        return out
+
+
+class CalledProcessTransportError(signals.TestError):
+    """Error in process's underlying transport."""
diff --git a/src/antlion/test_utils/OWNERS b/packages/antlion/test_utils/OWNERS
similarity index 100%
rename from src/antlion/test_utils/OWNERS
rename to packages/antlion/test_utils/OWNERS
diff --git a/src/antlion/test_utils/__init__.py b/packages/antlion/test_utils/__init__.py
similarity index 100%
rename from src/antlion/test_utils/__init__.py
rename to packages/antlion/test_utils/__init__.py
diff --git a/src/antlion/test_utils/abstract_devices/__init__.py b/packages/antlion/test_utils/abstract_devices/__init__.py
similarity index 100%
rename from src/antlion/test_utils/abstract_devices/__init__.py
rename to packages/antlion/test_utils/abstract_devices/__init__.py
diff --git a/packages/antlion/test_utils/abstract_devices/wlan_device.py b/packages/antlion/test_utils/abstract_devices/wlan_device.py
new file mode 100644
index 0000000..0f253b7
--- /dev/null
+++ b/packages/antlion/test_utils/abstract_devices/wlan_device.py
@@ -0,0 +1,551 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import annotations
+
+import enum
+from typing import Protocol, runtime_checkable
+import fidl_fuchsia_wlan_common as f_wlan_common
+
+from honeydew.affordances.connectivity.wlan.utils.types import (
+    ClientStatusConnected,
+    ClientStatusConnecting,
+    ClientStatusIdle,
+    ConnectionState,
+)
+from honeydew.affordances.connectivity.wlan.utils.types import (
+    SecurityType as HdSecurityType,
+)
+from mobly.records import TestResultRecord
+
+from antlion.controllers import iperf_client
+from antlion.controllers.android_device import AndroidDevice
+from antlion.controllers.ap_lib.hostapd_security import SecurityMode
+from antlion.controllers.fuchsia_device import FuchsiaDevice
+from antlion.controllers.fuchsia_lib.lib_controllers.wlan_policy_controller import (
+    WlanPolicyControllerError,
+)
+from antlion.controllers.iperf_client import IPerfClientBase
+from antlion.controllers.pdu import PduDevice
+from antlion.test_utils.wifi import wifi_test_utils as awutils
+from antlion.utils import PingResult, adb_shell_ping
+
+DEFAULT_ASSOCIATE_TIMEOUT_SEC = 30
+
+
+@runtime_checkable
+class SupportsWLAN(Protocol):
+    """A generic WLAN device."""
+
+    @property
+    def identifier(self) -> str:
+        """Unique identifier for this device."""
+        ...
+
+    @property
+    def has_wep_support(self) -> bool:
+        "Whether the wlan_device has support for WEP security"
+        ...
+
+    @property
+    def has_wpa_support(self) -> bool:
+        "Whether the wlan_device has support for WPA security"
+        ...
+
+    def take_bug_report(self, record: TestResultRecord) -> None:
+        """Take a bug report on the device and stores it on the host.
+
+        Will store the bug report in the output directory for the currently running
+        test, as specified by `record`.
+
+        Args:
+            record: Information about the current running test.
+        """
+        ...
+
+    def associate(
+        self,
+        target_ssid: str,
+        target_security: SecurityMode,
+        target_pwd: str | None = None,
+        key_mgmt: str | None = None,
+        check_connectivity: bool = True,
+        hidden: bool = False,
+    ) -> bool:
+        """Associate to a target network.
+
+        Args:
+            target_ssid: SSID to associate to.
+            target_pwd: Password for the SSID, if necessary.
+            key_mgmt: The hostapd wpa_key_mgmt, if specified.
+            check_connectivity: Whether to check for internet connectivity.
+            hidden: Whether the network is hidden.
+            target_security: Target security for network, used to
+                save the network in policy connects (see wlan_policy_lib)
+        Returns:
+            True if successfully connected to WLAN, False if not.
+        """
+        ...
+
+    def disconnect(self) -> None:
+        """Disconnect from all WLAN networks."""
+        ...
+
+    def get_default_wlan_test_interface(self) -> str:
+        """Name of default WLAN interface to use for testing."""
+        ...
+
+    def is_connected(self, ssid: str | None = None) -> bool:
+        """Determines if wlan_device is connected to wlan network.
+
+        Args:
+            ssid: If specified, check if device is connected to a specific network.
+
+        Returns:
+            True if connected to requested network; or if ssid not specified, True if
+            connected to any network; otherwise, False.
+        """
+        ...
+
+    def create_iperf_client(self, test_interface: str | None = None) -> IPerfClientBase:
+        """Create an iPerf3 client on this device.
+
+        Args:
+            test_interface: Name of test interface. Defaults to first found wlan client
+                interface.
+
+        Returns:
+            IPerfClient object
+        """
+        ...
+
+    def get_wlan_interface_id_list(self) -> list[int]:
+        """List available WLAN interfaces.
+
+        Returns:
+            A list of wlan interface IDs.
+        """
+        ...
+
+    def destroy_wlan_interface(self, iface_id: int) -> None:
+        """Destroy the specified WLAN interface.
+
+        Args:
+            iface_id: ID of the interface to destroy.
+        """
+        ...
+
+    def ping(
+        self,
+        dest_ip: str,
+        count: int = 3,
+        interval: int = 1000,
+        timeout: int = 1000,
+        size: int = 25,
+        additional_ping_params: str | None = None,
+    ) -> PingResult:
+        """Pings from a device to an IP address or hostname
+
+        Args:
+            dest_ip: IP or hostname to ping
+            count: How many icmp packets to send
+            interval: Milliseconds to wait between pings
+            timeout: Milliseconds to wait before having the icmp packet timeout
+            size: Size of the icmp packet in bytes
+            additional_ping_params: Command option flags to append to the command string
+
+        Returns:
+            A PingResult with the results of the ping, containing
+            the following items:
+                status: Whether the ping was successful.
+                rtt_min: The minimum round trip time of the ping.
+                rtt_max: The maximum round trip time of the ping.
+                rtt_avg: The avg round trip time of the ping.
+                stdout: The standard out of the ping command.
+                stderr: The standard error of the ping command.
+        """
+        ...
+
+    def hard_power_cycle(self, pdus: list[PduDevice]) -> None:
+        """Reboot a device abruptly without notification.
+
+        Args:
+            pdus: All testbed PDUs
+        """
+        ...
+
+    def feature_is_present(self, feature: str) -> bool:
+        """Check if a WLAN feature is present.
+
+        Args:
+            feature: WLAN feature to query
+
+        Returns:
+            True if `feature` is present; otherwise, False.
+        """
+        ...
+
+    def wifi_toggle_state(self, state: bool | None) -> None:
+        """Toggle the state of Wi-Fi.
+
+        Args:
+            state: Wi-Fi state to set to. If None, opposite of the current state.
+        """
+        ...
+
+    def reset_wifi(self) -> None:
+        """Clears all saved Wi-Fi networks on a device.
+
+        This will turn Wi-Fi on.
+        """
+        ...
+
+    def turn_location_off_and_scan_toggle_off(self) -> None:
+        """Turn off Wi-Fi location scans."""
+        ...
+
+
+class AndroidWlanDevice(SupportsWLAN):
+    """Android device that supports WLAN."""
+
+    def __init__(self, android_device: AndroidDevice) -> None:
+        self.device = android_device
+
+    @property
+    def identifier(self) -> str:
+        return self.device.serial
+
+    @property
+    def has_wep_support(self) -> bool:
+        "Whether the wlan_device has support for WEP security"
+        return True
+
+    @property
+    def has_wpa_support(self) -> bool:
+        "Whether the wlan_device has support for WPA security"
+        return True
+
+    def wifi_toggle_state(self, state: bool | None) -> None:
+        awutils.wifi_toggle_state(self.device, state)
+
+    def reset_wifi(self) -> None:
+        awutils.reset_wifi(self.device)
+
+    def take_bug_report(self, record: TestResultRecord) -> None:
+        self.device.take_bug_report(record.test_name, record.begin_time)
+
+    def turn_location_off_and_scan_toggle_off(self) -> None:
+        awutils.turn_location_off_and_scan_toggle_off(self.device)
+
+    def associate(
+        self,
+        target_ssid: str,
+        target_security: SecurityMode,
+        target_pwd: str | None = None,
+        key_mgmt: str | None = None,
+        check_connectivity: bool = True,
+        hidden: bool = False,
+    ) -> bool:
+        network = {"SSID": target_ssid, "hiddenSSID": hidden}
+        if target_pwd:
+            network["password"] = target_pwd
+        if key_mgmt:
+            network["security"] = key_mgmt
+        try:
+            awutils.connect_to_wifi_network(
+                self.device,
+                network,
+                check_connectivity=check_connectivity,
+                hidden=hidden,
+            )
+            return True
+        except Exception as e:
+            self.device.log.info(f"Failed to associated ({e})")
+            return False
+
+    def disconnect(self) -> None:
+        awutils.turn_location_off_and_scan_toggle_off(self.device)
+
+    def get_wlan_interface_id_list(self) -> list[int]:
+        raise NotImplementedError("get_wlan_interface_id_list is not implemented")
+
+    def get_default_wlan_test_interface(self) -> str:
+        return "wlan0"
+
+    def destroy_wlan_interface(self, iface_id: int) -> None:
+        raise NotImplementedError("destroy_wlan_interface is not implemented")
+
+    def is_connected(self, ssid: str | None = None) -> bool:
+        wifi_info = self.device.droid.wifiGetConnectionInfo()
+        if ssid:
+            return "BSSID" in wifi_info and wifi_info["SSID"] == ssid
+        return "BSSID" in wifi_info
+
+    def ping(
+        self,
+        dest_ip: str,
+        count: int = 3,
+        interval: int = 1000,
+        timeout: int = 1000,
+        size: int = 25,
+        additional_ping_params: str | None = None,
+    ) -> PingResult:
+        success = adb_shell_ping(self.device, dest_ip, count=count, timeout=timeout)
+        return PingResult(
+            exit_status=0 if success else 1,
+            # TODO: Implement the rest if needed for any tests
+            stdout="",
+            stderr="",
+            transmitted=None,
+            received=None,
+            time_ms=None,
+            rtt_min_ms=None,
+            rtt_avg_ms=None,
+            rtt_max_ms=None,
+            rtt_mdev_ms=None,
+        )
+
+    def hard_power_cycle(self, pdus: list[PduDevice]) -> None:
+        raise NotImplementedError("hard_power_cycle is not implemented")
+
+    def create_iperf_client(self, test_interface: str | None = None) -> IPerfClientBase:
+        if not test_interface:
+            test_interface = self.get_default_wlan_test_interface()
+
+        return iperf_client.IPerfClientOverAdb(
+            android_device=self.device, test_interface=test_interface
+        )
+
+    def feature_is_present(self, feature: str) -> bool:
+        raise NotImplementedError("feature_is_present is not implemented")
+
+
+class AssociationMode(enum.Enum):
+    """Defines which FIDLs to use for WLAN association and disconnect."""
+
+    DRIVER = 1
+    """Call WLAN core FIDLs to provide all association and disconnect."""
+    POLICY = 2
+    """Call WLAN policy FIDLs to provide all association and disconnect."""
+
+
+class FuchsiaWlanDevice(SupportsWLAN):
+    """Fuchsia device that supports WLAN."""
+
+    def __init__(self, fuchsia_device: FuchsiaDevice, mode: AssociationMode):
+        self.device = fuchsia_device
+        self.device.configure_wlan()
+        self.association_mode = mode
+
+    @property
+    def identifier(self) -> str:
+        return self.device.ip
+
+    @property
+    def has_wep_support(self) -> bool:
+        for line in self._get_wlandevicemonitor_config().splitlines():
+            if "wep_supported" in line and "Bool(true)" in line:
+                return True
+        return False
+
+    @property
+    def has_wpa_support(self) -> bool:
+        for line in self._get_wlandevicemonitor_config().splitlines():
+            if "wpa1_supported" in line and "Bool(true)" in line:
+                return True
+        return False
+
+    def _get_wlandevicemonitor_config(self) -> str:
+        return self.device.ffx.run(["component", "show", "core/wlandevicemonitor"])
+
+    def wifi_toggle_state(self, state: bool | None) -> None:
+        pass
+
+    def reset_wifi(self) -> None:
+        pass
+
+    def take_bug_report(self, _: TestResultRecord) -> None:
+        self.device.take_bug_report()
+
+    def turn_location_off_and_scan_toggle_off(self) -> None:
+        pass
+
+    def associate(
+        self,
+        target_ssid: str,
+        target_security: SecurityMode,
+        target_pwd: str | None = None,
+        key_mgmt: str | None = None,
+        check_connectivity: bool = True,
+        hidden: bool = False,
+        timeout_sec: int = DEFAULT_ASSOCIATE_TIMEOUT_SEC,
+    ) -> bool:
+        match self.association_mode:
+            case AssociationMode.DRIVER:
+                ssid_bss_desc_map = self.device.honeydew_fd.wlan.scan_for_bss_info()
+
+                bss_descs_for_ssid = ssid_bss_desc_map.get(target_ssid, None)
+                if not bss_descs_for_ssid or len(bss_descs_for_ssid) < 1:
+                    self.device.log.error(
+                        "Scan failed to find a BSS description for target_ssid "
+                        f"{target_ssid}"
+                    )
+                    return False
+
+                return self.device.honeydew_fd.wlan.connect(
+                    target_ssid, target_pwd, bss_descs_for_ssid[0]
+                )
+            case AssociationMode.POLICY:
+                try:
+                    self.device.honeydew_fd.wlan_policy.save_network(
+                        target_ssid,
+                        HdSecurityType(target_security.fuchsia_security_type()),
+                        target_pwd=target_pwd,
+                    )
+                    status = self.device.honeydew_fd.wlan_policy.connect(
+                        target_ssid,
+                        HdSecurityType(target_security.fuchsia_security_type()),
+                    )
+                    if status is f_wlan_common.RequestStatus.ACKNOWLEDGED:
+                        self.device.wlan_policy_controller.wait_for_network_state(
+                            target_ssid,
+                            ConnectionState.CONNECTED,
+                            timeout_sec=timeout_sec,
+                        )
+                    else:
+                        self.device.log.warning(
+                            f"Received request status: {status.name} while trying to "
+                            f"connect to ssid: {target_ssid}."
+                        )
+                        return False
+
+                    return True
+                except WlanPolicyControllerError as e:
+                    self.device.log.error(
+                        f"Failed to save and connect to {target_ssid} with "
+                        f"error: {e}"
+                    )
+                    return False
+
+    def disconnect(self) -> None:
+        """Function to disconnect from a Fuchsia WLAN device.
+        Raises an exception if disconnect was not successful.
+        """
+        match self.association_mode:
+            case AssociationMode.DRIVER:
+                self.device.honeydew_fd.wlan.disconnect()
+            case AssociationMode.POLICY:
+                self.device.honeydew_fd.wlan_policy.remove_all_networks()
+                self.device.wlan_policy_controller.wait_for_no_connections()
+
+    def ping(
+        self,
+        dest_ip: str,
+        count: int = 3,
+        interval: int = 1000,
+        timeout: int = 1000,
+        size: int = 25,
+        additional_ping_params: str | None = None,
+    ) -> PingResult:
+        return self.device.ping(
+            dest_ip,
+            count=count,
+            interval=interval,
+            timeout=timeout,
+            size=size,
+            additional_ping_params=additional_ping_params,
+        )
+
+    def get_wlan_interface_id_list(self) -> list[int]:
+        return self.device.honeydew_fd.wlan.get_iface_id_list()
+
+    def get_default_wlan_test_interface(self) -> str:
+        if self.device.wlan_client_test_interface_name is None:
+            raise TypeError("Expected wlan_client_test_interface_name to be str")
+        return self.device.wlan_client_test_interface_name
+
+    def destroy_wlan_interface(self, iface_id: int) -> None:
+        self.device.honeydew_fd.wlan.destroy_iface(iface_id)
+
+    def is_connected(self, ssid: str | None = None) -> bool:
+        result = self.device.honeydew_fd.wlan.status()
+        match result:
+            case ClientStatusIdle():
+                self.device.log.info("Client status idle")
+                return False
+            case ClientStatusConnecting():
+                ssid_bytes = bytearray(result.ssid).decode(
+                    encoding="utf-8", errors="replace"
+                )
+                self.device.log.info(f"Client status connecting to ssid: {ssid_bytes}")
+                return False
+            case ClientStatusConnected():
+                ssid_bytes = bytearray(result.ssid).decode(
+                    encoding="utf-8", errors="replace"
+                )
+                self.device.log.info(f"Client connected to ssid: {ssid_bytes}")
+                if ssid is None:
+                    return True
+                return ssid == ssid_bytes
+            case _:
+                raise ValueError(
+                    "Status did not return a valid status response: " f"{result}"
+                )
+
+    def hard_power_cycle(self, pdus: list[PduDevice]) -> None:
+        self.device.reboot(reboot_type="hard", testbed_pdus=pdus)
+
+    def create_iperf_client(self, test_interface: str | None = None) -> IPerfClientBase:
+        if not test_interface:
+            test_interface = self.get_default_wlan_test_interface()
+
+        # A package server is necessary to acquire the iperf3 client for
+        # some builds.
+        self.device.start_package_server()
+
+        return iperf_client.IPerfClientOverSsh(
+            ssh_provider=self.device.ssh,
+            test_interface=test_interface,
+            # Fuchsia's date tool does not support setting system date/time.
+            sync_date=False,
+        )
+
+    def feature_is_present(self, feature: str) -> bool:
+        return feature in self.device.wlan_features
+
+
+def create_wlan_device(
+    hardware_device: FuchsiaDevice | AndroidDevice,
+    associate_mode: AssociationMode,
+) -> SupportsWLAN:
+    """Creates a generic WLAN device based on the type of hardware device
+    passed in.
+
+    Args:
+        hardware_device: A WLAN hardware device that is supported by antlion.
+    """
+    device: SupportsWLAN
+    if isinstance(hardware_device, FuchsiaDevice):
+        device = FuchsiaWlanDevice(hardware_device, associate_mode)
+    elif isinstance(hardware_device, AndroidDevice):
+        device = AndroidWlanDevice(hardware_device)
+    else:
+        raise ValueError(
+            f"Unable to create WLAN device for type {type(hardware_device)}"
+        )
+
+    assert isinstance(device, SupportsWLAN)
+    return device
diff --git a/src/antlion/test_utils/abstract_devices/wmm_transceiver.py b/packages/antlion/test_utils/abstract_devices/wmm_transceiver.py
similarity index 78%
rename from src/antlion/test_utils/abstract_devices/wmm_transceiver.py
rename to packages/antlion/test_utils/abstract_devices/wmm_transceiver.py
index e38d91a..8609807 100644
--- a/src/antlion/test_utils/abstract_devices/wmm_transceiver.py
+++ b/packages/antlion/test_utils/abstract_devices/wmm_transceiver.py
@@ -17,15 +17,18 @@
 import logging
 import multiprocessing
 import time
-
 from datetime import datetime
-from uuid import uuid4
+from multiprocessing.managers import DictProxy
+from typing import Any, Mapping
+from uuid import UUID, uuid4
 
-from antlion import signals
-from antlion import tracelogger
+from mobly import logger, signals
+
 from antlion import utils
-from antlion.controllers import iperf_client
-from antlion.controllers import iperf_server
+from antlion.controllers import iperf_client, iperf_server
+from antlion.controllers.access_point import AccessPoint
+from antlion.test_utils.abstract_devices.wlan_device import SupportsWLAN
+from antlion.validation import MapValidator
 
 AC_VO = "AC_VO"
 AC_VI = "AC_VI"
@@ -50,37 +53,42 @@
     pass
 
 
-def create(config, identifier=None, wlan_devices=None, access_points=None):
+def create(
+    config: Mapping[str, Any],
+    identifier: str | None = None,
+    wlan_devices: list[SupportsWLAN] | None = None,
+    access_points: list[AccessPoint] | None = None,
+):
     """Creates a WmmTransceiver from a config.
 
     Args:
-        config: dict, config parameters for the transceiver. Contains:
-            - iperf_config: dict, the config to use for creating IPerfClients
-                and IPerfServers (excluding port).
-            - port_range_start: int, the lower bound of the port range to use
-                for creating IPerfServers. Defaults to 5201.
-            - wlan_device: string, the identifier of the wlan_device used for
-                this WmmTransceiver (optional)
+        config: Config parameters for the transceiver. Contains:
+            - iperf_config: dict, the config to use for creating IPerfClients and
+                IPerfServers (excluding port).
+            - port_range_start: int, the lower bound of the port range to use for
+                creating IPerfServers. Defaults to 5201.
+            - wlan_device: string, the identifier of the wlan_device used for this
+                WmmTransceiver (optional)
 
-        identifier: string, identifier for the WmmTransceiver. Must be provided
-            either as arg or in the config.
-        wlan_devices: list of WlanDevice objects from which to get the
-            wlan_device, if any, used as this transceiver
-        access_points: list of AccessPoint objects from which to get the
-            access_point, if any, used as this transceiver
+        identifier: Identifier for the WmmTransceiver. Must be provided either as arg or
+            in the config.
+        wlan_devices: WLAN devices from which to get the wlan_device, if any, used as
+            this transceiver
+        access_points: Access points from which to get the access_point, if any, used as
+            this transceiver
     """
     try:
-        # If identifier is not provided as func arg, it must be provided via
-        # config file.
-        if not identifier:
-            identifier = config["identifier"]
         iperf_config = config["iperf_config"]
-
     except KeyError as err:
         raise WmmTransceiverError(
-            "Parameter not provided as func arg, nor found in config: %s" % err
+            f"Parameter not provided as func arg, nor found in config: {err}"
         )
 
+    if not identifier:
+        # If identifier is not provided as func arg, it must be provided via
+        # config file.
+        identifier = MapValidator(config).get(str, "identifier")
+
     if wlan_devices is None:
         wlan_devices = []
 
@@ -105,43 +113,48 @@
     )
 
 
-def _find_wlan_device(wlan_device_identifier, wlan_devices):
-    """Returns WlanDevice based on string identifier (e.g. ip, serial, etc.)
+def _find_wlan_device(
+    wlan_device_identifier: str, wlan_devices: list[SupportsWLAN]
+) -> SupportsWLAN:
+    """Returns WLAN device based on string identifier (e.g. ip, serial, etc.)
 
     Args:
-        wlan_device_identifier: string, identifier for the desired WlanDevice
-        wlan_devices: list, WlanDevices to search through
+        wlan_device_identifier: Identifier for the desired WLAN device
+        wlan_devices: WLAN devices to search through
 
     Returns:
-        WlanDevice, with identifier matching wlan_device_identifier
+        A WLAN device matching wlan_device_identifier
 
     Raises:
-        WmmTransceiverError, if no WlanDevice matches identifier
+        WmmTransceiverError, if no WLAN device matches wlan_device_identifier
     """
     for wd in wlan_devices:
         if wlan_device_identifier == wd.identifier:
             return wd
     raise WmmTransceiverError(
-        "No WlanDevice with identifier: %s" % wlan_device_identifier
+        f'No WLAN device with identifier "{wlan_device_identifier}"'
     )
 
 
-def _find_access_point(access_point_ip, access_points):
+def _find_access_point(
+    access_point_ip: str, access_points: list[AccessPoint]
+) -> AccessPoint:
     """Returns AccessPoint based on string ip address
 
     Args:
-        access_point_ip: string, control plane ip addr of the desired AP,
-        access_points: list, AccessPoints to search through
+        access_point_ip: Control plane IP address of the desired AP
+        access_points: Access points to search through
 
     Returns:
-        AccessPoint, with hostname matching access_point_ip
+        Access point with hostname matching access_point_ip
 
     Raises:
-        WmmTransceiverError, if no AccessPoint matches ip"""
+        WmmTransceiverError, if no access point matches access_point_ip
+    """
     for ap in access_points:
         if ap.ssh_settings.hostname == access_point_ip:
             return ap
-    raise WmmTransceiverError("No AccessPoint with ip: %s" % access_point_ip)
+    raise WmmTransceiverError(f"No AccessPoint with ip: {access_point_ip}")
 
 
 class WmmTransceiver(object):
@@ -156,12 +169,17 @@
         port_range_start=5201,
     ):
         self.identifier = identifier
-        self.log = tracelogger.TraceLogger(
-            WmmTransceiverLoggerAdapter(
-                logging.getLogger(), {"identifier": self.identifier}
-            )
+        self.log = logger.PrefixLoggerAdapter(
+            logging.getLogger(),
+            {
+                logger.PrefixLoggerAdapter.EXTRA_KEY_LOG_PREFIX: (
+                    f"[WmmTransceiver | {self.identifier}]"
+                    if self.identifier
+                    else "[WmmTransceiver]"
+                ),
+            },
         )
-        # WlanDevice or AccessPoint, that is used as the transceiver. Only one
+        # WLAN device or AccessPoint that is used as the transceiver. Only one
         # will be set. This helps consolodate association, setup, teardown, etc.
         self.wlan_device = wlan_device
         self.access_point = access_point
@@ -241,7 +259,7 @@
         self._validate_server_address(server_ip, uuid)
 
         self.log.info(
-            "Running synchronous stream to %s WmmTransceiver" % receiver.identifier
+            f"Running synchronous stream to {receiver.identifier} WmmTransceiver"
         )
         self._run_traffic(
             uuid,
@@ -291,7 +309,7 @@
         }
 
         self._pending_async_streams[uuid] = pending_stream_config
-        self.log.info("Stream to %s WmmTransceiver prepared." % receiver.identifier)
+        self.log.info(f"Stream to {receiver.identifier} WmmTransceiver prepared.")
         return uuid
 
     def start_asynchronous_streams(self, start_time=None):
@@ -352,7 +370,7 @@
 
         # Releases resources for any streams that were prepared, but no run
         for uuid in self._pending_async_streams:
-            self.log.error("Pending asynchronous stream %s never ran. Cleaning." % uuid)
+            self.log.error(f"Pending asynchronous stream {uuid} never ran. Cleaning.")
             self._return_stream_resources(uuid)
         self._pending_async_streams.clear()
 
@@ -363,8 +381,7 @@
             process.join(timeout)
             if process.is_alive():
                 self.log.error(
-                    "Stream process failed to join in %s seconds. Terminating."
-                    % timeout
+                    f"Stream process failed to join in {timeout} seconds. Terminating."
                 )
                 process.terminate()
                 process.join()
@@ -400,16 +417,16 @@
 
     def _run_traffic(
         self,
-        uuid,
-        client,
-        server_ip,
-        server_port,
-        active_streams,
-        stream_results,
-        access_category=None,
-        bandwidth=None,
-        stream_time=DEFAULT_STREAM_TIME,
-        start_time=None,
+        uuid: UUID,
+        client: iperf_client.IPerfClientBase,
+        server_ip: str,
+        server_port: int,
+        active_streams: DictProxy[Any, Any],
+        stream_results: DictProxy[Any, Any],
+        access_category: str | None = None,
+        bandwidth: int | None = None,
+        stream_time: int = DEFAULT_STREAM_TIME,
+        start_time: float | None = None,
     ):
         """Runs an iperf3 stream.
 
@@ -419,45 +436,33 @@
         4. Removes stream UUID from active_streams
 
         Args:
-            uuid: UUID object, identifier for stream
+            uuid: Identifier for stream
             client: IPerfClient object on device
-            server_ip: string, ip address of IPerfServer for stream
-            server_port: int, port of the IPerfServer for stream
-            active_streams: multiprocessing.Manager.dict, which holds stream
-                UUIDs of active streams on the device
-            stream_results: multiprocessing.Manager.dict, which maps stream
-                UUIDs of streams to IPerfResult objects
-            access_category: string, WMM access category to use with iperf
-                (AC_BK, AC_BE, AC_VI, AC_VO). Unset if None.
-            bandwidth: int, bandwidth in mbps to use with iperf. Implies UDP.
-                Unlimited if None.
-            stream_time: int, time in seconds, to run iperf stream
-            start_time: float, time, seconds since epoch, at which to start the
-                stream (for better synchronicity). If None, start immediately.
+            server_ip: IP address of IPerfServer for stream
+            server_port: port of the IPerfServer for stream
+            active_streams: holds stream UUIDs of active streams on the device
+            stream_results: maps stream UUIDs of streams to IPerfResult objects
+            access_category: WMM access category to use with iperf (AC_BK, AC_BE, AC_VI,
+                AC_VO). Unset if None.
+            bandwidth: Bandwidth in mbps to use with iperf. Implies UDP. Unlimited if
+                None.
+            stream_time: Time in seconds to run the iperf stream
+            start_time: Time, seconds since epoch, at which to start the stream (for
+                better synchronicity). If None, start immediately.
         """
         active_streams[uuid] = True
-        # SSH sessions must be started within the process that is going to
-        # use it.
-        if type(client) == iperf_client.IPerfClientOverSsh:
-            with utils.SuppressLogOutput():
-                client.start_ssh()
 
         ac_flag = ""
         bandwidth_flag = ""
-        time_flag = "-t %s" % stream_time
+        time_flag = f"-t {stream_time}"
 
         if access_category:
-            ac_flag = " -S %s" % DEFAULT_AC_TO_TOS_TAG_MAP[access_category]
+            ac_flag = f" -S {DEFAULT_AC_TO_TOS_TAG_MAP[access_category]}"
 
         if bandwidth:
-            bandwidth_flag = " -u -b %sM" % bandwidth
+            bandwidth_flag = f" -u -b {bandwidth}M"
 
-        iperf_flags = "-p %s -i 1 %s%s%s -J" % (
-            server_port,
-            time_flag,
-            ac_flag,
-            bandwidth_flag,
-        )
+        iperf_flags = f"-p {server_port} -i 1 {time_flag}{ac_flag}{bandwidth_flag} -J"
         if not start_time:
             start_time = time.time()
         time_str = datetime.fromtimestamp(start_time).strftime("%H:%M:%S.%f")
@@ -478,13 +483,11 @@
             current_time = time.time()
             while current_time < start_time:
                 current_time = time.time()
-        path = client.start(server_ip, iperf_flags, "%s" % uuid)
+        path = client.start(server_ip, iperf_flags, f"{uuid}")
         stream_results[uuid] = iperf_server.IPerfResult(
             path, reporting_speed_units="mbps"
         )
 
-        if type(client) == iperf_client.IPerfClientOverSsh:
-            client.close_ssh()
         active_streams.pop(uuid)
 
     def _get_stream_resources(self, uuid, receiver, subnet):
@@ -512,7 +515,7 @@
             uuid: UUID object, identifier of the stream
         """
         if uuid in self._active_streams:
-            raise EnvironmentError("Resource still being used by stream %s" % uuid)
+            raise EnvironmentError(f"Resource still being used by stream {uuid}")
         (receiver, server_port) = self._reserved_servers.pop(uuid)
         receiver._release_server(server_port)
         client = self._reserved_clients.pop(uuid)
@@ -536,11 +539,6 @@
                 break
         else:
             reserved_client = iperf_client.create([self._iperf_config])[0]
-            # Due to the nature of multiprocessing, ssh connections must
-            # be started inside the parallel processes, so it must be closed
-            # here.
-            if type(reserved_client) == iperf_client.IPerfClientOverSsh:
-                reserved_client.close_ssh()
 
         self._iperf_clients[reserved_client] = UNAVAILABLE
         self._reserved_clients[uuid] = reserved_client
@@ -603,7 +601,7 @@
                 if utils.ip_in_subnet(addr, subnet):
                     return (addr, reserved_server.port)
         raise AttributeError(
-            "Reserved server has no ipv4 address in the %s subnet" % subnet
+            f"Reserved server has no ipv4 address in the {subnet} subnet"
         )
 
     def _release_server(self, server_port):
@@ -630,7 +628,7 @@
         Raises:
             WmmTransceiverError, if, after timeout, server ip is unreachable.
         """
-        self.log.info("Verifying server address (%s) is reachable." % server_ip)
+        self.log.info(f"Verifying server address ({server_ip}) is reachable.")
         end_time = time.time() + timeout
         while time.time() < end_time:
             if self.can_ping(server_ip):
@@ -643,9 +641,7 @@
                 time.sleep(1)
         else:
             self._return_stream_resources(uuid)
-            raise WmmTransceiverError(
-                "IPerfServer address (%s) unreachable." % server_ip
-            )
+            raise WmmTransceiverError(f"IPerfServer address ({server_ip}) unreachable.")
 
     def can_ping(self, dest_ip):
         """Utilizes can_ping function in wlan_device or access_point device to
@@ -684,13 +680,3 @@
         bandwidth = stream_parameters.get("bandwidth", None)
         time = stream_parameters.get("time", DEFAULT_STREAM_TIME)
         return (receiver, access_category, bandwidth, time)
-
-
-class WmmTransceiverLoggerAdapter(logging.LoggerAdapter):
-    def process(self, msg, kwargs):
-        if self.extra["identifier"]:
-            log_identifier = " | %s" % self.extra["identifier"]
-        else:
-            log_identifier = ""
-        msg = "[WmmTransceiver%s] %s" % (log_identifier, msg)
-        return (msg, kwargs)
diff --git a/src/antlion/test_utils/dhcp/__init__.py b/packages/antlion/test_utils/dhcp/__init__.py
similarity index 100%
rename from src/antlion/test_utils/dhcp/__init__.py
rename to packages/antlion/test_utils/dhcp/__init__.py
diff --git a/packages/antlion/test_utils/dhcp/base_test.py b/packages/antlion/test_utils/dhcp/base_test.py
new file mode 100644
index 0000000..8c50ac7
--- /dev/null
+++ b/packages/antlion/test_utils/dhcp/base_test.py
@@ -0,0 +1,309 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+import time
+from dataclasses import dataclass
+from ipaddress import IPv4Address, IPv4Network
+from pathlib import Path
+
+from mobly import asserts, signals
+from mobly.config_parser import TestRunConfig
+
+from antlion import utils
+from antlion.controllers.access_point import AccessPoint, setup_ap
+from antlion.controllers.android_device import AndroidDevice
+from antlion.controllers.ap_lib import dhcp_config, hostapd_constants
+from antlion.controllers.ap_lib.hostapd_security import Security, SecurityMode
+from antlion.controllers.ap_lib.hostapd_utils import generate_random_password
+from antlion.controllers.fuchsia_device import FuchsiaDevice
+from antlion.test_utils.abstract_devices.wlan_device import AssociationMode
+from antlion.test_utils.wifi import base_test
+
+
+@dataclass
+class APParams:
+    id: str
+    ssid: str
+    security: Security
+    ip: IPv4Address
+    network: IPv4Network
+
+
+class Dhcpv4InteropFixture(base_test.WifiBaseTest):
+    """Test helpers for validating DHCPv4 Interop
+
+    Test Bed Requirement:
+    * One Android device or Fuchsia device
+    * One Access Point
+    """
+
+    def __init__(self, configs: TestRunConfig) -> None:
+        super().__init__(configs)
+        self.log = logging.getLogger()
+        self.fuchsia_device: FuchsiaDevice | None = None
+        self.access_point: AccessPoint = self.access_points[0]
+
+        device_type = self.user_params.get("dut", "fuchsia_devices")
+        if device_type == "fuchsia_devices":
+            self.fuchsia_device, self.dut = self.get_dut_type(
+                FuchsiaDevice, AssociationMode.POLICY
+            )
+        elif device_type == "android_devices":
+            _, self.dut = self.get_dut_type(AndroidDevice, AssociationMode.POLICY)
+        else:
+            raise ValueError(
+                f'Invalid "dut" type specified in config: "{device_type}".'
+                'Expected "fuchsia_devices" or "android_devices".'
+            )
+
+    def setup_class(self) -> None:
+        super().setup_class()
+        self.access_point.stop_all_aps()
+
+    def setup_test(self) -> None:
+        if hasattr(self, "android_devices"):
+            for ad in self.android_devices:
+                ad.droid.wakeLockAcquireBright()
+                ad.droid.wakeUpNow()
+        self.dut.wifi_toggle_state(True)
+
+    def teardown_test(self) -> None:
+        if hasattr(self, "android_devices"):
+            for ad in self.android_devices:
+                ad.droid.wakeLockRelease()
+                ad.droid.goToSleepNow()
+        self.dut.turn_location_off_and_scan_toggle_off()
+        self.dut.disconnect()
+        self.dut.reset_wifi()
+        self.access_point.stop_all_aps()
+
+    def connect(self, ap_params: APParams) -> None:
+        asserts.assert_true(
+            self.dut.associate(
+                ap_params.ssid,
+                target_pwd=ap_params.security.password,
+                target_security=ap_params.security.security_mode,
+            ),
+            "Failed to connect.",
+        )
+
+    def setup_ap(self) -> APParams:
+        """Generates a hostapd config and sets up the AP with that config.
+
+        Does not run a DHCP server.
+
+        Returns:
+            APParams for the newly setup AP.
+        """
+        ssid = utils.rand_ascii_str(20)
+        security = Security(
+            security_mode=SecurityMode.WPA2,
+            password=generate_random_password(length=20),
+            wpa_cipher="CCMP",
+            wpa2_cipher="CCMP",
+        )
+
+        ap_ids = setup_ap(
+            access_point=self.access_point,
+            profile_name="whirlwind",
+            mode=hostapd_constants.MODE_11N_MIXED,
+            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+            n_capabilities=[],
+            ac_capabilities=[],
+            force_wmm=True,
+            ssid=ssid,
+            security=security,
+        )
+
+        if len(ap_ids) > 1:
+            raise Exception("Expected only one SSID on AP")
+
+        configured_subnets = self.access_point.get_configured_subnets()
+        if len(configured_subnets) > 1:
+            raise Exception("Expected only one subnet on AP")
+        router_ip = configured_subnets[0].router
+        network = configured_subnets[0].network
+
+        self.access_point.stop_dhcp()
+
+        return APParams(
+            id=ap_ids[0],
+            ssid=ssid,
+            security=security,
+            ip=router_ip,
+            network=network,
+        )
+
+    def get_device_ipv4_addr(
+        self, interface: str | None = None, timeout_sec: float = 20.0
+    ) -> IPv4Address:
+        """Checks if device has an ipv4 private address.
+
+        Only supported on Fuchsia.
+
+        Args:
+            interface: Name of the interface from which to get the ipv4 address.
+            timeout_sec: Seconds to wait before raising ConnectionError.
+
+        Raises:
+            ConnectionError, if the DUT does not have an ipv4 address after
+            the timeout expires.
+
+        Returns:
+            The device's IP address
+        """
+        if self.fuchsia_device is None:
+            # TODO(http://b/292289291): Add get_(ipv4|ipv6)_addr to SupportsIP.
+            raise TypeError(
+                "TODO(http://b/292289291): get_device_ipv4_addr only supports "
+                "FuchsiaDevice"
+            )
+
+        self.log.debug("Fetching updated WLAN interface list")
+        if interface is None:
+            interface = self.dut.get_default_wlan_test_interface()
+        self.log.info(
+            "Checking if DUT has received an ipv4 addr on iface %s. Will retry for %s "
+            "seconds." % (interface, timeout_sec)
+        )
+        timeout_sec = time.time() + timeout_sec
+        while time.time() < timeout_sec:
+            ip_addrs = self.fuchsia_device.get_interface_ip_addresses(interface)
+
+            if len(ip_addrs["ipv4_private"]) > 0:
+                ip = ip_addrs["ipv4_private"][0]
+                self.log.info(f"DUT has an ipv4 address: {ip}")
+                return IPv4Address(ip)
+            else:
+                self.log.debug(
+                    "DUT does not yet have an ipv4 address...retrying in 1 " "second."
+                )
+                time.sleep(1)
+        else:
+            raise ConnectionError("DUT failed to get an ipv4 address.")
+
+    def run_test_case_expect_dhcp_success(
+        self, dhcp_parameters: dict[str, str], dhcp_options: dict[str, int | str]
+    ) -> None:
+        """Starts the AP and DHCP server, and validates that the client
+        connects and obtains an address.
+
+        Args:
+            dhcp_parameters: a dictionary of DHCP parameters
+            dhcp_options: a dictionary of DHCP options
+        """
+        ap_params = self.setup_ap()
+        subnet_conf = dhcp_config.Subnet(
+            subnet=ap_params.network,
+            router=ap_params.ip,
+            additional_parameters=dhcp_parameters,
+            additional_options=dhcp_options,
+        )
+        dhcp_conf = dhcp_config.DhcpConfig(subnets=[subnet_conf])
+
+        self.log.debug("DHCP Configuration:\n%s\n", dhcp_conf.render_config_file())
+
+        with self.access_point.tcpdump.start(
+            self.access_point.wlan_5g, Path(self.log_path)
+        ):
+            self.access_point.start_dhcp(dhcp_conf=dhcp_conf)
+            self.connect(ap_params=ap_params)
+
+            # Typical log lines look like:
+            #
+            # dhcpd[26695]: DHCPDISCOVER from 01:23:45:67:89:ab via wlan1
+            # dhcpd[26695]: DHCPOFFER on 192.168.9.2 to 01:23:45:67:89:ab via wlan1
+            # dhcpd[26695]: DHCPREQUEST for 192.168.9.2 (192.168.9.1) from 01:23:45:67:89:ab via wlan1
+            # dhcpd[26695]: DHCPACK on 192.168.9.2 to 01:23:45:67:89:ab via wlan1
+
+            # Due to b/384790032, logs can also show duplicate DISCOVER and
+            # OFFER packets due to the Fuchsia DHCP client queuing packets while
+            # EAPOL is in progress:
+            #
+            # DHCPDISCOVER from 01:23:45:67:89:ab via wlan1
+            # DHCPOFFER on 192.168.9.2 to 01:23:45:67:89:ab via wlan1
+            # DHCPDISCOVER from 01:23:45:67:89:ab via wlan1
+            # DHCPOFFER on 192.168.9.2 to 01:23:45:67:89:ab via wlan1
+            # DHCPREQUEST for 192.168.9.2 (192.168.9.1) from 01:23:45:67:89:ab via wlan1
+            # DHCPACK on 192.168.9.2 to 01:23:45:67:89:ab via wlan1
+
+            try:
+                ip = self.get_device_ipv4_addr()
+            except ConnectionError:
+                self.log.warning("DHCP logs: %s", self.access_point.get_dhcp_logs())
+                raise signals.TestFailure("DUT failed to get an IP address")
+
+            # Get updates to DHCP logs
+            dhcp_logs = self.access_point.get_dhcp_logs()
+            if dhcp_logs is None:
+                raise signals.TestFailure("No DHCP logs")
+
+            # TODO(http://b/384790032): Replace with logic below with this
+            # comment once DHCP is started after EAPOL finishes. Or remove this
+            # comment if queueing is determined expected and acceptable
+            # behavior.
+            #
+            # expected_string = f"DHCPDISCOVER from"
+            # asserts.assert_equal(
+            #     dhcp_logs.count(expected_string),
+            #     1,
+            #     f'Incorrect count of DHCP Discovers ("{expected_string}") in logs',
+            #     dhcp_logs,
+            # )
+            #
+            # expected_string = f"DHCPOFFER on {ip}"
+            # asserts.assert_equal(
+            #     dhcp_logs.count(expected_string),
+            #     1,
+            #     f'Incorrect count of DHCP Offers ("{expected_string}") in logs',
+            #     dhcp_logs,
+            # )
+
+            discover_count = dhcp_logs.count("DHCPDISCOVER from")
+            offer_count = dhcp_logs.count(f"DHCPOFFER on {ip}")
+            asserts.assert_greater(
+                discover_count, 0, "Expected one or more DHCP Discovers", dhcp_logs
+            )
+            asserts.assert_equal(
+                discover_count,
+                offer_count,
+                "Expected an equal amount of DHCP Discovers and Offers",
+                dhcp_logs,
+            )
+
+            expected_string = f"DHCPREQUEST for {ip}"
+            asserts.assert_true(
+                dhcp_logs.count(expected_string) >= 1,
+                f'Incorrect count of DHCP Requests ("{expected_string}") in logs: '
+                + dhcp_logs
+                + "\n",
+            )
+
+            expected_string = f"DHCPACK on {ip}"
+            asserts.assert_true(
+                dhcp_logs.count(expected_string) >= 1,
+                f'Incorrect count of DHCP Acks ("{expected_string}") in logs: '
+                + dhcp_logs
+                + "\n",
+            )
+
+            self.log.info(f"Attempting to ping {ap_params.ip}...")
+            ping_result = self.dut.ping(str(ap_params.ip), count=2)
+            asserts.assert_true(
+                ping_result.success,
+                f"DUT failed to ping router at {ap_params.ip}: {ping_result}",
+            )
diff --git a/src/antlion/test_utils/fuchsia/__init__.py b/packages/antlion/test_utils/fuchsia/__init__.py
similarity index 100%
rename from src/antlion/test_utils/fuchsia/__init__.py
rename to packages/antlion/test_utils/fuchsia/__init__.py
diff --git a/src/antlion/test_utils/fuchsia/wmm_test_cases.py b/packages/antlion/test_utils/fuchsia/wmm_test_cases.py
similarity index 100%
rename from src/antlion/test_utils/fuchsia/wmm_test_cases.py
rename to packages/antlion/test_utils/fuchsia/wmm_test_cases.py
diff --git a/src/antlion/test_utils/net/__init__.py b/packages/antlion/test_utils/net/__init__.py
similarity index 100%
rename from src/antlion/test_utils/net/__init__.py
rename to packages/antlion/test_utils/net/__init__.py
diff --git a/src/antlion/test_utils/net/connectivity_const.py b/packages/antlion/test_utils/net/connectivity_const.py
similarity index 100%
rename from src/antlion/test_utils/net/connectivity_const.py
rename to packages/antlion/test_utils/net/connectivity_const.py
diff --git a/packages/antlion/test_utils/net/net_test_utils.py b/packages/antlion/test_utils/net/net_test_utils.py
new file mode 100644
index 0000000..2862031
--- /dev/null
+++ b/packages/antlion/test_utils/net/net_test_utils.py
@@ -0,0 +1,94 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import os
+
+from antlion.controllers import adb
+from antlion.test_utils.net import connectivity_const as cconst
+from antlion.utils import start_standing_subprocess, stop_standing_subprocess
+
+VPN_CONST = cconst.VpnProfile
+VPN_TYPE = cconst.VpnProfileType
+VPN_PARAMS = cconst.VpnReqParams
+TCPDUMP_PATH = "/data/local/tmp/"
+USB_CHARGE_MODE = "svc usb setFunctions"
+USB_TETHERING_MODE = "svc usb setFunctions rndis"
+ENABLE_HARDWARE_OFFLOAD = "settings put global tether_offload_disabled 0"
+DISABLE_HARDWARE_OFFLOAD = "settings put global tether_offload_disabled 1"
+DEVICE_IP_ADDRESS = "ip address"
+LOCALHOST = "192.168.1.1"
+
+# Time to wait for radio to up and running after reboot
+WAIT_TIME_AFTER_REBOOT = 10
+
+GCE_SSH = "gcloud compute ssh "
+GCE_SCP = "gcloud compute scp "
+
+
+def start_tcpdump(ad, test_name, interface="any"):
+    """Start tcpdump on all interfaces.
+
+    Args:
+        ad: android device object.
+        test_name: tcpdump file name will have this
+    """
+    ad.log.info("Starting tcpdump on all interfaces")
+    ad.adb.shell("killall -9 tcpdump", ignore_status=True)
+    ad.adb.shell(f"mkdir {TCPDUMP_PATH}", ignore_status=True)
+    ad.adb.shell(f"rm -rf {TCPDUMP_PATH}/*", ignore_status=True)
+
+    file_name = f"{TCPDUMP_PATH}/tcpdump_{ad.serial}_{test_name}.pcap"
+    ad.log.info("tcpdump file is %s", file_name)
+    cmd = f"adb -s {ad.serial} shell tcpdump -i {interface} -s0 -w {file_name}"
+    try:
+        return start_standing_subprocess(cmd, 5)
+    except Exception:
+        ad.log.exception(f"Could not start standing process {repr(cmd)}")
+
+    return None
+
+
def stop_tcpdump(
    ad, proc, test_name, pull_dump=True, adb_pull_timeout=adb.DEFAULT_ADB_PULL_TIMEOUT
):
    """Stops tcpdump on any iface.

       Pulls the tcpdump file in the tcpdump dir if necessary.

    Args:
        ad: android device object.
        proc: need to know which pid to stop
        test_name: test name to save the tcpdump file
        pull_dump: pull tcpdump file or not
        adb_pull_timeout: timeout for adb_pull

    Returns:
      log_path of the tcpdump file
    """
    ad.log.info("Stopping and pulling tcpdump if any")
    if proc is None:
        return None
    try:
        stop_standing_subprocess(proc)
    except Exception as e:
        ad.log.warning(e)
    if pull_dump:
        log_path = os.path.join(ad.device_log_path, f"TCPDUMP_{ad.serial}")
        os.makedirs(log_path, exist_ok=True)
        ad.adb.pull(f"{TCPDUMP_PATH}/. {log_path}", timeout=adb_pull_timeout)
        ad.adb.shell(f"rm -rf {TCPDUMP_PATH}/*", ignore_status=True)
        file_name = f"tcpdump_{ad.serial}_{test_name}.pcap"
        return f"{log_path}/{file_name}"
    return None
diff --git a/src/antlion/test_utils/wifi/OWNERS b/packages/antlion/test_utils/wifi/OWNERS
similarity index 100%
rename from src/antlion/test_utils/wifi/OWNERS
rename to packages/antlion/test_utils/wifi/OWNERS
diff --git a/src/antlion/test_utils/wifi/__init__.py b/packages/antlion/test_utils/wifi/__init__.py
similarity index 100%
rename from src/antlion/test_utils/wifi/__init__.py
rename to packages/antlion/test_utils/wifi/__init__.py
diff --git a/packages/antlion/test_utils/wifi/base_test.py b/packages/antlion/test_utils/wifi/base_test.py
new file mode 100644
index 0000000..82a74e0
--- /dev/null
+++ b/packages/antlion/test_utils/wifi/base_test.py
@@ -0,0 +1,870 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""
+    Base Class for Defining Common WiFi Test Functionality
+"""
+
+import copy
+import os
+from typing import Any, TypedDict, TypeVar
+
+from mobly import signals
+from mobly.base_test import BaseTestClass
+from mobly.config_parser import TestRunConfig
+from mobly.records import TestResultRecord
+
+from antlion import context, controllers, utils
+from antlion.controllers.access_point import AccessPoint
+from antlion.controllers.android_device import AndroidDevice
+from antlion.controllers.ap_lib import hostapd_constants
+from antlion.controllers.ap_lib.hostapd_security import (
+    OpenWRTEncryptionMode,
+    SecurityMode,
+)
+from antlion.controllers.attenuator import Attenuator
+from antlion.controllers.fuchsia_device import FuchsiaDevice
+from antlion.controllers.iperf_client import IPerfClientBase
+from antlion.controllers.iperf_server import IPerfServer, IPerfServerOverSsh
+from antlion.controllers.openwrt_ap import PMF_ENABLED, BSSIDMap, OpenWrtAP
+from antlion.controllers.openwrt_lib.wireless_config import WirelessConfig
+from antlion.controllers.packet_capture import PacketCapture
+from antlion.controllers.pdu import PduDevice
+from antlion.keys import Config
+from antlion.test_utils.abstract_devices.wlan_device import (
+    AndroidWlanDevice,
+    AssociationMode,
+    FuchsiaWlanDevice,
+    SupportsWLAN,
+)
+from antlion.test_utils.net import net_test_utils as nutils
+from antlion.test_utils.wifi import wifi_test_utils as wutils
+from antlion.types import Controller
+from antlion.validation import MapValidator
+
# Shorthand alias for the wifi test-utils enums namespace.
WifiEnums = wutils.WifiEnums
# Maximum number of APs a testbed is assumed to contain (see the channel-list
# comment in configure_openwrt_ap_and_start).
MAX_AP_COUNT = 2
+
+
class Network(TypedDict):
    """Full description of a WiFi network used for AP configuration.

    All keys are required; absent optional values are expressed as None.
    """

    SSID: str  # Network name (may be hidden; see hiddenSSID).
    security: SecurityMode  # Security mode, e.g. OPEN, WEP, WPA, WPA2.
    password: str | None  # PSK passphrase; None for open and WEP networks.
    hiddenSSID: bool  # True if the AP should not broadcast the SSID.
    wepKeys: list[str] | None  # WEP keys (four entries when set); None otherwise.
    ieee80211w: str | None  # Protected Management Frames (PMF) setting, if any.
+
+
class NetworkUpdate(TypedDict, total=False):
    """Partial update of a Network; every key is optional (total=False)."""

    SSID: str
    security: SecurityMode
    password: str | None
    hiddenSSID: bool
    wepKeys: list[str] | None
    ieee80211w: str | None
+
+
# Mapping from band name ("2g"/"5g") to that band's Network configuration.
NetworkList = dict[str, Network]

# Generic device type used by get_dut_type.
_T = TypeVar("_T")
+
+
+class WifiBaseTest(BaseTestClass):
    def __init__(self, configs: TestRunConfig) -> None:
        """Registers testbed controllers and applies common WiFi test setup.

        Registers every optional controller type used by WiFi tests (APs,
        Android/Fuchsia devices, attenuators, iperf endpoints, PDUs, packet
        capture), zeroes all attenuators, resolves the CNSS-diag and
        country-code config files, and initializes each Android device for
        WiFi testing.

        Args:
            configs: Mobly test run configuration for this test class.
        """
        super().__init__(configs)
        self.enable_packet_log = False
        self.packet_log_2g = hostapd_constants.AP_DEFAULT_CHANNEL_2G
        self.packet_log_5g = hostapd_constants.AP_DEFAULT_CHANNEL_5G
        self.tcpdump_proc: list[Any] = []
        self.packet_log_pid: dict[str, Any] = {}

        T = TypeVar("T")

        # Wraps Mobly's register_controller so that a missing optional
        # controller yields an empty list instead of None.
        def register_controller(module: Controller[T]) -> list[T]:
            registered_controllers: list[T] | None = self.register_controller(
                module, required=False
            )
            if registered_controllers is None:
                return []
            return registered_controllers

        self.access_points: list[AccessPoint] = register_controller(
            controllers.access_point
        )
        self.openwrt_aps: list[OpenWrtAP] = register_controller(controllers.openwrt_ap)
        self.android_devices: list[AndroidDevice] = register_controller(
            controllers.android_device
        )
        self.attenuators: list[Attenuator] = register_controller(controllers.attenuator)
        self.fuchsia_devices: list[FuchsiaDevice] = register_controller(
            controllers.fuchsia_device
        )
        self.iperf_clients: list[IPerfClientBase] = register_controller(
            controllers.iperf_client
        )
        iperf_servers: list[IPerfServer | IPerfServerOverSsh] = register_controller(
            controllers.iperf_server
        )
        # Only SSH-reachable iperf servers are kept; plain IPerfServer
        # instances are dropped here.
        self.iperf_servers = [
            iperf_server
            for iperf_server in iperf_servers
            if isinstance(iperf_server, IPerfServerOverSsh)
        ]
        self.pdu_devices: list[PduDevice] = register_controller(controllers.pdu)
        self.packet_capture: list[PacketCapture] = register_controller(
            controllers.packet_capture
        )

        # Start every test class from a known RF baseline.
        for attenuator in self.attenuators:
            attenuator.set_atten(0)

        self.pixel_models: list[str] | None = self.user_params.get("pixel_models")
        self.cnss_diag_file: str | list[str] | None = self.user_params.get(
            "cnss_diag_file"
        )
        self.country_code_file: str | list[str] | None = self.user_params.get(
            "country_code_file"
        )

        if self.cnss_diag_file:
            if isinstance(self.cnss_diag_file, list):
                self.cnss_diag_file = self.cnss_diag_file[0]
            # Relative paths are resolved against the config file's directory.
            if not os.path.isfile(self.cnss_diag_file):
                self.cnss_diag_file = os.path.join(
                    self.user_params[Config.key_config_path.value],
                    self.cnss_diag_file,
                )

        self.packet_logger: PacketCapture | None = None
        # NOTE(review): enable_packet_log is assigned False above, so this
        # branch only activates if a subclass mutates it during construction —
        # confirm this is the intended hook.
        if self.enable_packet_log and self.packet_capture:
            self.packet_logger = self.packet_capture[0]
            self.packet_logger.configure_monitor_mode("2G", self.packet_log_2g)
            self.packet_logger.configure_monitor_mode("5G", self.packet_log_5g)

        for ad in self.android_devices:
            wutils.wifi_test_device_init(ad)
            if self.country_code_file:
                if isinstance(self.country_code_file, list):
                    self.country_code_file = self.country_code_file[0]
                if not os.path.isfile(self.country_code_file):
                    self.country_code_file = os.path.join(
                        self.user_params[Config.key_config_path.value],
                        self.country_code_file,
                    )
                self.country_code = utils.load_config(self.country_code_file)["country"]
            else:
                # Default to the US regulatory domain when no file is given.
                self.country_code = WifiEnums.CountryCode.US
            wutils.set_wifi_country_code(ad, self.country_code)
+
+    def setup_test(self) -> None:
+        if self.android_devices and self.cnss_diag_file and self.pixel_models:
+            wutils.start_cnss_diags(
+                self.android_devices, self.cnss_diag_file, self.pixel_models
+            )
+        self.tcpdump_proc = []
+        for ad in self.android_devices:
+            proc = nutils.start_tcpdump(ad, self.current_test_info.name)
+            self.tcpdump_proc.append((ad, proc))
+        if self.packet_logger:
+            self.packet_log_pid = wutils.start_pcap(
+                self.packet_logger, "dual", self.current_test_info.name
+            )
+
+    def teardown_test(self) -> None:
+        if self.android_devices and self.cnss_diag_file and self.pixel_models:
+            wutils.stop_cnss_diags(self.android_devices, self.pixel_models)
+            for proc in self.tcpdump_proc:
+                nutils.stop_tcpdump(
+                    proc[0],
+                    proc[1],
+                    self.current_test_info.name,
+                    pull_dump=False,
+                )
+            self.tcpdump_proc = []
+        if self.packet_logger and self.packet_log_pid:
+            wutils.stop_pcap(self.packet_logger, self.packet_log_pid, test_status=True)
+            self.packet_log_pid = {}
+
+    def teardown_class(self) -> None:
+        super().teardown_class()
+        if hasattr(self, "fuchsia_devices"):
+            for device in self.fuchsia_devices:
+                device.take_bug_report()
+        if hasattr(self, "access_points") and hasattr(self, "iperf_servers"):
+            self.download_logs()
+
    def on_fail(self, record: TestResultRecord) -> None:
        """A function that is executed upon a test failure.

        Collects bug reports, adb and CNSS-diag logs, and tcpdump captures
        from all Android devices, stops any running packet capture, and runs
        the generic failure handler on each Fuchsia device.

        Args:
            record: A copy of the test record for this test, containing all
                information of the test execution including exception objects.
        """
        if self.android_devices:
            for ad in self.android_devices:
                ad.take_bug_report(record.test_name, record.begin_time)
                ad.cat_adb_log(record.test_name, record.begin_time)
                wutils.get_ssrdumps(ad)
            if self.cnss_diag_file and self.pixel_models:
                wutils.stop_cnss_diags(self.android_devices, self.pixel_models)
                for ad in self.android_devices:
                    wutils.get_cnss_diag_log(ad)
            # Unlike teardown_test, failures pull the tcpdump files to the host.
            for proc in self.tcpdump_proc:
                nutils.stop_tcpdump(proc[0], proc[1], record.test_name)
            self.tcpdump_proc = []
        if self.packet_logger and self.packet_log_pid:
            wutils.stop_pcap(self.packet_logger, self.packet_log_pid, test_status=False)
            self.packet_log_pid = {}

        # Gets a wlan_device log and calls the generic device fail on DUT.
        for fd in self.fuchsia_devices:
            self.on_device_fail(fd, record)
+
+    def on_device_fail(self, device: FuchsiaDevice, _: TestResultRecord) -> None:
+        """Gets a generic device DUT bug report.
+
+        This method takes a bug report if the device has the
+        'take_bug_report_on_fail' config value, and if the flag is true. This
+        method also power cycles if 'hard_reboot_on_fail' is True.
+
+        Args:
+            device: Generic device to gather logs from.
+            record: More information about the test.
+        """
+        if (
+            not hasattr(device, "take_bug_report_on_fail")
+            or device.take_bug_report_on_fail
+        ):
+            device.take_bug_report()
+
+        if hasattr(device, "hard_reboot_on_fail") and device.hard_reboot_on_fail:
+            device.reboot(reboot_type="hard", testbed_pdus=self.pdu_devices)
+
+    def get_dut(self, association_mode: AssociationMode) -> SupportsWLAN:
+        """Get the DUT based on user_params, default to Fuchsia."""
+        device_type = self.user_params.get("dut", "fuchsia_devices")
+        if device_type == "fuchsia_devices":
+            return self.get_dut_type(FuchsiaDevice, association_mode)[1]
+        elif device_type == "android_devices":
+            return self.get_dut_type(FuchsiaDevice, association_mode)[1]
+        else:
+            raise signals.TestAbortClass(
+                f'Invalid "dut" type specified in config: "{device_type}". '
+                'Expected "fuchsia_devices" or "android_devices".'
+            )
+
+    def get_dut_type(
+        self, device_type: type[_T], association_mode: AssociationMode
+    ) -> tuple[_T, SupportsWLAN]:
+        if device_type is FuchsiaDevice:
+            if len(self.fuchsia_devices) == 0:
+                raise signals.TestAbortClass("Requires at least one Fuchsia device")
+            fd = self.fuchsia_devices[0]
+            assert isinstance(fd, device_type)
+            return fd, FuchsiaWlanDevice(fd, association_mode)
+
+        if device_type is AndroidDevice:
+            if len(self.android_devices) == 0:
+                raise signals.TestAbortClass("Requires at least one Android device")
+            ad = self.android_devices[0]
+            assert isinstance(ad, device_type)
+            return ad, AndroidWlanDevice(ad)
+
+        raise signals.TestAbortClass(
+            f"Invalid device_type specified: {device_type.__name__}. "
+            "Expected FuchsiaDevice or AndroidDevice."
+        )
+
+    def download_logs(self) -> None:
+        """Downloads the DHCP and hostapad logs from the access_point.
+
+        Using the current TestClassContext and TestCaseContext this method pulls
+        the DHCP and hostapd logs and outputs them to the correct path.
+        """
+        current_path = context.get_current_context().get_full_output_path()
+        for access_point in self.access_points:
+            access_point.download_ap_logs(current_path)
+        for iperf_server in self.iperf_servers:
+            iperf_server.download_logs(current_path)
+
+    def get_psk_network(
+        self,
+        mirror_ap: bool,
+        reference_networks: list[NetworkList],
+        hidden: bool = False,
+        same_ssid: bool = False,
+        security_mode: SecurityMode = SecurityMode.WPA2,
+        ssid_length_2g: int = hostapd_constants.AP_SSID_LENGTH_2G,
+        ssid_length_5g: int = hostapd_constants.AP_SSID_LENGTH_5G,
+        passphrase_length_2g: int = hostapd_constants.AP_PASSPHRASE_LENGTH_2G,
+        passphrase_length_5g: int = hostapd_constants.AP_PASSPHRASE_LENGTH_5G,
+    ) -> NetworkList:
+        """Generates SSID and passphrase for a WPA2 network using random
+        generator.
+
+        Args:
+            mirror_ap: Determines if both APs use the same hostapd config or
+                different configs.
+            reference_networks: PSK networks.
+            same_ssid: Determines if both bands on AP use the same SSID.
+            ssid_length_2g: Number of characters to use for 2G SSID.
+            ssid_length_5g: Number of characters to use for 5G SSID.
+            passphrase_length_2g: Length of password for 2G network.
+            passphrase_length_5g: Length of password for 5G network.
+
+        Returns: A dict of 2G and 5G network lists for hostapd configuration.
+        """
+        if same_ssid:
+            ref_2g_ssid = f"xg_{utils.rand_ascii_str(ssid_length_2g)}"
+            ref_5g_ssid = ref_2g_ssid
+
+            ref_2g_passphrase = utils.rand_ascii_str(passphrase_length_2g)
+            ref_5g_passphrase = ref_2g_passphrase
+
+        else:
+            ref_2g_ssid = f"2g_{utils.rand_ascii_str(ssid_length_2g)}"
+            ref_2g_passphrase = utils.rand_ascii_str(passphrase_length_2g)
+
+            ref_5g_ssid = f"5g_{utils.rand_ascii_str(ssid_length_5g)}"
+            ref_5g_passphrase = utils.rand_ascii_str(passphrase_length_5g)
+
+        network_dict_2g = Network(
+            SSID=ref_2g_ssid,
+            security=security_mode,
+            password=ref_2g_passphrase,
+            hiddenSSID=hidden,
+            wepKeys=None,
+            ieee80211w=None,
+        )
+
+        network_dict_5g = Network(
+            SSID=ref_5g_ssid,
+            security=security_mode,
+            password=ref_5g_passphrase,
+            hiddenSSID=hidden,
+            wepKeys=None,
+            ieee80211w=None,
+        )
+
+        for _ in range(MAX_AP_COUNT):
+            reference_networks.append(
+                {
+                    "2g": copy.copy(network_dict_2g),
+                    "5g": copy.copy(network_dict_5g),
+                }
+            )
+            if not mirror_ap:
+                break
+        return {"2g": network_dict_2g, "5g": network_dict_5g}
+
+    def get_open_network(
+        self,
+        mirror_ap: bool,
+        open_network: list[NetworkList],
+        hidden: bool = False,
+        same_ssid: bool = False,
+        ssid_length_2g: int = hostapd_constants.AP_SSID_LENGTH_2G,
+        ssid_length_5g: int = hostapd_constants.AP_SSID_LENGTH_5G,
+        security_mode: SecurityMode = SecurityMode.OPEN,
+    ) -> NetworkList:
+        """Generates SSIDs for a open network using a random generator.
+
+        Args:
+            mirror_ap: Boolean, determines if both APs use the same hostapd
+                       config or different configs.
+            open_network: List of open networks.
+            same_ssid: Boolean, determines if both bands on AP use the same
+                       SSID.
+            ssid_length_2g: Int, number of characters to use for 2G SSID.
+            ssid_length_5g: Int, number of characters to use for 5G SSID.
+            security_mode: 'none' for open and 'OWE' for WPA3 OWE.
+
+        Returns: A dict of 2G and 5G network lists for hostapd configuration.
+
+        """
+        if same_ssid:
+            open_2g_ssid = f"xg_{utils.rand_ascii_str(ssid_length_2g)}"
+            open_5g_ssid = open_2g_ssid
+        else:
+            open_2g_ssid = f"2g_{utils.rand_ascii_str(ssid_length_2g)}"
+            open_5g_ssid = f"5g_{utils.rand_ascii_str(ssid_length_5g)}"
+
+        network_dict_2g = Network(
+            SSID=open_2g_ssid,
+            security=security_mode,
+            password=None,
+            hiddenSSID=hidden,
+            wepKeys=None,
+            ieee80211w=None,
+        )
+
+        network_dict_5g = Network(
+            SSID=open_5g_ssid,
+            security=security_mode,
+            password=None,
+            hiddenSSID=hidden,
+            wepKeys=None,
+            ieee80211w=None,
+        )
+
+        for _ in range(MAX_AP_COUNT):
+            open_network.append(
+                {
+                    "2g": copy.copy(network_dict_2g),
+                    "5g": copy.copy(network_dict_5g),
+                }
+            )
+            if not mirror_ap:
+                break
+        return {"2g": network_dict_2g, "5g": network_dict_5g}
+
+    def get_wep_network(
+        self,
+        mirror_ap: bool,
+        networks: list[NetworkList],
+        hidden: bool = False,
+        same_ssid: bool = False,
+        ssid_length_2g: int = hostapd_constants.AP_SSID_LENGTH_2G,
+        ssid_length_5g: int = hostapd_constants.AP_SSID_LENGTH_5G,
+        passphrase_length_2g: int = hostapd_constants.AP_PASSPHRASE_LENGTH_2G,
+        passphrase_length_5g: int = hostapd_constants.AP_PASSPHRASE_LENGTH_5G,
+    ) -> NetworkList:
+        """Generates SSID and passphrase for a WEP network using random
+        generator.
+
+        Args:
+            mirror_ap: Determines if both APs use the same hostapd config or
+                different configs.
+            networks: List of WEP networks.
+            same_ssid: Determines if both bands on AP use the same SSID.
+            ssid_length_2g: Number of characters to use for 2G SSID.
+            ssid_length_5g: Number of characters to use for 5G SSID.
+            passphrase_length_2g: Length of password for 2G network.
+            passphrase_length_5g: Length of password for 5G network.
+
+        Returns: A dict of 2G and 5G network lists for hostapd configuration.
+
+        """
+        if same_ssid:
+            ref_2g_ssid = f"xg_{utils.rand_ascii_str(ssid_length_2g)}"
+            ref_5g_ssid = ref_2g_ssid
+
+            ref_2g_passphrase = utils.rand_hex_str(passphrase_length_2g)
+            ref_5g_passphrase = ref_2g_passphrase
+
+        else:
+            ref_2g_ssid = f"2g_{utils.rand_ascii_str(ssid_length_2g)}"
+            ref_2g_passphrase = utils.rand_hex_str(passphrase_length_2g)
+
+            ref_5g_ssid = f"5g_{utils.rand_ascii_str(ssid_length_5g)}"
+            ref_5g_passphrase = utils.rand_hex_str(passphrase_length_5g)
+
+        network_dict_2g = Network(
+            SSID=ref_2g_ssid,
+            security=SecurityMode.WEP,
+            password=None,
+            hiddenSSID=hidden,
+            wepKeys=[ref_2g_passphrase] * 4,
+            ieee80211w=None,
+        )
+
+        network_dict_5g = Network(
+            SSID=ref_5g_ssid,
+            security=SecurityMode.WEP,
+            password=None,
+            hiddenSSID=hidden,
+            wepKeys=[ref_5g_passphrase] * 4,
+            ieee80211w=None,
+        )
+
+        for _ in range(MAX_AP_COUNT):
+            networks.append(
+                {
+                    "2g": copy.copy(network_dict_2g),
+                    "5g": copy.copy(network_dict_5g),
+                }
+            )
+            if not mirror_ap:
+                break
+        return {"2g": network_dict_2g, "5g": network_dict_5g}
+
+    def configure_openwrt_ap_and_start(
+        self,
+        channel_5g: int = hostapd_constants.AP_DEFAULT_CHANNEL_5G,
+        channel_2g: int = hostapd_constants.AP_DEFAULT_CHANNEL_2G,
+        channel_5g_ap2: int | None = None,
+        channel_2g_ap2: int | None = None,
+        ssid_length_2g: int = hostapd_constants.AP_SSID_LENGTH_2G,
+        passphrase_length_2g: int = hostapd_constants.AP_PASSPHRASE_LENGTH_2G,
+        ssid_length_5g: int = hostapd_constants.AP_SSID_LENGTH_5G,
+        passphrase_length_5g: int = hostapd_constants.AP_PASSPHRASE_LENGTH_5G,
+        mirror_ap: bool = False,
+        hidden: bool = False,
+        same_ssid: bool = False,
+        open_network: bool = False,
+        wpa1_network: bool = False,
+        wpa_network: bool = False,
+        wep_network: bool = False,
+        ent_network: bool = False,
+        ent_network_pwd: bool = False,
+        owe_network: bool = False,
+        sae_network: bool = False,
+        saemixed_network: bool = False,
+        radius_conf_2g: dict[str, Any] | None = None,
+        radius_conf_5g: dict[str, Any] | None = None,
+        radius_conf_pwd: dict[str, Any] | None = None,
+        ap_count: int = 1,
+        ieee80211w: int | None = None,
+    ) -> None:
+        """Create, configure and start OpenWrt AP.
+
+        Args:
+            channel_5g: 5G channel to configure.
+            channel_2g: 2G channel to configure.
+            channel_5g_ap2: 5G channel to configure on AP2.
+            channel_2g_ap2: 2G channel to configure on AP2.
+            ssid_length_2g: Int, number of characters to use for 2G SSID.
+            passphrase_length_2g: Int, length of password for 2G network.
+            ssid_length_5g: Int, number of characters to use for 5G SSID.
+            passphrase_length_5g: Int, length of password for 5G network.
+            same_ssid: Boolean, determines if both bands on AP use the same SSID.
+            open_network: Boolean, to check if open network should be configured.
+            wpa_network: Boolean, to check if wpa network should be configured.
+            wep_network: Boolean, to check if wep network should be configured.
+            ent_network: Boolean, to check if ent network should be configured.
+            ent_network_pwd: Boolean, to check if ent pwd network should be configured.
+            owe_network: Boolean, to check if owe network should be configured.
+            sae_network: Boolean, to check if sae network should be configured.
+            saemixed_network: Boolean, to check if saemixed network should be configured.
+            radius_conf_2g: dictionary with enterprise radius server details.
+            radius_conf_5g: dictionary with enterprise radius server details.
+            radius_conf_pwd: dictionary with enterprise radiuse server details.
+            ap_count: APs to configure.
+            ieee80211w:PMF to configure
+        """
+        if mirror_ap and ap_count == 1:
+            raise ValueError("ap_count cannot be 1 if mirror_ap is True.")
+        if (channel_5g_ap2 or channel_2g_ap2) and ap_count == 1:
+            raise ValueError("ap_count cannot be 1 if channels of AP2 are provided.")
+        # we are creating a channel list for 2G and 5G bands. The list is of
+        # size 2 and this is based on the assumption that each testbed will have
+        # at most 2 APs.
+        if not channel_5g_ap2:
+            channel_5g_ap2 = channel_5g
+        if not channel_2g_ap2:
+            channel_2g_ap2 = channel_2g
+        channels_2g = [channel_2g, channel_2g_ap2]
+        channels_5g = [channel_5g, channel_5g_ap2]
+
+        if radius_conf_2g is None:
+            radius_conf_2g = {}
+        if radius_conf_5g is None:
+            radius_conf_5g = {}
+        if radius_conf_pwd is None:
+            radius_conf_pwd = {}
+
+        self.bssid_map: list[BSSIDMap] = []
+        for i in range(ap_count):
+            configs: list[WirelessConfig] = []
+
+            num_2g: int = 1
+            num_5g: int = 1
+
+            if wpa1_network:
+                networks = self.get_psk_network(
+                    mirror_ap,
+                    [],
+                    hidden,
+                    same_ssid,
+                    SecurityMode.WPA,
+                    ssid_length_2g,
+                    ssid_length_5g,
+                    passphrase_length_2g,
+                    passphrase_length_5g,
+                )
+
+                def add_config(name: str, band: str) -> None:
+                    configs.append(
+                        WirelessConfig(
+                            name=name,
+                            ssid=networks[band]["SSID"],
+                            security=OpenWRTEncryptionMode.PSK,
+                            band=band,
+                            password=networks[band]["password"],
+                            hidden=networks[band]["hiddenSSID"],
+                            ieee80211w=ieee80211w,
+                        )
+                    )
+
+                add_config(f"wifi_2g_{num_2g}", hostapd_constants.BAND_2G)
+                add_config(f"wifi_5g_{num_5g}", hostapd_constants.BAND_5G)
+                num_2g += 1
+                num_5g += 1
+            if wpa_network:
+                networks = self.get_psk_network(
+                    mirror_ap,
+                    [],
+                    hidden,
+                    same_ssid,
+                    SecurityMode.WPA2,
+                    ssid_length_2g,
+                    ssid_length_5g,
+                    passphrase_length_2g,
+                    passphrase_length_5g,
+                )
+
+                def add_config(name: str, band: str) -> None:
+                    configs.append(
+                        WirelessConfig(
+                            name=name,
+                            ssid=networks[band]["SSID"],
+                            security=OpenWRTEncryptionMode.PSK2,
+                            band=band,
+                            password=networks[band]["password"],
+                            hidden=networks[band]["hiddenSSID"],
+                            ieee80211w=ieee80211w,
+                        )
+                    )
+
+                add_config(f"wifi_2g_{num_2g}", hostapd_constants.BAND_2G)
+                add_config(f"wifi_5g_{num_5g}", hostapd_constants.BAND_5G)
+                num_2g += 1
+                num_5g += 1
+            if wep_network:
+                networks = self.get_wep_network(
+                    mirror_ap,
+                    [],
+                    hidden,
+                    same_ssid,
+                    ssid_length_2g,
+                    ssid_length_5g,
+                )
+
+                def add_config(name: str, band: str) -> None:
+                    configs.append(
+                        WirelessConfig(
+                            name=name,
+                            ssid=networks[band]["SSID"],
+                            security=OpenWRTEncryptionMode.WEP,
+                            band=band,
+                            wep_key=networks[band]["wepKeys"],
+                            hidden=networks[band]["hiddenSSID"],
+                        )
+                    )
+
+                add_config(f"wifi_2g_{num_2g}", hostapd_constants.BAND_2G)
+                add_config(f"wifi_5g_{num_5g}", hostapd_constants.BAND_5G)
+                num_2g += 1
+                num_5g += 1
+            if ent_network:
+                networks = self.get_open_network(
+                    mirror_ap,
+                    [],
+                    hidden,
+                    same_ssid,
+                    ssid_length_2g,
+                    ssid_length_5g,
+                    SecurityMode.WPA2,
+                )
+
+                def add_config_with_radius(
+                    name: str, band: str, radius_conf: dict[str, str | int | None]
+                ) -> None:
+                    conf = MapValidator(radius_conf)
+                    configs.append(
+                        WirelessConfig(
+                            name=name,
+                            ssid=networks[band]["SSID"],
+                            security=OpenWRTEncryptionMode.WPA2,
+                            band=band,
+                            radius_server_ip=conf.get(str, "radius_server_ip", None),
+                            radius_server_port=conf.get(
+                                int, "radius_server_port", None
+                            ),
+                            radius_server_secret=conf.get(
+                                str, "radius_server_secret", None
+                            ),
+                            hidden=networks[band]["hiddenSSID"],
+                        )
+                    )
+
+                add_config_with_radius(
+                    f"wifi_2g_{num_2g}",
+                    hostapd_constants.BAND_2G,
+                    radius_conf_2g,
+                )
+                add_config_with_radius(
+                    f"wifi_5g_{num_5g}",
+                    hostapd_constants.BAND_5G,
+                    radius_conf_5g,
+                )
+                num_2g += 1
+                num_5g += 1
+            if ent_network_pwd:
+                networks = self.get_open_network(
+                    mirror_ap,
+                    [],
+                    hidden,
+                    same_ssid,
+                    ssid_length_2g,
+                    ssid_length_5g,
+                    SecurityMode.WPA2,
+                )
+
+                radius_conf = {} if radius_conf_pwd is None else radius_conf_pwd
+
+                def add_config(name: str, band: str) -> None:
+                    configs.append(
+                        WirelessConfig(
+                            name=name,
+                            ssid=networks[band]["SSID"],
+                            security=OpenWRTEncryptionMode.WPA2,
+                            band=band,
+                            radius_server_ip=radius_conf.get("radius_server_ip"),
+                            radius_server_port=radius_conf.get("radius_server_port"),
+                            radius_server_secret=radius_conf.get(
+                                "radius_server_secret"
+                            ),
+                            hidden=networks[band]["hiddenSSID"],
+                        )
+                    )
+
+                add_config(f"wifi_2g_{num_2g}", hostapd_constants.BAND_2G)
+                add_config(f"wifi_5g_{num_5g}", hostapd_constants.BAND_5G)
+                num_2g += 1
+                num_5g += 1
+            if open_network:
+                networks = self.get_open_network(
+                    mirror_ap,
+                    [],
+                    hidden,
+                    same_ssid,
+                    ssid_length_2g,
+                    ssid_length_5g,
+                )
+
+                def add_config(name: str, band: str) -> None:
+                    configs.append(
+                        WirelessConfig(
+                            name=name,
+                            ssid=networks[band]["SSID"],
+                            security=OpenWRTEncryptionMode.NONE,
+                            band=band,
+                            hidden=networks[band]["hiddenSSID"],
+                        )
+                    )
+
+                add_config(f"wifi_2g_{num_2g}", hostapd_constants.BAND_2G)
+                add_config(f"wifi_5g_{num_5g}", hostapd_constants.BAND_5G)
+                num_2g += 1
+                num_5g += 1
+            if owe_network:
+                networks = self.get_open_network(
+                    mirror_ap,
+                    [],
+                    hidden,
+                    same_ssid,
+                    ssid_length_2g,
+                    ssid_length_5g,
+                )
+
+                def add_config(name: str, band: str) -> None:
+                    configs.append(
+                        WirelessConfig(
+                            name=name,
+                            ssid=networks[band]["SSID"],
+                            security=OpenWRTEncryptionMode.OWE,
+                            band=band,
+                            hidden=networks[band]["hiddenSSID"],
+                            ieee80211w=PMF_ENABLED,
+                        )
+                    )
+
+                add_config(f"wifi_2g_{num_2g}", hostapd_constants.BAND_2G)
+                add_config(f"wifi_5g_{num_5g}", hostapd_constants.BAND_5G)
+                num_2g += 1
+                num_5g += 1
+            if sae_network:
+                networks = self.get_psk_network(
+                    mirror_ap,
+                    [],
+                    hidden,
+                    same_ssid,
+                    ssid_length_2g=ssid_length_2g,
+                    ssid_length_5g=ssid_length_5g,
+                    passphrase_length_2g=passphrase_length_2g,
+                    passphrase_length_5g=passphrase_length_5g,
+                )
+
+                def add_config(name: str, band: str) -> None:
+                    configs.append(
+                        WirelessConfig(
+                            name=name,
+                            ssid=networks[band]["SSID"],
+                            security=OpenWRTEncryptionMode.SAE,
+                            band=band,
+                            password=networks[band]["password"],
+                            hidden=networks[band]["hiddenSSID"],
+                            ieee80211w=PMF_ENABLED,
+                        )
+                    )
+
+                add_config(f"wifi_2g_{num_2g}", hostapd_constants.BAND_2G)
+                add_config(f"wifi_5g_{num_5g}", hostapd_constants.BAND_5G)
+                num_2g += 1
+                num_5g += 1
+            if saemixed_network:
+                networks = self.get_psk_network(
+                    mirror_ap,
+                    [],
+                    hidden,
+                    same_ssid,
+                    ssid_length_2g=ssid_length_2g,
+                    ssid_length_5g=ssid_length_5g,
+                    passphrase_length_2g=passphrase_length_2g,
+                    passphrase_length_5g=passphrase_length_5g,
+                )
+
+                def add_config(name: str, band: str) -> None:
+                    configs.append(
+                        WirelessConfig(
+                            name=name,
+                            ssid=networks[band]["SSID"],
+                            security=OpenWRTEncryptionMode.SAE_MIXED,
+                            band=band,
+                            password=networks[band]["password"],
+                            hidden=networks[band]["hiddenSSID"],
+                            ieee80211w=ieee80211w,
+                        )
+                    )
+
+                add_config(f"wifi_2g_{num_2g}", hostapd_constants.BAND_2G)
+                add_config(f"wifi_5g_{num_5g}", hostapd_constants.BAND_5G)
+                num_2g += 1
+                num_5g += 1
+
+            openwrt_ap = self.openwrt_aps[i]
+            openwrt_ap.configure_ap(configs, channels_2g[i], channels_5g[i])
+            openwrt_ap.start_ap()
+            self.bssid_map.append(openwrt_ap.get_bssids_for_wifi_networks())
+
+            if mirror_ap:
+                openwrt_ap_mirror = self.openwrt_aps[i + 1]
+                openwrt_ap_mirror.configure_ap(
+                    configs, channels_2g[i + 1], channels_5g[i + 1]
+                )
+                openwrt_ap_mirror.start_ap()
+                self.bssid_map.append(openwrt_ap_mirror.get_bssids_for_wifi_networks())
+                break
diff --git a/src/antlion/test_utils/wifi/wifi_constants.py b/packages/antlion/test_utils/wifi/wifi_constants.py
similarity index 100%
rename from src/antlion/test_utils/wifi/wifi_constants.py
rename to packages/antlion/test_utils/wifi/wifi_constants.py
diff --git a/packages/antlion/test_utils/wifi/wifi_test_utils.py b/packages/antlion/test_utils/wifi/wifi_test_utils.py
new file mode 100755
index 0000000..9928001
--- /dev/null
+++ b/packages/antlion/test_utils/wifi/wifi_test_utils.py
@@ -0,0 +1,1074 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+import os
+import shutil
+import time
+from enum import IntEnum
+from queue import Empty
+
+from mobly import asserts, signals
+
+from antlion import context, utils
+from antlion.controllers.ap_lib.hostapd_constants import BAND_2G, BAND_5G
+from antlion.test_utils.wifi import wifi_constants
+
+# Default timeout used for reboot, toggle WiFi and Airplane mode,
+# for the system to settle down after the operation.
+DEFAULT_TIMEOUT = 10
+# Number of seconds to wait for events that are supposed to happen quickly.
+# Like onSuccess for start background scan and confirmation on wifi state
+# change.
+SHORT_TIMEOUT = 30
+# Timeout (seconds) used for roaming-related waits.
+ROAMING_TIMEOUT = 30
+# Default number of seconds to wait for a wifi connection to be established.
+WIFI_CONNECTION_TIMEOUT_DEFAULT = 30
+# Default retry counts for scan / connect attempts before declaring failure.
+DEFAULT_SCAN_TRIES = 3
+DEFAULT_CONNECT_TRIES = 3
+# Speed of light in m/s.
+SPEED_OF_LIGHT = 299792458
+
+# Default address used by connectivity-check helpers.
+DEFAULT_PING_ADDR = "https://www.google.com/robots.txt"
+
+# On-device location of the cnss_diag configuration (WLAN diagnostics).
+CNSS_DIAG_CONFIG_PATH = "/data/vendor/wifi/cnss_diag/"
+CNSS_DIAG_CONFIG_FILE = "cnss_diag.conf"
+
+# Attenuation levels for the 4 attenuator ports used in roaming tests:
+# 0 makes an AP reachable, 95 effectively hides it (presumably dB — confirm
+# against the attenuator controller in use).
+ROAMING_ATTN = {
+    "AP1_on_AP2_off": [0, 0, 95, 95],
+    "AP1_off_AP2_on": [95, 95, 0, 0],
+    "default": [0, 0, 0, 0],
+}
+
+
+class WifiEnums:
+    """Constants mirroring Android Wifi/SoftAp SL4A payload keys and enums.
+
+    Groups the dictionary keys used in SL4A RPC payloads together with the
+    numeric constants (bands, scan types, WPS modes, EAP types) and the
+    US channel/frequency lookup tables used by the wifi test utilities.
+    """
+
+    SSID_KEY = "SSID"  # Used for Wifi & SoftAp
+    SSID_PATTERN_KEY = "ssidPattern"
+    NETID_KEY = "network_id"
+    BSSID_KEY = "BSSID"  # Used for Wifi & SoftAp
+    BSSID_PATTERN_KEY = "bssidPattern"
+    PWD_KEY = "password"  # Used for Wifi & SoftAp
+    frequency_key = "frequency"
+    HIDDEN_KEY = "hiddenSSID"  # Used for Wifi & SoftAp
+    IS_APP_INTERACTION_REQUIRED = "isAppInteractionRequired"
+    IS_USER_INTERACTION_REQUIRED = "isUserInteractionRequired"
+    IS_SUGGESTION_METERED = "isMetered"
+    PRIORITY = "priority"
+    SECURITY = "security"  # Used for Wifi & SoftAp
+
+    # Used for SoftAp
+    AP_BAND_KEY = "apBand"
+    AP_CHANNEL_KEY = "apChannel"
+    AP_BANDS_KEY = "apBands"
+    AP_CHANNEL_FREQUENCYS_KEY = "apChannelFrequencies"
+    AP_MAC_RANDOMIZATION_SETTING_KEY = "MacRandomizationSetting"
+    AP_BRIDGED_OPPORTUNISTIC_SHUTDOWN_ENABLE_KEY = (
+        "BridgedModeOpportunisticShutdownEnabled"
+    )
+    AP_IEEE80211AX_ENABLED_KEY = "Ieee80211axEnabled"
+    AP_MAXCLIENTS_KEY = "MaxNumberOfClients"
+    AP_SHUTDOWNTIMEOUT_KEY = "ShutdownTimeoutMillis"
+    AP_SHUTDOWNTIMEOUTENABLE_KEY = "AutoShutdownEnabled"
+    AP_CLIENTCONTROL_KEY = "ClientControlByUserEnabled"
+    AP_ALLOWEDLIST_KEY = "AllowedClientList"
+    AP_BLOCKEDLIST_KEY = "BlockedClientList"
+
+    WIFI_CONFIG_SOFTAP_BAND_2G = 1
+    WIFI_CONFIG_SOFTAP_BAND_5G = 2
+    WIFI_CONFIG_SOFTAP_BAND_2G_5G = 3
+    WIFI_CONFIG_SOFTAP_BAND_6G = 4
+    WIFI_CONFIG_SOFTAP_BAND_2G_6G = 5
+    WIFI_CONFIG_SOFTAP_BAND_5G_6G = 6
+    WIFI_CONFIG_SOFTAP_BAND_ANY = 7
+
+    # DO NOT USE IT for new test case! Replaced by WIFI_CONFIG_SOFTAP_BAND_
+    WIFI_CONFIG_APBAND_2G = WIFI_CONFIG_SOFTAP_BAND_2G
+    WIFI_CONFIG_APBAND_5G = WIFI_CONFIG_SOFTAP_BAND_5G
+    WIFI_CONFIG_APBAND_AUTO = WIFI_CONFIG_SOFTAP_BAND_2G_5G
+
+    WIFI_CONFIG_APBAND_2G_OLD = 0
+    WIFI_CONFIG_APBAND_5G_OLD = 1
+    WIFI_CONFIG_APBAND_AUTO_OLD = -1
+
+    WIFI_WPS_INFO_PBC = 0
+    WIFI_WPS_INFO_DISPLAY = 1
+    WIFI_WPS_INFO_KEYPAD = 2
+    WIFI_WPS_INFO_LABEL = 3
+    WIFI_WPS_INFO_INVALID = 4
+
+    class CountryCode:
+        AUSTRALIA = "AU"
+        CHINA = "CN"
+        GERMANY = "DE"
+        JAPAN = "JP"
+        UK = "GB"
+        US = "US"
+        UNKNOWN = "UNKNOWN"
+
+    # Start of Macros for EAP
+    # EAP types
+    class Eap(IntEnum):
+        NONE = -1
+        PEAP = 0
+        TLS = 1
+        TTLS = 2
+        PWD = 3
+        SIM = 4
+        AKA = 5
+        AKA_PRIME = 6
+        UNAUTH_TLS = 7
+
+    # EAP Phase2 types
+    class EapPhase2(IntEnum):
+        NONE = 0
+        PAP = 1
+        MSCHAP = 2
+        MSCHAPV2 = 3
+        GTC = 4
+
+    class Enterprise:
+        # Enterprise Config Macros
+        EMPTY_VALUE = "NULL"
+        EAP = "eap"
+        PHASE2 = "phase2"
+        IDENTITY = "identity"
+        ANON_IDENTITY = "anonymous_identity"
+        PASSWORD = "password"
+        SUBJECT_MATCH = "subject_match"
+        ALTSUBJECT_MATCH = "altsubject_match"
+        DOM_SUFFIX_MATCH = "domain_suffix_match"
+        CLIENT_CERT = "client_cert"
+        CA_CERT = "ca_cert"
+        ENGINE = "engine"
+        ENGINE_ID = "engine_id"
+        PRIVATE_KEY_ID = "key_id"
+        REALM = "realm"
+        PLMN = "plmn"
+        FQDN = "FQDN"
+        FRIENDLY_NAME = "providerFriendlyName"
+        ROAMING_IDS = "roamingConsortiumIds"
+        OCSP = "ocsp"
+
+    # End of Macros for EAP
+
+    # Macros as specified in the WifiScanner code.
+    WIFI_BAND_UNSPECIFIED = 0  # not specified
+    WIFI_BAND_24_GHZ = 1  # 2.4 GHz band
+    WIFI_BAND_5_GHZ = 2  # 5 GHz band without DFS channels
+    WIFI_BAND_5_GHZ_DFS_ONLY = 4  # 5 GHz band with DFS channels
+    WIFI_BAND_5_GHZ_WITH_DFS = 6  # 5 GHz band with DFS channels
+    WIFI_BAND_BOTH = 3  # both bands without DFS channels
+    WIFI_BAND_BOTH_WITH_DFS = 7  # both bands with DFS channels
+
+    SCAN_TYPE_LOW_LATENCY = 0
+    SCAN_TYPE_LOW_POWER = 1
+    SCAN_TYPE_HIGH_ACCURACY = 2
+
+    # US Wifi frequencies
+    ALL_2G_FREQUENCIES = [
+        2412,
+        2417,
+        2422,
+        2427,
+        2432,
+        2437,
+        2442,
+        2447,
+        2452,
+        2457,
+        2462,
+    ]
+    DFS_5G_FREQUENCIES = [
+        5260,
+        5280,
+        5300,
+        5320,
+        5500,
+        5520,
+        5540,
+        5560,
+        5580,
+        5600,
+        5620,
+        5640,
+        5660,
+        5680,
+        5700,
+        5720,
+    ]
+    NONE_DFS_5G_FREQUENCIES = [5180, 5200, 5220, 5240, 5745, 5765, 5785, 5805, 5825]
+    ALL_5G_FREQUENCIES = DFS_5G_FREQUENCIES + NONE_DFS_5G_FREQUENCIES
+
+    band_to_frequencies = {
+        WIFI_BAND_24_GHZ: ALL_2G_FREQUENCIES,
+        WIFI_BAND_5_GHZ: NONE_DFS_5G_FREQUENCIES,
+        WIFI_BAND_5_GHZ_DFS_ONLY: DFS_5G_FREQUENCIES,
+        WIFI_BAND_5_GHZ_WITH_DFS: ALL_5G_FREQUENCIES,
+        WIFI_BAND_BOTH: ALL_2G_FREQUENCIES + NONE_DFS_5G_FREQUENCIES,
+        WIFI_BAND_BOTH_WITH_DFS: ALL_5G_FREQUENCIES + ALL_2G_FREQUENCIES,
+    }
+
+    # TODO: add all of the band mapping.
+    softap_band_frequencies = {
+        WIFI_CONFIG_SOFTAP_BAND_2G: ALL_2G_FREQUENCIES,
+        WIFI_CONFIG_SOFTAP_BAND_5G: ALL_5G_FREQUENCIES,
+    }
+
+    # All Wifi frequencies to channels lookup.
+    freq_to_channel = {
+        2412: 1,
+        2417: 2,
+        2422: 3,
+        2427: 4,
+        2432: 5,
+        2437: 6,
+        2442: 7,
+        2447: 8,
+        2452: 9,
+        2457: 10,
+        2462: 11,
+        2467: 12,
+        2472: 13,
+        2484: 14,
+        4915: 183,
+        4920: 184,
+        4925: 185,
+        4935: 187,
+        4940: 188,
+        4945: 189,
+        4960: 192,
+        4980: 196,
+        5035: 7,
+        5040: 8,
+        5045: 9,
+        5055: 11,
+        5060: 12,
+        5080: 16,
+        5170: 34,
+        5180: 36,
+        5190: 38,
+        5200: 40,
+        5210: 42,
+        5220: 44,
+        5230: 46,
+        5240: 48,
+        5260: 52,
+        5280: 56,
+        5300: 60,
+        5320: 64,
+        5500: 100,
+        5520: 104,
+        5540: 108,
+        5560: 112,
+        5580: 116,
+        5600: 120,
+        5620: 124,
+        5640: 128,
+        5660: 132,
+        5680: 136,
+        5700: 140,
+        5745: 149,
+        5765: 153,
+        5785: 157,
+        5795: 159,
+        5805: 161,
+        5825: 165,
+    }
+
+    # All Wifi channels to frequencies lookup.
+    channel_2G_to_freq = {
+        1: 2412,
+        2: 2417,
+        3: 2422,
+        4: 2427,
+        5: 2432,
+        6: 2437,
+        7: 2442,
+        8: 2447,
+        9: 2452,
+        10: 2457,
+        11: 2462,
+        12: 2467,
+        13: 2472,
+        14: 2484,
+    }
+
+    channel_5G_to_freq = {
+        183: 4915,
+        184: 4920,
+        185: 4925,
+        187: 4935,
+        188: 4940,
+        189: 4945,
+        192: 4960,
+        196: 4980,
+        7: 5035,
+        8: 5040,
+        9: 5045,
+        11: 5055,
+        12: 5060,
+        16: 5080,
+        34: 5170,
+        36: 5180,
+        38: 5190,
+        40: 5200,
+        42: 5210,
+        44: 5220,
+        46: 5230,
+        48: 5240,
+        50: 5250,
+        52: 5260,
+        56: 5280,
+        60: 5300,
+        64: 5320,
+        100: 5500,
+        104: 5520,
+        108: 5540,
+        112: 5560,
+        116: 5580,
+        120: 5600,
+        124: 5620,
+        128: 5640,
+        132: 5660,
+        136: 5680,
+        140: 5700,
+        149: 5745,
+        151: 5755,
+        153: 5765,
+        155: 5775,
+        157: 5785,
+        159: 5795,
+        161: 5805,
+        165: 5825,
+    }
+
+    # 6 GHz channels 1, 5, 9, ... 233 mapped to center frequencies starting
+    # at 5955 MHz in 20 MHz steps.
+    channel_6G_to_freq = {4 * x + 1: 5955 + 20 * x for x in range(59)}
+
+    channel_to_freq = {
+        "2G": channel_2G_to_freq,
+        "5G": channel_5G_to_freq,
+        "6G": channel_6G_to_freq,
+    }
+
+
+def _assert_on_fail_handler(func, assert_on_fail, *args, **kwargs):
+    """Wrapper function that handles the behavior of assert_on_fail.
+
+    When assert_on_fail is True, let all test signals through, which can
+    terminate test cases directly. When assert_on_fail is False, the wrapper
+    raises no test signals and reports operation status by returning True or
+    False.
+
+    Args:
+        func: The function to wrap. This function reports operation status by
+              raising test signals.
+        assert_on_fail: A boolean that specifies if the output of the wrapper
+                        is test signal based or return value based.
+        args: Positional args for func.
+        kwargs: Name args for func.
+
+    Returns:
+        If assert_on_fail is False, returns True/False to signal operation
+        status; otherwise returns nothing and lets any test signal raised by
+        func propagate to the caller.
+    """
+    try:
+        func(*args, **kwargs)
+        if not assert_on_fail:
+            return True
+    except signals.TestSignal:
+        if assert_on_fail:
+            raise
+        return False
+
+
+def match_networks(target_params, networks):
+    """Finds the WiFi networks that match a given set of parameters in a list
+    of WiFi networks.
+
+    To be considered a match, the network should contain every key-value pair
+    of target_params
+
+    Args:
+        target_params: A dict with 1 or more key-value pairs representing a Wi-Fi network.
+                       E.g { 'SSID': 'wh_ap1_5g', 'BSSID': '30:b5:c2:33:e4:47' }
+        networks: A list of dict objects representing WiFi networks.
+
+    Returns:
+        The networks that match the target parameters.
+    """
+    results = []
+    asserts.assert_true(
+        target_params, "Expected networks object 'target_params' is empty"
+    )
+    for n in networks:
+        add_network = 1
+        for k, v in target_params.items():
+            if k not in n:
+                add_network = 0
+                break
+            if n[k] != v:
+                add_network = 0
+                break
+        if add_network:
+            results.append(n)
+    return results
+
+
+def wifi_toggle_state(ad, new_state=None, assert_on_fail=True):
+    """Toggles the state of wifi.
+
+    Args:
+        ad: An AndroidDevice object.
+        new_state: Wifi state to set to. If None, opposite of the current state.
+        assert_on_fail: If True, error checks in this function will raise test
+                        failure signals.
+
+    Returns:
+        If assert_on_fail is False, function returns True if the toggle was
+        successful, False otherwise. If assert_on_fail is True, no return value.
+    """
+    return _assert_on_fail_handler(
+        _wifi_toggle_state, assert_on_fail, ad, new_state=new_state
+    )
+
+
+def _wifi_toggle_state(ad, new_state=None):
+    """Toggles the state of wifi.
+
+    TestFailure signals are raised when something goes wrong.
+
+    Args:
+        ad: An AndroidDevice object.
+        new_state: The state to set Wi-Fi to. If None, opposite of the current
+                   state will be set.
+    """
+    if new_state is None:
+        new_state = not ad.droid.wifiCheckState()
+    elif new_state == ad.droid.wifiCheckState():
+        # Check if the new_state is already achieved, so we don't wait for the
+        # state change event by mistake.
+        return
+    # Track state changes before toggling so the change event is not missed.
+    ad.droid.wifiStartTrackingStateChange()
+    ad.log.info("Setting Wi-Fi state to %s.", new_state)
+    ad.ed.clear_all_events()
+    # Setting wifi state.
+    ad.droid.wifiToggleState(new_state)
+    # Brief settle delay before waiting on the event — presumably to let the
+    # toggle take effect; TODO confirm whether it is still needed.
+    time.sleep(2)
+    fail_msg = f"Failed to set Wi-Fi state to {new_state} on {ad.serial}."
+    try:
+        ad.ed.wait_for_event(
+            wifi_constants.WIFI_STATE_CHANGED,
+            lambda x: x["data"]["enabled"] == new_state,
+            SHORT_TIMEOUT,
+        )
+    except Empty:
+        # No event arrived in time; fall back to polling the current state.
+        asserts.assert_equal(new_state, ad.droid.wifiCheckState(), fail_msg)
+    finally:
+        ad.droid.wifiStopTrackingStateChange()
+
+
+def reset_wifi(ad):
+    """Clears all saved Wi-Fi networks on a device.
+
+    This will turn Wi-Fi on.
+
+    Args:
+        ad: An AndroidDevice object.
+
+    """
+    networks = ad.droid.wifiGetConfiguredNetworks()
+    if not networks:
+        return
+    removed = []
+    for n in networks:
+        if n["networkId"] not in removed:
+            ad.droid.wifiForgetNetwork(n["networkId"])
+            removed.append(n["networkId"])
+        else:
+            continue
+        try:
+            event = ad.ed.pop_event(
+                wifi_constants.WIFI_FORGET_NW_SUCCESS, SHORT_TIMEOUT
+            )
+        except Empty:
+            logging.warning("Could not confirm the removal of network %s.", n)
+    # Check again to see if there's any network left.
+    asserts.assert_true(
+        not ad.droid.wifiGetConfiguredNetworks(),
+        f"Failed to remove these configured Wi-Fi networks: {networks}",
+    )
+
+
+def wifi_test_device_init(ad, country_code=WifiEnums.CountryCode.US):
+    """Initializes an android device for wifi testing.
+
+    0. Make sure SL4A connection is established on the android device.
+    1. Disable location service's WiFi scan.
+    2. Turn WiFi on.
+    3. Clear all saved networks.
+    4. Set country code to US.
+    5. Enable WiFi verbose logging.
+    6. Sync device time with computer time.
+    7. Turn off cellular data.
+    8. Turn off ambient display.
+
+    Args:
+        ad: An AndroidDevice object.
+        country_code: 2 letter ISO country code to set on the device.
+    """
+    utils.require_sl4a((ad,))
+    ad.droid.wifiScannerToggleAlwaysAvailable(False)
+    msg = "Failed to turn off location service's scan."
+    asserts.assert_true(not ad.droid.wifiScannerIsAlwaysAvailable(), msg)
+    wifi_toggle_state(ad, True)
+    reset_wifi(ad)
+    ad.droid.wifiEnableVerboseLogging(1)
+    msg = "Failed to enable WiFi verbose logging."
+    asserts.assert_equal(ad.droid.wifiGetVerboseLoggingLevel(), 1, msg)
+    # We don't verify the following settings since they are not critical.
+    # Set wpa_supplicant log level to EXCESSIVE.
+    output = ad.adb.shell(
+        "wpa_cli -i wlan0 -p -g@android:wpa_wlan0 IFNAME=" "wlan0 log_level EXCESSIVE",
+        ignore_status=True,
+    )
+    ad.log.info("wpa_supplicant log change status: %s", output)
+    utils.sync_device_time(ad)
+    ad.droid.telephonyToggleDataConnection(False)
+    set_wifi_country_code(ad, country_code)
+    utils.set_ambient_display(ad, False)
+
+
+def set_wifi_country_code(ad, country_code):
+    """Sets the wifi country code on the device.
+
+    On failure of the adb command, logs a warning and falls back to setting
+    the US country code via SL4A; this function does not raise.
+
+    Args:
+        ad: An AndroidDevice object.
+        country_code: 2 letter ISO country code
+    """
+    try:
+        ad.adb.shell(f"cmd wifi force-country-code enabled {country_code}")
+    except Exception as e:
+        ad.log.warn(
+            f"Failed to set country code to {country_code}; defaulting to US. Error: {e}"
+        )
+        ad.droid.wifiSetCountryCode(WifiEnums.CountryCode.US)
+
+
+def start_wifi_connection_scan_and_return_status(ad):
+    """
+    Starts a wifi connection scan and wait for results to become available
+    or a scan failure to be reported.
+
+    Args:
+        ad: An AndroidDevice object.
+    Returns:
+        True: if scan succeeded & results are available
+        False: if scan failed
+    """
+    ad.ed.clear_all_events()
+    ad.droid.wifiStartScan()
+    try:
+        events = ad.ed.pop_events("WifiManagerScan(ResultsAvailable|Failure)", 60)
+    except Empty:
+        asserts.fail("Wi-Fi scan results/failure did not become available within 60s.")
+    # If there are multiple matches, we check for atleast one success.
+    for event in events:
+        if event["name"] == "WifiManagerScanResultsAvailable":
+            return True
+        elif event["name"] == "WifiManagerScanFailure":
+            ad.log.debug("Scan failure received")
+    return False
+
+
+def start_wifi_connection_scan_and_check_for_network(ad, network_ssid, max_tries=3):
+    """
+    Start connectivity scans & checks if the |network_ssid| is seen in
+    scan results. The method performs a max of |max_tries| connectivity scans
+    to find the network.
+
+    Args:
+        ad: An AndroidDevice object.
+        network_ssid: SSID of the network we are looking for.
+        max_tries: Number of scans to try.
+    Returns:
+        True: if network_ssid is found in scan results.
+        False: if network_ssid is not found in scan results.
+    """
+    start_time = time.time()
+    for num_tries in range(max_tries):
+        if start_wifi_connection_scan_and_return_status(ad):
+            scan_results = ad.droid.wifiGetScanResults()
+            match_results = match_networks(
+                {WifiEnums.SSID_KEY: network_ssid}, scan_results
+            )
+            if len(match_results) > 0:
+                ad.log.debug(f"Found network in {time.time() - start_time} seconds.")
+                return True
+    ad.log.debug(f"Did not find network in {time.time() - start_time} seconds.")
+    return False
+
+
+def start_wifi_connection_scan_and_ensure_network_found(ad, network_ssid, max_tries=3):
+    """
+    Start connectivity scans & ensure the |network_ssid| is seen in
+    scan results. The method performs a max of |max_tries| connectivity scans
+    to find the network.
+    This method asserts on failure!
+
+    Args:
+        ad: An AndroidDevice object.
+        network_ssid: SSID of the network we are looking for.
+        max_tries: Number of scans to try.
+    """
+    ad.log.info("Starting scans to ensure %s is present", network_ssid)
+    assert_msg = (
+        f"Failed to find {network_ssid} in scan results after {str(max_tries)} tries"
+    )
+    asserts.assert_true(
+        start_wifi_connection_scan_and_check_for_network(ad, network_ssid, max_tries),
+        assert_msg,
+    )
+
+
+def start_wifi_connection_scan_and_ensure_network_not_found(
+    ad, network_ssid, max_tries=3
+):
+    """
+    Start connectivity scans & ensure the |network_ssid| is not seen in
+    scan results. The method performs a max of |max_tries| connectivity scans
+    to find the network.
+    This method asserts on failure!
+
+    Args:
+        ad: An AndroidDevice object.
+        network_ssid: SSID of the network we are looking for.
+        max_tries: Number of scans to try.
+    """
+    ad.log.info("Starting scans to ensure %s is not present", network_ssid)
+    assert_msg = f"Found {network_ssid} in scan results after {str(max_tries)} tries"
+    asserts.assert_false(
+        start_wifi_connection_scan_and_check_for_network(ad, network_ssid, max_tries),
+        assert_msg,
+    )
+
+
+def _wait_for_connect_event(ad, ssid=None, id=None, tries=1):
+    """Wait for a connect event on queue and pop when available.
+
+    Args:
+        ad: An Android device object.
+        ssid: SSID of the network to connect to.
+        id: Network Id of the network to connect to.
+        tries: An integer that is the number of times to try before failing.
+
+    Returns:
+        A dict with details of the connection data, which looks like this:
+        {
+         'time': 1485460337798,
+         'name': 'WifiNetworkConnected',
+         'data': {
+                  'rssi': -27,
+                  'is_24ghz': True,
+                  'mac_address': '02:00:00:00:00:00',
+                  'network_id': 1,
+                  'BSSID': '30:b5:c2:33:d3:fc',
+                  'ip_address': 117483712,
+                  'link_speed': 54,
+                  'supplicant_state': 'completed',
+                  'hidden_ssid': False,
+                  'SSID': 'wh_ap1_2g',
+                  'is_5ghz': False}
+        }
+
+        Returns None if no connect event was seen within the allotted tries.
+
+        NOTE(review): when ssid/id is given and no matching event arrives
+        before tries are exhausted, the LAST popped (non-matching) event is
+        returned rather than None — callers should verify the SSID/id of the
+        result; confirm whether this is intended.
+    """
+    conn_result = None
+
+    # If ssid and network id is None, just wait for any connect event.
+    if id is None and ssid is None:
+        for i in range(tries):
+            try:
+                conn_result = ad.ed.pop_event(wifi_constants.WIFI_CONNECTED, 30)
+                break
+            except Empty:
+                pass
+    else:
+        # If ssid or network id is specified, wait for specific connect event.
+        for i in range(tries):
+            try:
+                conn_result = ad.ed.pop_event(wifi_constants.WIFI_CONNECTED, 30)
+                if id and conn_result["data"][WifiEnums.NETID_KEY] == id:
+                    break
+                elif ssid and conn_result["data"][WifiEnums.SSID_KEY] == ssid:
+                    break
+            except Empty:
+                pass
+
+    return conn_result
+
+
+def connect_to_wifi_network(
+    ad,
+    network,
+    assert_on_fail=True,
+    check_connectivity=True,
+    hidden=False,
+    num_of_scan_tries=DEFAULT_SCAN_TRIES,
+    num_of_connect_tries=DEFAULT_CONNECT_TRIES,
+):
+    """Connection logic for open and psk wifi networks.
+
+    Args:
+        ad: AndroidDevice to use for connection
+        network: network info of the network to connect to
+        assert_on_fail: If true, errors from wifi_connect will raise
+                        test failure signals.
+        check_connectivity: Whether to verify internet connectivity after
+                            connecting (forwarded to wifi_connect).
+        hidden: Is the Wifi network hidden.
+        num_of_scan_tries: The number of times to try scan
+                           interface before declaring failure.
+        num_of_connect_tries: The number of times to try
+                              connect wifi before declaring failure.
+    """
+    # Hidden networks must not be broadcast; visible ones must appear in scan
+    # results before attempting to connect.
+    if hidden:
+        start_wifi_connection_scan_and_ensure_network_not_found(
+            ad, network[WifiEnums.SSID_KEY], max_tries=num_of_scan_tries
+        )
+    else:
+        start_wifi_connection_scan_and_ensure_network_found(
+            ad, network[WifiEnums.SSID_KEY], max_tries=num_of_scan_tries
+        )
+    wifi_connect(
+        ad,
+        network,
+        num_of_tries=num_of_connect_tries,
+        assert_on_fail=assert_on_fail,
+        check_connectivity=check_connectivity,
+    )
+
+
+def wifi_connect(
+    ad, network, num_of_tries=1, assert_on_fail=True, check_connectivity=True
+):
+    """Connect an Android device to a wifi network.
+
+    Initiate connection to a wifi network, wait for the "connected" event, then
+    confirm the connected ssid is the one requested.
+
+    This will directly fail a test if anything goes wrong.
+
+    Args:
+        ad: android_device object to initiate connection on.
+        network: A dictionary representing the network to connect to. The
+                 dictionary must have the key "SSID".
+        num_of_tries: An integer that is the number of times to try before
+                      declaring failure. Default is 1.
+        assert_on_fail: If True, error checks in this function will raise test
+                        failure signals.
+
+    Returns:
+        Returns a value only if assert_on_fail is false.
+        Returns True if the connection was successful, False otherwise.
+    """
+    return _assert_on_fail_handler(
+        _wifi_connect,
+        assert_on_fail,
+        ad,
+        network,
+        num_of_tries=num_of_tries,
+        check_connectivity=check_connectivity,
+    )
+
+
+def _wifi_connect(ad, network, num_of_tries=1, check_connectivity=True):
+    """Connect an Android device to a wifi network.
+
+    Initiate connection to a wifi network, wait for the "connected" event, then
+    confirm the connected ssid is the one requested.
+
+    This will directly fail a test if anything goes wrong.
+
+    Args:
+        ad: android_device object to initiate connection on.
+        network: A dictionary representing the network to connect to. The
+                 dictionary must have the key "SSID".
+        num_of_tries: An integer that is the number of times to try before
+                      declaring failure. Default is 1.
+    """
+    asserts.assert_true(
+        WifiEnums.SSID_KEY in network,
+        f"Key '{WifiEnums.SSID_KEY}' must be present in network definition.",
+    )
+    ad.droid.wifiStartTrackingStateChange()
+    expected_ssid = network[WifiEnums.SSID_KEY]
+    ad.droid.wifiConnectByConfig(network)
+    ad.log.info("Starting connection process to %s", expected_ssid)
+    try:
+        ad.ed.pop_event(wifi_constants.CONNECT_BY_CONFIG_SUCCESS, 30)
+        connect_result = _wait_for_connect_event(
+            ad, ssid=expected_ssid, tries=num_of_tries
+        )
+        asserts.assert_true(
+            connect_result,
+            f"Failed to connect to Wi-Fi network {network} on {ad.serial}",
+        )
+        ad.log.debug("Wi-Fi connection result: %s.", connect_result)
+        actual_ssid = connect_result["data"][WifiEnums.SSID_KEY]
+        asserts.assert_equal(
+            actual_ssid,
+            expected_ssid,
+            f"Connected to the wrong network on {ad.serial}.",
+        )
+        ad.log.info("Connected to Wi-Fi network %s.", actual_ssid)
+
+        if check_connectivity:
+            internet = validate_connection(ad, DEFAULT_PING_ADDR)
+            if not internet:
+                raise signals.TestFailure(
+                    f"Failed to connect to internet on {expected_ssid}"
+                )
+    except Empty:
+        asserts.fail(f"Failed to start connection process to {network} on {ad.serial}")
+    except Exception as error:
+        ad.log.error("Failed to connect to %s with error %s", expected_ssid, error)
+        raise signals.TestFailure(f"Failed to connect to {network} network")
+
+    finally:
+        ad.droid.wifiStopTrackingStateChange()
+
+
+def validate_connection(
+    ad, ping_addr=DEFAULT_PING_ADDR, wait_time=15, ping_gateway=True
+):
+    """Validate internet connection by pinging the address provided.
+
+    Args:
+        ad: android_device object.
+        ping_addr: address on internet for pinging.
+        wait_time: wait for some time before validating connection
+
+    Returns:
+        ping output if successful, False otherwise.
+    """
+    android_version = int(ad.adb.shell("getprop ro.vendor.build.version.release"))
+    # wait_time to allow for DHCP to complete.
+    for i in range(wait_time):
+        if ad.droid.connectivityNetworkIsConnected():
+            if (
+                android_version > 10 and ad.droid.connectivityGetIPv4DefaultGateway()
+            ) or android_version < 11:
+                break
+        time.sleep(1)
+    ping = False
+    try:
+        ping = ad.droid.httpPing(ping_addr)
+        ad.log.info("Http ping result: %s.", ping)
+    except:
+        pass
+    if android_version > 10 and not ping and ping_gateway:
+        ad.log.info("Http ping failed. Pinging default gateway")
+        gw = ad.droid.connectivityGetIPv4DefaultGateway()
+        result = ad.adb.shell(f"ping -c 6 {gw}")
+        ad.log.info(f"Default gateway ping result: {result}")
+        ping = False if "100% packet loss" in result else True
+    return ping
+
+
+# TODO(angli): This can only verify if an actual value is exactly the same.
+# Would be nice to be able to verify an actual value is one of several.
+def verify_wifi_connection_info(ad, expected_con):
+    """Verifies that the information of the currently connected wifi network is
+    as expected.
+
+    Args:
+        expected_con: A dict representing expected key-value pairs for wifi
+            connection. e.g. {"SSID": "test_wifi"}
+    """
+    current_con = ad.droid.wifiGetConnectionInfo()
+    case_insensitive = ["BSSID", "supplicant_state"]
+    ad.log.debug("Current connection: %s", current_con)
+    for k, expected_v in expected_con.items():
+        # Do not verify authentication related fields.
+        if k == "password":
+            continue
+        msg = f"Field {k} does not exist in wifi connection info {current_con}."
+        if k not in current_con:
+            raise signals.TestFailure(msg)
+        actual_v = current_con[k]
+        if k in case_insensitive:
+            actual_v = actual_v.lower()
+            expected_v = expected_v.lower()
+        msg = f"Expected {k} to be {expected_v}, actual {k} is {actual_v}."
+        if actual_v != expected_v:
+            raise signals.TestFailure(msg)
+
+
+def get_current_softap_capability(ad, callbackId, need_to_wait):
+    """Pop all softap capability changed events from the queue.
+    Args:
+        callbackId: Id of the callback associated with registering.
+        need_to_wait: Wait for the info callback event before pop all.
+    Returns:
+        Returns last updated capability of softap.
+    """
+    eventStr = (
+        wifi_constants.SOFTAP_CALLBACK_EVENT
+        + str(callbackId)
+        + wifi_constants.SOFTAP_CAPABILITY_CHANGED
+    )
+    ad.log.debug("softap capability dump from eventStr %s", eventStr)
+    if need_to_wait:
+        event = ad.ed.pop_event(eventStr, SHORT_TIMEOUT)
+        capability = event["data"]
+
+    events = ad.ed.pop_all(eventStr)
+    for event in events:
+        capability = event["data"]
+
+    return capability
+
+
+def get_ssrdumps(ad):
+    """Pulls dumps in the ssrdump dir
+    Args:
+        ad: android device object.
+    """
+    logs = ad.get_file_names("/data/vendor/ssrdump/")
+    if logs:
+        ad.log.info("Pulling ssrdumps %s", logs)
+        log_path = os.path.join(ad.device_log_path, f"SSRDUMPS_{ad.serial}")
+        os.makedirs(log_path, exist_ok=True)
+        ad.pull_files(logs, log_path)
+    ad.adb.shell("find /data/vendor/ssrdump/ -type f -delete", ignore_status=True)
+
+
+def start_pcap(pcap, wifi_band, test_name):
+    """Start packet capture in monitor mode.
+
+    Args:
+        pcap: packet capture object
+        wifi_band: '2g' or '5g' or 'dual'
+        test_name: test name to be used for pcap file name
+
+    Returns:
+        Dictionary with wifi band as key and the tuple
+        (pcap Process object, log directory) as the value
+    """
+    log_dir = os.path.join(
+        context.get_current_context().get_full_output_path(), "PacketCapture"
+    )
+    os.makedirs(log_dir, exist_ok=True)
+    if wifi_band == "dual":
+        bands = [BAND_2G, BAND_5G]
+    else:
+        bands = [wifi_band]
+    procs = {}
+    for band in bands:
+        proc = pcap.start_packet_capture(band, log_dir, test_name)
+        procs[band] = (proc, os.path.join(log_dir, test_name))
+    return procs
+
+
+def stop_pcap(pcap, procs, test_status=None):
+    """Stop packet capture in monitor mode.
+
+    Since the pcap logs in monitor mode can be very large, we delete them
+    when they are not required. If 'test_status' is True, the pcap files
+    are deleted; if False, they are kept.
+
+    Args:
+        pcap: packet capture object
+        procs: dictionary returned by start_pcap
+        test_status: status of the test case
+    """
+    for proc, fname in procs.values():
+        pcap.stop_packet_capture(proc)
+
+    if test_status:
+        shutil.rmtree(os.path.dirname(fname))
+
+
+def start_cnss_diags(ads, cnss_diag_file, pixel_models):
+    for ad in ads:
+        start_cnss_diag(ad, cnss_diag_file, pixel_models)
+
+
+def start_cnss_diag(ad, cnss_diag_file, pixel_models):
+    """Start cnss_diag to record extra wifi logs
+
+    Args:
+        ad: android device object.
+        cnss_diag_file: cnss diag config file to push to device.
+        pixel_models: pixel devices.
+    """
+    if ad.model not in pixel_models:
+        ad.log.info("Device not supported to collect pixel logger")
+        return
+    if ad.model in wifi_constants.DEVICES_USING_LEGACY_PROP:
+        prop = wifi_constants.LEGACY_CNSS_DIAG_PROP
+    else:
+        prop = wifi_constants.CNSS_DIAG_PROP
+    if ad.adb.getprop(prop) != "true":
+        if not int(
+            ad.adb.shell(
+                f"ls -l {CNSS_DIAG_CONFIG_PATH}{CNSS_DIAG_CONFIG_FILE} | wc -l"
+            )
+        ):
+            ad.adb.push(f"{cnss_diag_file} {CNSS_DIAG_CONFIG_PATH}")
+        ad.adb.shell(
+            "find /data/vendor/wifi/cnss_diag/wlan_logs/ -type f -delete",
+            ignore_status=True,
+        )
+        ad.adb.shell(f"setprop {prop} true", ignore_status=True)
+
+
+def stop_cnss_diags(ads, pixel_models):
+    for ad in ads:
+        stop_cnss_diag(ad, pixel_models)
+
+
+def stop_cnss_diag(ad, pixel_models):
+    """Stops cnss_diag
+
+    Args:
+        ad: android device object.
+        pixel_models: pixel devices.
+    """
+    if ad.model not in pixel_models:
+        ad.log.info("Device not supported to collect pixel logger")
+        return
+    if ad.model in wifi_constants.DEVICES_USING_LEGACY_PROP:
+        prop = wifi_constants.LEGACY_CNSS_DIAG_PROP
+    else:
+        prop = wifi_constants.CNSS_DIAG_PROP
+    ad.adb.shell(f"setprop {prop} false", ignore_status=True)
+
+
+def get_cnss_diag_log(ad):
+    """Pulls the cnss_diag logs in the wlan_logs dir
+    Args:
+        ad: android device object.
+    """
+    logs = ad.get_file_names("/data/vendor/wifi/cnss_diag/wlan_logs/")
+    if logs:
+        ad.log.info("Pulling cnss_diag logs %s", logs)
+        log_path = os.path.join(ad.device_log_path, f"CNSS_DIAG_{ad.serial}")
+        os.makedirs(log_path, exist_ok=True)
+        ad.pull_files(logs, log_path)
+
+
+def turn_location_off_and_scan_toggle_off(ad):
+    """Turns off wifi location scans."""
+    utils.set_location_service(ad, False)
+    ad.droid.wifiScannerToggleAlwaysAvailable(False)
+    msg = "Failed to turn off location service's scan."
+    asserts.assert_true(not ad.droid.wifiScannerIsAlwaysAvailable(), msg)
diff --git a/packages/antlion/types.py b/packages/antlion/types.py
new file mode 100644
index 0000000..0ccc7b2
--- /dev/null
+++ b/packages/antlion/types.py
@@ -0,0 +1,75 @@
+#!/usr/bin/env python3
+#
+# Copyright 2024 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from typing import Protocol, TypeAlias, TypeVar
+
+Json: TypeAlias = dict[str, "Json"] | list["Json"] | str | int | float | bool | None
+"""JSON serializable data."""
+
+ControllerConfig: TypeAlias = dict[str, Json]
+"""Mobly configuration specific to a controller.
+
+Defined in the Mobly config under TestBeds -> Controllers ->
+<MOBLY_CONTROLLER_CONFIG_NAME>.
+"""
+
+_T = TypeVar("_T")
+
+
+class Controller(Protocol[_T]):
+    MOBLY_CONTROLLER_CONFIG_NAME: str
+    """Key used to get this controller's config from the Mobly config."""
+
+    def create(self, configs: list[ControllerConfig]) -> list[_T]:
+        """Create controller objects from configurations.
+
+        Args:
+            configs: A list of serialized data like string/dict. Each element of
+                the list is a configuration for a controller object.
+
+        Returns:
+          A list of controller objects.
+        """
+
+    def destroy(self, objects: list[_T]) -> None:
+        """Destroys controller objects.
+
+        Each controller object shall be properly cleaned up and all the
+        resources held should be released, e.g. memory allocation, sockets, file
+        handlers etc.
+
+        Args:
+            objects: A list of controller objects created by the create
+                function.
+        """
+
+    def get_info(self, objects: list[_T]) -> list[Json]:
+        """Gets info from the controller objects.
+
+        The info will be included in test_summary.yaml under the key
+        'ControllerInfo'. Such information could include unique ID, version, or
+        anything that could be useful for describing the test bed and debugging.
+
+        Args:
+            objects: A list of controller objects created by the create
+                function.
+
+        Returns:
+            A list of json serializable objects: each represents the info of a
+            controller object. The order of the info object should follow that
+            of the input objects.
+        """
+        return []
diff --git a/src/antlion/unit_tests/__init__.py b/packages/antlion/unit_tests/__init__.py
similarity index 100%
rename from src/antlion/unit_tests/__init__.py
rename to packages/antlion/unit_tests/__init__.py
diff --git a/src/antlion/unit_tests/acts_adb_test.py b/packages/antlion/unit_tests/acts_adb_test.py
similarity index 95%
rename from src/antlion/unit_tests/acts_adb_test.py
rename to packages/antlion/unit_tests/acts_adb_test.py
index 05a51bc..c7a14bc 100755
--- a/src/antlion/unit_tests/acts_adb_test.py
+++ b/packages/antlion/unit_tests/acts_adb_test.py
@@ -15,10 +15,11 @@
 # limitations under the License.
 
 import unittest
+
 import mock
+
 from antlion.controllers import adb
-from antlion.controllers.adb_lib.error import AdbCommandError
-from antlion.controllers.adb_lib.error import AdbError
+from antlion.controllers.adb_lib.error import AdbCommandError, AdbError
 
 
 class MockJob(object):
@@ -95,7 +96,7 @@
         proxy = MockAdbProxy()
         expected_version_number = 39
         proxy.version = lambda: (
-            "Android Debug Bridge version 1.0.%s\nblah" % expected_version_number
+            f"Android Debug Bridge version 1.0.{expected_version_number}\nblah"
         )
         self.assertEqual(expected_version_number, proxy.get_version_number())
 
diff --git a/src/antlion/unit_tests/acts_android_device_test.py b/packages/antlion/unit_tests/acts_android_device_test.py
similarity index 97%
rename from src/antlion/unit_tests/acts_android_device_test.py
rename to packages/antlion/unit_tests/acts_android_device_test.py
index dd2ecd3..500c12a 100755
--- a/src/antlion/unit_tests/acts_android_device_test.py
+++ b/packages/antlion/unit_tests/acts_android_device_test.py
@@ -15,13 +15,14 @@
 # limitations under the License.
 
 import logging
-import mock
 import os
 import shutil
 import tempfile
 import unittest
 
-from antlion import logger
+import mock
+from mobly import logger
+
 from antlion.controllers import android_device
 from antlion.controllers.android_lib import errors
 
@@ -30,7 +31,7 @@
 
 # Mock start and end time of the adb cat.
 MOCK_ADB_EPOCH_BEGIN_TIME = 191000123
-MOCK_ADB_LOGCAT_BEGIN_TIME = logger.normalize_log_line_timestamp(
+MOCK_ADB_LOGCAT_BEGIN_TIME = logger.sanitize_filename(
     logger.epoch_to_log_line_timestamp(MOCK_ADB_EPOCH_BEGIN_TIME)
 )
 MOCK_ADB_LOGCAT_END_TIME = "1970-01-02 21:22:02.000"
@@ -114,19 +115,19 @@
             return "1"
 
     def devices(self):
-        return "\t".join([str(self.serial), "device"])
+        return f"{str(self.serial)}\tdevice"
 
     def bugreport(self, params, timeout=android_device.BUG_REPORT_TIMEOUT):
         expected = os.path.join(
             logging.log_path,
-            "AndroidDevice%s" % self.serial,
+            f"AndroidDevice{self.serial}",
             "AndroidDevice%s_%s.txt"
             % (
                 self.serial,
                 logger.normalize_log_line_timestamp(MOCK_ADB_LOGCAT_BEGIN_TIME),
             ),
         )
-        assert expected in params, "Expected '%s', got '%s'." % (expected, params)
+        assert expected in params, f"Expected '{expected}', got '{params}'."
 
     def __getattr__(self, name):
         """All calls to the none-existent functions in adb proxy would
@@ -265,7 +266,7 @@
         self.assertEqual(ad.serial, 1)
         self.assertEqual(ad.model, "fakemodel")
         self.assertIsNone(ad.adb_logcat_process)
-        expected_lp = os.path.join(logging.log_path, "AndroidDevice%s" % MOCK_SERIAL)
+        expected_lp = os.path.join(logging.log_path, f"AndroidDevice{MOCK_SERIAL}")
         self.assertEqual(ad.log_path, expected_lp)
 
     @mock.patch(
@@ -338,7 +339,7 @@
         """
         ad = android_device.AndroidDevice(serial=MOCK_SERIAL)
         mock_log_path.return_value = os.path.join(
-            logging.log_path, "AndroidDevice%s" % ad.serial
+            logging.log_path, f"AndroidDevice{ad.serial}"
         )
         ad.take_bug_report("test_something", 234325.32)
         mock_makedirs.assert_called_with(mock_log_path(), exist_ok=True)
@@ -363,7 +364,7 @@
         """
         ad = android_device.AndroidDevice(serial=MOCK_SERIAL)
         mock_log_path.return_value = os.path.join(
-            logging.log_path, "AndroidDevice%s" % ad.serial
+            logging.log_path, f"AndroidDevice{ad.serial}"
         )
         expected_msg = "Failed to take bugreport on 1: OMG I died!"
         with self.assertRaisesRegex(errors.AndroidDeviceError, expected_msg):
@@ -391,7 +392,7 @@
         """
         ad = android_device.AndroidDevice(serial=MOCK_SERIAL)
         mock_log_path.return_value = os.path.join(
-            logging.log_path, "AndroidDevice%s" % ad.serial
+            logging.log_path, f"AndroidDevice{ad.serial}"
         )
         ad.take_bug_report("test_something", MOCK_ADB_EPOCH_BEGIN_TIME)
         mock_makedirs.assert_called_with(mock_log_path(), exist_ok=True)
@@ -422,7 +423,7 @@
             ad.start_adb_logcat()
             # Verify start did the correct operations.
             self.assertTrue(ad.adb_logcat_process)
-            log_dir = "AndroidDevice%s" % ad.serial
+            log_dir = f"AndroidDevice{ad.serial}"
             create_proc_mock.assert_called_with(ad.serial, log_dir, "-b all")
             proc_mock.start.assert_called_with()
             # Expect warning msg if start is called back to back.
@@ -453,7 +454,7 @@
         ad.start_adb_logcat()
         # Verify that create_logcat_keepalive_process is called with the
         # correct command.
-        log_dir = "AndroidDevice%s" % ad.serial
+        log_dir = f"AndroidDevice{ad.serial}"
         create_proc_mock.assert_called_with(ad.serial, log_dir, "-b radio")
 
     @mock.patch(
diff --git a/src/antlion/unit_tests/acts_asserts_test.py b/packages/antlion/unit_tests/acts_asserts_test.py
similarity index 94%
rename from src/antlion/unit_tests/acts_asserts_test.py
rename to packages/antlion/unit_tests/acts_asserts_test.py
index e11e120..8a87d8f 100755
--- a/src/antlion/unit_tests/acts_asserts_test.py
+++ b/packages/antlion/unit_tests/acts_asserts_test.py
@@ -16,9 +16,7 @@
 
 import unittest
 
-from antlion import signals
-
-from mobly import asserts
+from mobly import asserts, signals
 
 MSG_EXPECTED_EXCEPTION = "This is an expected exception."
 
diff --git a/src/antlion/unit_tests/acts_confidence_test_config.json b/packages/antlion/unit_tests/acts_confidence_test_config.json
similarity index 66%
rename from src/antlion/unit_tests/acts_confidence_test_config.json
rename to packages/antlion/unit_tests/acts_confidence_test_config.json
index 566beba..6a64b7c 100644
--- a/src/antlion/unit_tests/acts_confidence_test_config.json
+++ b/packages/antlion/unit_tests/acts_confidence_test_config.json
@@ -1,15 +1,18 @@
 {
-    "testbed":
-    [
+    "testbed": [
         {
             "_description": "ACTS confidence test bed, no device needed.",
             "name": "Confidence",
             "icecream": 42,
-            "MagicDevice": ["Magic!"]
+            "MagicDevice": [
+                "Magic!"
+            ]
         }
     ],
     "logpath": "/tmp/logs",
-    "testpaths": ["./"],
+    "testpaths": [
+        "./"
+    ],
     "icecream": "mememe",
     "extra_param": "haha"
 }
diff --git a/src/antlion/unit_tests/acts_context_test.py b/packages/antlion/unit_tests/acts_context_test.py
similarity index 78%
rename from src/antlion/unit_tests/acts_context_test.py
rename to packages/antlion/unit_tests/acts_context_test.py
index 5fe4f34..0634826 100755
--- a/src/antlion/unit_tests/acts_context_test.py
+++ b/packages/antlion/unit_tests/acts_context_test.py
@@ -15,26 +15,26 @@
 # limitations under the License.
 
 import unittest
-from functools import partial
 from unittest import TestCase
 
+from mock import Mock, patch
+
 from antlion import context
-from antlion.context import RootContext
-from antlion.context import TestCaseContext
-from antlion.context import TestClassContext
-from antlion.context import TestContext
-from antlion.context import _update_test_case_context
-from antlion.context import _update_test_class_context
-from antlion.context import get_context_for_event
-from antlion.context import get_current_context
-from antlion.event.event import TestCaseBeginEvent
-from antlion.event.event import TestCaseEndEvent
-from antlion.event.event import TestCaseEvent
-from antlion.event.event import TestClassBeginEvent
-from antlion.event.event import TestClassEndEvent
-from antlion.event.event import TestClassEvent
-from mock import Mock
-from mock import patch
+from antlion.context import (
+    RootContext,
+    TestCaseContext,
+    TestClassContext,
+    TestContext,
+    _update_test_case_context,
+    _update_test_class_context,
+    get_current_context,
+)
+from antlion.event.event import (
+    TestCaseBeginEvent,
+    TestCaseEndEvent,
+    TestClassBeginEvent,
+    TestClassEndEvent,
+)
 
 LOGGING = "antlion.context.logging"
 
@@ -53,29 +53,6 @@
 class ModuleTest(TestCase):
     """Unit tests for the context module."""
 
-    def test_get_context_for_event_for_test_case(self):
-        event = Mock(spec=TestCaseEvent)
-        event.test_class = Mock()
-        event.test_case = Mock()
-        context = get_context_for_event(event)
-
-        self.assertIsInstance(context, TestCaseContext)
-        self.assertEqual(context.test_class, event.test_class)
-        self.assertEqual(context.test_case, event.test_case)
-
-    def test_get_context_for_event_for_test_class(self):
-        event = Mock(spec=TestClassEvent)
-        event.test_class = Mock()
-        context = get_context_for_event(event)
-
-        self.assertIsInstance(context, TestClassContext)
-        self.assertEqual(context.test_class, event.test_class)
-
-    def test_get_context_for_unknown_event_type(self):
-        event = Mock()
-
-        self.assertRaises(TypeError, partial(get_context_for_event, event))
-
     def test_update_test_class_context_for_test_class_begin(self):
         event = Mock(spec=TestClassBeginEvent)
         event.test_class = Mock()
@@ -233,14 +210,14 @@
         test_class = TestClass()
         context = TestCaseContext(test_class, TEST_CASE)
 
-        context_dir = TestClass.__name__ + "/" + TEST_CASE
+        context_dir = f"{TestClass.__name__}/{TEST_CASE}"
         self.assertEqual(context._get_default_context_dir(), context_dir)
 
     def test_identifier_is_class_and_test_case_name(self):
         test_class = TestClass()
         context = TestCaseContext(test_class, TEST_CASE)
 
-        identifier = TestClass.__name__ + "." + TEST_CASE
+        identifier = f"{TestClass.__name__}.{TEST_CASE}"
         self.assertEqual(context.identifier, identifier)
 
 
diff --git a/src/antlion/unit_tests/acts_error_test.py b/packages/antlion/unit_tests/acts_error_test.py
similarity index 100%
rename from src/antlion/unit_tests/acts_error_test.py
rename to packages/antlion/unit_tests/acts_error_test.py
diff --git a/src/antlion/unit_tests/acts_import_unit_test.py b/packages/antlion/unit_tests/acts_import_unit_test.py
similarity index 94%
rename from src/antlion/unit_tests/acts_import_unit_test.py
rename to packages/antlion/unit_tests/acts_import_unit_test.py
index 55a340f..b7505bf 100755
--- a/src/antlion/unit_tests/acts_import_unit_test.py
+++ b/packages/antlion/unit_tests/acts_import_unit_test.py
@@ -31,10 +31,6 @@
 
 PY_FILE_REGEX = re.compile(".+\.py$")
 
-DENYLIST = [
-    "antlion/controllers/packet_sender.py",
-]
-
 DENYLIST_DIRECTORIES = []
 
 
@@ -67,7 +63,7 @@
                 path = os.path.relpath(os.path.join(root, f), os.getcwd())
 
                 if PY_FILE_REGEX.match(full_path):
-                    with self.subTest(msg="import %s" % path):
+                    with self.subTest(msg=f"import {path}"):
                         fake_module_name = str(uuid.uuid4())
                         module = import_module(fake_module_name, path)
                         self.assertIsNotNone(module)
diff --git a/src/antlion/unit_tests/acts_job_test.py b/packages/antlion/unit_tests/acts_job_test.py
similarity index 94%
rename from src/antlion/unit_tests/acts_job_test.py
rename to packages/antlion/unit_tests/acts_job_test.py
index a900d73..a79d2f7 100755
--- a/src/antlion/unit_tests/acts_job_test.py
+++ b/packages/antlion/unit_tests/acts_job_test.py
@@ -14,12 +14,14 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import mock
 import os
 import sys
 import unittest
 
+import mock
+
 from antlion.libs.proc import job
+from antlion.runner import CalledProcessError
 
 if os.name == "posix" and sys.version_info[0] < 3:
     import subprocess32 as subprocess
@@ -74,7 +76,7 @@
     )
     def test_run_error(self, popen):
         """Test that we raise on non-zero exit statuses."""
-        self.assertRaises(job.Error, job.run, "exit 1")
+        self.assertRaises(CalledProcessError, job.run, "exit 1")
 
     @mock.patch(
         "antlion.libs.proc.job.subprocess.Popen", return_value=FakePopen(returncode=1)
@@ -90,7 +92,7 @@
     )
     def test_run_timeout(self, popen):
         """Test that we correctly implement command timeouts."""
-        self.assertRaises(job.Error, job.run, "sleep 5", timeout=0.1)
+        self.assertRaises(CalledProcessError, job.run, "sleep 5", timeout_sec=0.1)
 
     @mock.patch(
         "antlion.libs.proc.job.subprocess.Popen",
diff --git a/src/antlion/unit_tests/acts_logger_test.py b/packages/antlion/unit_tests/acts_logger_test.py
similarity index 97%
rename from src/antlion/unit_tests/acts_logger_test.py
rename to packages/antlion/unit_tests/acts_logger_test.py
index f46e79a..61e1c35 100755
--- a/src/antlion/unit_tests/acts_logger_test.py
+++ b/packages/antlion/unit_tests/acts_logger_test.py
@@ -14,11 +14,11 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import unittest
-
-from antlion import logger
 import os
 import time
+import unittest
+
+from mobly import logger
 
 
 class ActsLoggerTest(unittest.TestCase):
diff --git a/src/antlion/unit_tests/acts_sanity_test_config.json b/packages/antlion/unit_tests/acts_sanity_test_config.json
similarity index 65%
rename from src/antlion/unit_tests/acts_sanity_test_config.json
rename to packages/antlion/unit_tests/acts_sanity_test_config.json
index d2a1d74..e721333 100644
--- a/src/antlion/unit_tests/acts_sanity_test_config.json
+++ b/packages/antlion/unit_tests/acts_sanity_test_config.json
@@ -1,15 +1,18 @@
 {
-    "testbed":
-    [
+    "testbed": [
         {
             "_description": "ACTS sanity test bed, no device needed.",
             "name": "Sanity",
             "icecream": 42,
-            "MagicDevice": ["Magic!"]
+            "MagicDevice": [
+                "Magic!"
+            ]
         }
     ],
     "logpath": "/tmp/logs",
-    "testpaths": ["./"],
+    "testpaths": [
+        "./"
+    ],
     "icecream": "mememe",
     "extra_param": "haha"
 }
diff --git a/packages/antlion/unit_tests/acts_sniffer_test_config.json b/packages/antlion/unit_tests/acts_sniffer_test_config.json
new file mode 100644
index 0000000..9e04d34
--- /dev/null
+++ b/packages/antlion/unit_tests/acts_sniffer_test_config.json
@@ -0,0 +1,22 @@
+{
+    "testbed": [
+        {
+            "_description": "ACTS sniffer sanity test bed, no device needed.",
+            "name": "SnifferSanity",
+            "Sniffer": [
+                {
+                    "Type": "local",
+                    "SubType": "tcpdump",
+                    "Interface": "wlan0",
+                    "BaseConfigs": {
+                        "channel": 6
+                    }
+                }
+            ]
+        }
+    ],
+    "logpath": "/tmp/logs",
+    "testpaths": [
+        "./"
+    ]
+}
diff --git a/packages/antlion/unit_tests/acts_utils_test.py b/packages/antlion/unit_tests/acts_utils_test.py
new file mode 100755
index 0000000..1732350
--- /dev/null
+++ b/packages/antlion/unit_tests/acts_utils_test.py
@@ -0,0 +1,325 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+import subprocess
+import unittest
+
+import mock
+
+from antlion import utils
+from antlion.capabilities.ssh import SSHConfig, SSHResult
+from antlion.controllers.android_device import AndroidDevice
+from antlion.controllers.fuchsia_device import FuchsiaDevice
+from antlion.controllers.fuchsia_lib.sl4f import SL4F
+from antlion.controllers.fuchsia_lib.ssh import FuchsiaSSHProvider
+from antlion.controllers.utils_lib.ssh.connection import SshConnection
+from antlion.libs.proc import job
+
+PROVISIONED_STATE_GOOD = 1
+
+MOCK_ENO1_IP_ADDRESSES = """100.127.110.79
+2401:fa00:480:7a00:8d4f:85ff:cc5c:787e
+2401:fa00:480:7a00:459:b993:fcbf:1419
+fe80::c66d:3c75:2cec:1d72"""
+
+MOCK_WLAN1_IP_ADDRESSES = ""
+
+FUCHSIA_INTERFACES = {
+    "id": "1",
+    "result": [
+        {
+            "id": 1,
+            "name": "lo",
+            "ipv4_addresses": [
+                [127, 0, 0, 1],
+            ],
+            "ipv6_addresses": [
+                [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1],
+            ],
+            "online": True,
+            "mac": [0, 0, 0, 0, 0, 0],
+        },
+        {
+            "id": 2,
+            "name": "eno1",
+            "ipv4_addresses": [
+                [100, 127, 110, 79],
+            ],
+            "ipv6_addresses": [
+                [254, 128, 0, 0, 0, 0, 0, 0, 198, 109, 60, 117, 44, 236, 29, 114],
+                [36, 1, 250, 0, 4, 128, 122, 0, 141, 79, 133, 255, 204, 92, 120, 126],
+                [36, 1, 250, 0, 4, 128, 122, 0, 4, 89, 185, 147, 252, 191, 20, 25],
+            ],
+            "online": True,
+            "mac": [0, 224, 76, 5, 76, 229],
+        },
+        {
+            "id": 3,
+            "name": "wlanxc0",
+            "ipv4_addresses": [],
+            "ipv6_addresses": [
+                [254, 128, 0, 0, 0, 0, 0, 0, 96, 255, 93, 96, 52, 253, 253, 243],
+                [254, 128, 0, 0, 0, 0, 0, 0, 70, 7, 11, 255, 254, 118, 126, 192],
+            ],
+            "online": False,
+            "mac": [68, 7, 11, 118, 126, 192],
+        },
+    ],
+    "error": None,
+}
+
+CORRECT_FULL_IP_LIST = {
+    "ipv4_private": [],
+    "ipv4_public": ["100.127.110.79"],
+    "ipv6_link_local": ["fe80::c66d:3c75:2cec:1d72"],
+    "ipv6_private_local": [],
+    "ipv6_public": [
+        "2401:fa00:480:7a00:8d4f:85ff:cc5c:787e",
+        "2401:fa00:480:7a00:459:b993:fcbf:1419",
+    ],
+}
+
+CORRECT_EMPTY_IP_LIST = {
+    "ipv4_private": [],
+    "ipv4_public": [],
+    "ipv6_link_local": [],
+    "ipv6_private_local": [],
+    "ipv6_public": [],
+}
+
+
+class IpAddressUtilTest(unittest.TestCase):
+    def test_positive_ipv4_normal_address(self):
+        ip_address = "192.168.1.123"
+        self.assertTrue(utils.is_valid_ipv4_address(ip_address))
+
+    def test_positive_ipv4_any_address(self):
+        ip_address = "0.0.0.0"
+        self.assertTrue(utils.is_valid_ipv4_address(ip_address))
+
+    def test_positive_ipv4_broadcast(self):
+        ip_address = "255.255.255.0"
+        self.assertTrue(utils.is_valid_ipv4_address(ip_address))
+
+    def test_negative_ipv4_with_ipv6_address(self):
+        ip_address = "fe80::f693:9fff:fef4:1ac"
+        self.assertFalse(utils.is_valid_ipv4_address(ip_address))
+
+    def test_negative_ipv4_with_invalid_string(self):
+        ip_address = "fdsafdsafdsafdsf"
+        self.assertFalse(utils.is_valid_ipv4_address(ip_address))
+
+    def test_negative_ipv4_with_invalid_number(self):
+        ip_address = "192.168.500.123"
+        self.assertFalse(utils.is_valid_ipv4_address(ip_address))
+
+    def test_positive_ipv6(self):
+        ip_address = "fe80::f693:9fff:fef4:1ac"
+        self.assertTrue(utils.is_valid_ipv6_address(ip_address))
+
+    def test_positive_ipv6_link_local(self):
+        ip_address = "fe80::"
+        self.assertTrue(utils.is_valid_ipv6_address(ip_address))
+
+    def test_negative_ipv6_with_ipv4_address(self):
+        ip_address = "192.168.1.123"
+        self.assertFalse(utils.is_valid_ipv6_address(ip_address))
+
+    def test_negative_ipv6_invalid_characters(self):
+        ip_address = "fe80:jkyr:f693:9fff:fef4:1ac"
+        self.assertFalse(utils.is_valid_ipv6_address(ip_address))
+
+    def test_negative_ipv6_invalid_string(self):
+        ip_address = "fdsafdsafdsafdsf"
+        self.assertFalse(utils.is_valid_ipv6_address(ip_address))
+
+    @mock.patch("antlion.controllers.utils_lib.ssh.connection.SshConnection.run")
+    def test_ssh_get_interface_ip_addresses_full(self, ssh_mock):
+        ssh_mock.side_effect = [
+            job.Result(stdout=bytes(MOCK_ENO1_IP_ADDRESSES, "utf-8"), encoding="utf-8"),
+        ]
+        self.assertEqual(
+            utils.get_interface_ip_addresses(SshConnection("mock_settings"), "eno1"),
+            CORRECT_FULL_IP_LIST,
+        )
+
+    @mock.patch("antlion.controllers.utils_lib.ssh.connection.SshConnection.run")
+    def test_ssh_get_interface_ip_addresses_empty(self, ssh_mock):
+        ssh_mock.side_effect = [
+            job.Result(
+                stdout=bytes(MOCK_WLAN1_IP_ADDRESSES, "utf-8"), encoding="utf-8"
+            ),
+        ]
+        self.assertEqual(
+            utils.get_interface_ip_addresses(SshConnection("mock_settings"), "wlan1"),
+            CORRECT_EMPTY_IP_LIST,
+        )
+
+    @mock.patch("antlion.controllers.adb.AdbProxy")
+    @mock.patch.object(AndroidDevice, "is_bootloader", return_value=True)
+    def test_android_get_interface_ip_addresses_full(self, is_bootloader, adb_mock):
+        adb_mock().shell.side_effect = [
+            MOCK_ENO1_IP_ADDRESSES,
+        ]
+        self.assertEqual(
+            utils.get_interface_ip_addresses(AndroidDevice(), "eno1"),
+            CORRECT_FULL_IP_LIST,
+        )
+
+    @mock.patch("antlion.controllers.adb.AdbProxy")
+    @mock.patch.object(AndroidDevice, "is_bootloader", return_value=True)
+    def test_android_get_interface_ip_addresses_empty(self, is_bootloader, adb_mock):
+        adb_mock().shell.side_effect = [
+            MOCK_WLAN1_IP_ADDRESSES,
+        ]
+        self.assertEqual(
+            utils.get_interface_ip_addresses(AndroidDevice(), "wlan1"),
+            CORRECT_EMPTY_IP_LIST,
+        )
+
+    @mock.patch(
+        "antlion.controllers.fuchsia_device.FuchsiaDevice.sl4f",
+        new_callable=mock.PropertyMock,
+    )
+    @mock.patch(
+        "antlion.controllers.fuchsia_device.FuchsiaDevice.ffx",
+        new_callable=mock.PropertyMock,
+    )
+    @mock.patch("antlion.controllers.fuchsia_lib.sl4f.wait_for_port")
+    @mock.patch("antlion.controllers.fuchsia_lib.ssh.FuchsiaSSHProvider.run")
+    @mock.patch("antlion.capabilities.ssh.SSHProvider.wait_until_reachable")
+    @mock.patch(
+        "antlion.controllers.fuchsia_device." "FuchsiaDevice._generate_ssh_config"
+    )
+    @mock.patch(
+        "antlion.controllers."
+        "fuchsia_lib.netstack.netstack_lib."
+        "FuchsiaNetstackLib.netstackListInterfaces"
+    )
+    def test_fuchsia_get_interface_ip_addresses_full(
+        self,
+        list_interfaces_mock,
+        generate_ssh_config_mock,
+        ssh_wait_until_reachable_mock,
+        ssh_run_mock,
+        wait_for_port_mock,
+        ffx_mock,
+        sl4f_mock,
+    ):
+        # Configure the log path which is required by ACTS logger.
+        logging.log_path = "/tmp/unit_test_garbage"
+
+        ssh = FuchsiaSSHProvider(SSHConfig("192.168.1.1", 22, "/dev/null"))
+        ssh_run_mock.return_value = SSHResult(
+            subprocess.CompletedProcess([], 0, stdout=b"", stderr=b"")
+        )
+
+        # Don't try to wait for the SL4F server to start; it's not being used.
+        wait_for_port_mock.return_value = None
+
+        sl4f_mock.return_value = SL4F(ssh, "http://192.168.1.1:80")
+        ssh_wait_until_reachable_mock.return_value = None
+
+        list_interfaces_mock.return_value = FUCHSIA_INTERFACES
+        self.assertEqual(
+            utils.get_interface_ip_addresses(
+                FuchsiaDevice({"ip": "192.168.1.1"}), "eno1"
+            ),
+            CORRECT_FULL_IP_LIST,
+        )
+
+    @mock.patch(
+        "antlion.controllers.fuchsia_device.FuchsiaDevice.sl4f",
+        new_callable=mock.PropertyMock,
+    )
+    @mock.patch(
+        "antlion.controllers.fuchsia_device.FuchsiaDevice.ffx",
+        new_callable=mock.PropertyMock,
+    )
+    @mock.patch("antlion.controllers.fuchsia_lib.sl4f.wait_for_port")
+    @mock.patch("antlion.controllers.fuchsia_lib.ssh.FuchsiaSSHProvider.run")
+    @mock.patch("antlion.capabilities.ssh.SSHProvider.wait_until_reachable")
+    @mock.patch(
+        "antlion.controllers.fuchsia_device." "FuchsiaDevice._generate_ssh_config"
+    )
+    @mock.patch(
+        "antlion.controllers."
+        "fuchsia_lib.netstack.netstack_lib."
+        "FuchsiaNetstackLib.netstackListInterfaces"
+    )
+    def test_fuchsia_get_interface_ip_addresses_empty(
+        self,
+        list_interfaces_mock,
+        generate_ssh_config_mock,
+        ssh_wait_until_reachable_mock,
+        ssh_run_mock,
+        wait_for_port_mock,
+        ffx_mock,
+        sl4f_mock,
+    ):
+        # Configure the log path which is required by ACTS logger.
+        logging.log_path = "/tmp/unit_test_garbage"
+
+        ssh = FuchsiaSSHProvider(SSHConfig("192.168.1.1", 22, "/dev/null"))
+        ssh_run_mock.return_value = SSHResult(
+            subprocess.CompletedProcess([], 0, stdout=b"", stderr=b"")
+        )
+
+        # Don't try to wait for the SL4F server to start; it's not being used.
+        wait_for_port_mock.return_value = None
+        ssh_wait_until_reachable_mock.return_value = None
+        sl4f_mock.return_value = SL4F(ssh, "http://192.168.1.1:80")
+
+        list_interfaces_mock.return_value = FUCHSIA_INTERFACES
+        self.assertEqual(
+            utils.get_interface_ip_addresses(
+                FuchsiaDevice({"ip": "192.168.1.1"}), "wlan1"
+            ),
+            CORRECT_EMPTY_IP_LIST,
+        )
+
+
+class GetDeviceTest(unittest.TestCase):
+    class TestDevice:
+        def __init__(self, id, device_type=None) -> None:
+            self.id = id
+            if device_type:
+                self.device_type = device_type
+
+    def test_get_device_none(self):
+        devices = []
+        self.assertRaises(ValueError, utils.get_device, devices, "DUT")
+
+    def test_get_device_default_one(self):
+        devices = [self.TestDevice(0)]
+        self.assertEqual(utils.get_device(devices, "DUT").id, 0)
+
+    def test_get_device_default_many(self):
+        devices = [self.TestDevice(0), self.TestDevice(1)]
+        self.assertEqual(utils.get_device(devices, "DUT").id, 0)
+
+    def test_get_device_specified_one(self):
+        devices = [self.TestDevice(0), self.TestDevice(1, "DUT")]
+        self.assertEqual(utils.get_device(devices, "DUT").id, 1)
+
+    def test_get_device_specified_many(self):
+        devices = [self.TestDevice(0, "DUT"), self.TestDevice(1, "DUT")]
+        self.assertRaises(ValueError, utils.get_device, devices, "DUT")
+
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/src/antlion/unit_tests/controllers/__init__.py b/packages/antlion/unit_tests/controllers/__init__.py
similarity index 100%
rename from src/antlion/unit_tests/controllers/__init__.py
rename to packages/antlion/unit_tests/controllers/__init__.py
diff --git a/src/antlion/unit_tests/controllers/android_lib/__init__.py b/packages/antlion/unit_tests/controllers/android_lib/__init__.py
similarity index 100%
rename from src/antlion/unit_tests/controllers/android_lib/__init__.py
rename to packages/antlion/unit_tests/controllers/android_lib/__init__.py
diff --git a/src/antlion/unit_tests/controllers/android_lib/logcat_test.py b/packages/antlion/unit_tests/controllers/android_lib/logcat_test.py
similarity index 89%
rename from src/antlion/unit_tests/controllers/android_lib/logcat_test.py
rename to packages/antlion/unit_tests/controllers/android_lib/logcat_test.py
index 8cefca2..82259f2 100644
--- a/src/antlion/unit_tests/controllers/android_lib/logcat_test.py
+++ b/packages/antlion/unit_tests/controllers/android_lib/logcat_test.py
@@ -17,6 +17,7 @@
 import unittest
 
 import mock
+
 from antlion.controllers.android_lib import logcat
 from antlion.controllers.android_lib.logcat import TimestampTracker
 
@@ -28,7 +29,7 @@
 
     @staticmethod
     def patch(patched):
-        return mock.patch("antlion.controllers.android_lib.logcat.%s" % patched)
+        return mock.patch(f"antlion.controllers.android_lib.logcat.{patched}")
 
     def setUp(self):
         self._get_log_level = logcat._get_log_level
@@ -40,20 +41,20 @@
 
     def test_read_output_sets_last_timestamp_if_found(self):
         tracker = TimestampTracker()
-        tracker.read_output(BASE_TIMESTAMP + "D message")
+        tracker.read_output(f"{BASE_TIMESTAMP}D message")
 
         self.assertEqual(tracker.last_timestamp, "2000-01-01 12:34:56.789")
 
     def test_read_output_keeps_last_timestamp_if_no_new_stamp_is_found(self):
         tracker = TimestampTracker()
-        tracker.read_output(BASE_TIMESTAMP + "D message")
+        tracker.read_output(f"{BASE_TIMESTAMP}D message")
         tracker.read_output("--------- beginning of main")
 
         self.assertEqual(tracker.last_timestamp, "2000-01-01 12:34:56.789")
 
     def test_read_output_updates_timestamp_to_first_in_results(self):
         tracker = TimestampTracker()
-        tracker.read_output(BASE_TIMESTAMP + "D 9999-99-99 12:34:56.789")
+        tracker.read_output(f"{BASE_TIMESTAMP}D 9999-99-99 12:34:56.789")
 
         self.assertEqual(tracker.last_timestamp, "2000-01-01 12:34:56.789")
 
@@ -61,31 +62,31 @@
 
     def test_get_log_level_verbose(self):
         """Tests that Logcat's verbose logs make it to the debug level."""
-        level = logcat._get_log_level(BASE_TIMESTAMP + "V")
+        level = logcat._get_log_level(f"{BASE_TIMESTAMP}V")
 
         self.assertEqual(level, logging.DEBUG)
 
     def test_get_log_level_debug(self):
         """Tests that Logcat's debug logs make it to the debug level."""
-        level = logcat._get_log_level(BASE_TIMESTAMP + "D")
+        level = logcat._get_log_level(f"{BASE_TIMESTAMP}D")
 
         self.assertEqual(level, logging.DEBUG)
 
     def test_get_log_level_info(self):
         """Tests that Logcat's info logs make it to the info level."""
-        level = logcat._get_log_level(BASE_TIMESTAMP + "I")
+        level = logcat._get_log_level(f"{BASE_TIMESTAMP}I")
 
         self.assertEqual(level, logging.INFO)
 
     def test_get_log_level_warning(self):
         """Tests that Logcat's warning logs make it to the warning level."""
-        level = logcat._get_log_level(BASE_TIMESTAMP + "W")
+        level = logcat._get_log_level(f"{BASE_TIMESTAMP}W")
 
         self.assertEqual(level, logging.WARNING)
 
     def test_get_log_level_error(self):
         """Tests that Logcat's error logs make it to the error level."""
-        level = logcat._get_log_level(BASE_TIMESTAMP + "E")
+        level = logcat._get_log_level(f"{BASE_TIMESTAMP}E")
 
         self.assertEqual(level, logging.ERROR)
 
@@ -126,7 +127,7 @@
         tracker.read_output(BASE_TIMESTAMP)
         new_command = logcat._on_retry("S3R14L", "extra_params", tracker)(None)
 
-        self.assertIn('-T "%s"' % tracker.last_timestamp, new_command)
+        self.assertIn(f'-T "{tracker.last_timestamp}"', new_command)
 
     def test_on_retry_func_returns_string_that_contains_the_given_serial(self):
         tracker = TimestampTracker()
diff --git a/src/antlion/unit_tests/controllers/android_lib/services_test.py b/packages/antlion/unit_tests/controllers/android_lib/services_test.py
similarity index 95%
rename from src/antlion/unit_tests/controllers/android_lib/services_test.py
rename to packages/antlion/unit_tests/controllers/android_lib/services_test.py
index 9510e21..d0cd787 100644
--- a/src/antlion/unit_tests/controllers/android_lib/services_test.py
+++ b/packages/antlion/unit_tests/controllers/android_lib/services_test.py
@@ -18,8 +18,10 @@
 from unittest import mock
 
 from antlion.controllers.android_lib import services
-from antlion.controllers.android_lib.events import AndroidStartServicesEvent
-from antlion.controllers.android_lib.events import AndroidStopServicesEvent
+from antlion.controllers.android_lib.events import (
+    AndroidStartServicesEvent,
+    AndroidStopServicesEvent,
+)
 from antlion.event import event_bus
 
 
diff --git a/src/antlion/unit_tests/controllers/ap_lib/__init__.py b/packages/antlion/unit_tests/controllers/ap_lib/__init__.py
similarity index 100%
rename from src/antlion/unit_tests/controllers/ap_lib/__init__.py
rename to packages/antlion/unit_tests/controllers/ap_lib/__init__.py
diff --git a/src/antlion/unit_tests/controllers/ap_lib/dhcp_config_test.py b/packages/antlion/unit_tests/controllers/ap_lib/dhcp_config_test.py
similarity index 99%
rename from src/antlion/unit_tests/controllers/ap_lib/dhcp_config_test.py
rename to packages/antlion/unit_tests/controllers/ap_lib/dhcp_config_test.py
index 4770549..c4841b3 100644
--- a/src/antlion/unit_tests/controllers/ap_lib/dhcp_config_test.py
+++ b/packages/antlion/unit_tests/controllers/ap_lib/dhcp_config_test.py
@@ -17,7 +17,7 @@
 import ipaddress
 import unittest
 
-from antlion.controllers.ap_lib.dhcp_config import DhcpConfig, Subnet, StaticMapping
+from antlion.controllers.ap_lib.dhcp_config import DhcpConfig, Subnet
 
 
 class DhcpConfigTest(unittest.TestCase):
diff --git a/packages/antlion/unit_tests/controllers/ap_lib/hostapd_test.py b/packages/antlion/unit_tests/controllers/ap_lib/hostapd_test.py
new file mode 100644
index 0000000..239c3fe
--- /dev/null
+++ b/packages/antlion/unit_tests/controllers/ap_lib/hostapd_test.py
@@ -0,0 +1,103 @@
+#!/usr/bin/env python3
+#
+# Copyright 2023 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import unittest
+from unittest.mock import Mock
+
+from antlion.controllers.ap_lib import hostapd
+from antlion.libs.proc.job import Result
+
+# MAC address that will be used in these tests.
+STA_MAC = "aa:bb:cc:dd:ee:ff"
+
+# Abbreviated output of hostapd_cli STA commands, showing various AUTH/ASSOC/AUTHORIZED states.
+STA_OUTPUT_WITHOUT_STA_AUTHENTICATED = b"""aa:bb:cc:dd:ee:ff
+flags=[WMM][HT][VHT]"""
+
+STA_OUTPUT_WITH_STA_AUTHENTICATED = b"""aa:bb:cc:dd:ee:ff
+flags=[AUTH][WMM][HT][VHT]"""
+
+STA_OUTPUT_WITH_STA_ASSOCIATED = b"""aa:bb:cc:dd:ee:ff
+flags=[AUTH][ASSOC][WMM][HT][VHT]
+aid=42"""
+
+STA_OUTPUT_WITH_STA_AUTHORIZED = b"""aa:bb:cc:dd:ee:ff
+flags=[AUTH][ASSOC][AUTHORIZED][WMM][HT][VHT]
+aid=42"""
+
+
+class HostapdTest(unittest.TestCase):
+    def test_sta_authenticated_true_for_authenticated_sta(self):
+        hostapd_mock = hostapd.Hostapd("mock_runner", "wlan0")
+        hostapd_mock._run_hostapd_cli_cmd = Mock(
+            return_value=Result(
+                command=list(), stdout=STA_OUTPUT_WITH_STA_AUTHENTICATED, exit_status=0
+            )
+        )
+        self.assertTrue(hostapd_mock.sta_authenticated(STA_MAC))
+
+    def test_sta_authenticated_false_for_unauthenticated_sta(self):
+        hostapd_mock = hostapd.Hostapd("mock_runner", "wlan0")
+        hostapd_mock._run_hostapd_cli_cmd = Mock(
+            return_value=Result(
+                command=list(),
+                stdout=STA_OUTPUT_WITHOUT_STA_AUTHENTICATED,
+                exit_status=0,
+            )
+        )
+        self.assertFalse(hostapd_mock.sta_authenticated(STA_MAC))
+
+    def test_sta_associated_true_for_associated_sta(self):
+        hostapd_mock = hostapd.Hostapd("mock_runner", "wlan0")
+        hostapd_mock._run_hostapd_cli_cmd = Mock(
+            return_value=Result(
+                command=list(), stdout=STA_OUTPUT_WITH_STA_ASSOCIATED, exit_status=0
+            )
+        )
+        self.assertTrue(hostapd_mock.sta_associated(STA_MAC))
+
+    def test_sta_associated_false_for_unassociated_sta(self):
+        hostapd_mock = hostapd.Hostapd("mock_runner", "wlan0")
+        # Uses the authenticated-only CLI output.
+        hostapd_mock._run_hostapd_cli_cmd = Mock(
+            return_value=Result(
+                command=list(), stdout=STA_OUTPUT_WITH_STA_AUTHENTICATED, exit_status=0
+            )
+        )
+        self.assertFalse(hostapd_mock.sta_associated(STA_MAC))
+
+    def test_sta_authorized_true_for_authorized_sta(self):
+        hostapd_mock = hostapd.Hostapd("mock_runner", "wlan0")
+        hostapd_mock._run_hostapd_cli_cmd = Mock(
+            return_value=Result(
+                command=list(), stdout=STA_OUTPUT_WITH_STA_AUTHORIZED, exit_status=0
+            )
+        )
+        self.assertTrue(hostapd_mock.sta_authorized(STA_MAC))
+
+    def test_sta_authorized_false_for_unauthorized_sta(self):
+        hostapd_mock = hostapd.Hostapd("mock_runner", "wlan0")
+        # Uses the associated-only CLI output.
+        hostapd_mock._run_hostapd_cli_cmd = Mock(
+            return_value=Result(
+                command=list(), stdout=STA_OUTPUT_WITH_STA_ASSOCIATED, exit_status=0
+            )
+        )
+        self.assertFalse(hostapd_mock.sta_authorized(STA_MAC))
+
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/src/antlion/unit_tests/controllers/ap_lib/radio_measurement_test.py b/packages/antlion/unit_tests/controllers/ap_lib/radio_measurement_test.py
similarity index 100%
rename from src/antlion/unit_tests/controllers/ap_lib/radio_measurement_test.py
rename to packages/antlion/unit_tests/controllers/ap_lib/radio_measurement_test.py
diff --git a/src/antlion/unit_tests/controllers/ap_lib/radvd_test.py b/packages/antlion/unit_tests/controllers/ap_lib/radvd_test.py
similarity index 97%
rename from src/antlion/unit_tests/controllers/ap_lib/radvd_test.py
rename to packages/antlion/unit_tests/controllers/ap_lib/radvd_test.py
index 19d9f7e..da97ddc 100644
--- a/src/antlion/unit_tests/controllers/ap_lib/radvd_test.py
+++ b/packages/antlion/unit_tests/controllers/ap_lib/radvd_test.py
@@ -19,9 +19,7 @@
 from unittest.mock import patch
 
 from antlion.controllers.ap_lib import radvd_constants
-from antlion.controllers.ap_lib.radvd import Error
-from antlion.controllers.ap_lib.radvd import Radvd
-
+from antlion.controllers.ap_lib.radvd import Radvd, RadvdStartError
 from antlion.controllers.ap_lib.radvd_config import RadvdConfig
 
 SEARCH_FILE = "antlion.controllers.utils_lib.commands.shell." "ShellCommand.search_file"
@@ -131,7 +129,7 @@
         is_alive_mock.return_value = False
         search_file_mock.return_value = False
         radvd_mock = Radvd("mock_runner", "wlan0")
-        with self.assertRaises(Error) as context:
+        with self.assertRaises(RadvdStartError) as context:
             radvd_mock._scan_for_errors(True)
         self.assertTrue("Radvd failed to start" in str(context.exception))
 
@@ -141,7 +139,7 @@
         is_alive_mock.return_value = True
         search_file_mock.return_value = True
         radvd_mock = Radvd("mock_runner", "wlan0")
-        with self.assertRaises(Error) as context:
+        with self.assertRaises(RadvdStartError) as context:
             radvd_mock._scan_for_errors(True)
         self.assertTrue("Radvd exited prematurely." in str(context.exception))
 
diff --git a/src/antlion/unit_tests/controllers/ap_lib/wireless_network_management_test.py b/packages/antlion/unit_tests/controllers/ap_lib/wireless_network_management_test.py
similarity index 100%
rename from src/antlion/unit_tests/controllers/ap_lib/wireless_network_management_test.py
rename to packages/antlion/unit_tests/controllers/ap_lib/wireless_network_management_test.py
diff --git a/src/antlion/unit_tests/controllers/iperf_client_test.py b/packages/antlion/unit_tests/controllers/iperf_client_test.py
similarity index 77%
rename from src/antlion/unit_tests/controllers/iperf_client_test.py
rename to packages/antlion/unit_tests/controllers/iperf_client_test.py
index 63b6d90..f387cf9 100644
--- a/src/antlion/unit_tests/controllers/iperf_client_test.py
+++ b/packages/antlion/unit_tests/controllers/iperf_client_test.py
@@ -14,16 +14,19 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 import logging
+import os
 import unittest
 
 import mock
-import os
 
+from antlion.capabilities.ssh import SSHConfig, SSHProvider
 from antlion.controllers import iperf_client
-from antlion.controllers.iperf_client import IPerfClient
-from antlion.controllers.iperf_client import IPerfClientBase
-from antlion.controllers.iperf_client import IPerfClientOverAdb
-from antlion.controllers.iperf_client import IPerfClientOverSsh
+from antlion.controllers.iperf_client import (
+    IPerfClient,
+    IPerfClientBase,
+    IPerfClientOverAdb,
+    IPerfClientOverSsh,
+)
 
 # The position in the call tuple that represents the args array.
 ARGS = 0
@@ -42,10 +45,22 @@
             "Unable to create IPerfClientOverAdb from create().",
         )
 
-    def test_create_can_create_client_over_ssh(self):
+    @mock.patch("subprocess.run")
+    @mock.patch("socket.create_connection")
+    def test_create_can_create_client_over_ssh(
+        self, mock_socket_create_connection, mock_subprocess_run
+    ):
         self.assertIsInstance(
             iperf_client.create(
-                [{"ssh_config": {"user": "root", "host": "192.168.42.11"}}]
+                [
+                    {
+                        "ssh_config": {
+                            "user": "root",
+                            "host": "192.168.42.11",
+                            "identity_file": "/dev/null",
+                        }
+                    }
+                ]
             )[0],
             IPerfClientOverSsh,
             "Unable to create IPerfClientOverSsh from create().",
@@ -100,19 +115,25 @@
 class IPerfClientOverSshTest(unittest.TestCase):
     """Test antlion.controllers.iperf_client.IPerfClientOverSshTest."""
 
+    @mock.patch("socket.create_connection")
+    @mock.patch("subprocess.run")
     @mock.patch("builtins.open")
-    def test_start_writes_output_to_full_file_path(self, mock_open):
-        client = IPerfClientOverSsh({"host": "192.168.42.11", "user": "root"})
-        client._ssh_session = mock.Mock()
+    def test_start_writes_output_to_full_file_path(
+        self, mock_open, mock_subprocess_run, mock_socket_create_connection
+    ):
+        ssh_provider = SSHProvider(
+            SSHConfig(
+                user="root",
+                host_name="192.168.42.11",
+                identity_file="/dev/null",
+            )
+        )
+        client = IPerfClientOverSsh(ssh_provider)
         file_path = "/path/to/foo"
         client._get_full_file_path = lambda _: file_path
-
         client.start("127.0.0.1", "IPERF_ARGS", "TAG")
-
         mock_open.assert_called_with(file_path, "w")
-        mock_open().__enter__().write.assert_called_with(
-            client._ssh_session.run().stdout
-        )
+        mock_open().__enter__().write.assert_called()
 
 
 class IPerfClientOverAdbTest(unittest.TestCase):
@@ -122,7 +143,6 @@
     def test_start_writes_output_to_full_file_path(self, mock_open):
         client = IPerfClientOverAdb(None)
         file_path = "/path/to/foo"
-        expected_output = "output"
         client._get_full_file_path = lambda _: file_path
 
         with mock.patch(
diff --git a/src/antlion/unit_tests/controllers/iperf_server_test.py b/packages/antlion/unit_tests/controllers/iperf_server_test.py
similarity index 94%
rename from src/antlion/unit_tests/controllers/iperf_server_test.py
rename to packages/antlion/unit_tests/controllers/iperf_server_test.py
index 560a7e7..75b8a5e 100644
--- a/src/antlion/unit_tests/controllers/iperf_server_test.py
+++ b/packages/antlion/unit_tests/controllers/iperf_server_test.py
@@ -14,15 +14,17 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 import logging
+import os
 import unittest
 
 import mock
-import os
 
 from antlion.controllers import iperf_server
-from antlion.controllers.iperf_server import IPerfServer
-from antlion.controllers.iperf_server import IPerfServerOverAdb
-from antlion.controllers.iperf_server import IPerfServerOverSsh
+from antlion.controllers.iperf_server import (
+    IPerfServer,
+    IPerfServerOverAdb,
+    IPerfServerOverSsh,
+)
 from antlion.controllers.utils_lib.ssh import settings
 
 # The position in the call tuple that represents the args array.
@@ -58,9 +60,18 @@
     @mock.patch("antlion.controllers.iperf_server.utils")
     def test_create_creates_server_over_ssh_with_ssh_config_and_port(self, _):
         self.assertIsInstance(
-            iperf_server.create([{"ssh_config": {"user": "", "host": ""}, "port": ""}])[
-                0
-            ],
+            iperf_server.create(
+                [
+                    {
+                        "ssh_config": {
+                            "user": "",
+                            "host": "",
+                            "identity_file": "/dev/null",
+                        },
+                        "port": "",
+                    }
+                ]
+            )[0],
             IPerfServerOverSsh,
             "create() failed to create IPerfServerOverSsh for a valid config.",
         )
@@ -209,7 +220,12 @@
 class IPerfServerOverSshTest(unittest.TestCase):
     """Tests antlion.controllers.iperf_server.IPerfServerOverSsh."""
 
-    INIT_ARGS = [settings.from_config({"host": "TEST_HOST", "user": "test"}), "PORT"]
+    INIT_ARGS = [
+        settings.from_config(
+            {"host": "TEST_HOST", "user": "test", "identity_file": "/dev/null"}
+        ),
+        "PORT",
+    ]
 
     @mock.patch("antlion.controllers.iperf_server.connection")
     def test_start_makes_started_true(self, _):
diff --git a/src/antlion/unit_tests/controllers/pdu_lib/__init__.py b/packages/antlion/unit_tests/controllers/pdu_lib/__init__.py
similarity index 100%
rename from src/antlion/unit_tests/controllers/pdu_lib/__init__.py
rename to packages/antlion/unit_tests/controllers/pdu_lib/__init__.py
diff --git a/src/antlion/unit_tests/controllers/pdu_lib/synaccess/__init__.py b/packages/antlion/unit_tests/controllers/pdu_lib/synaccess/__init__.py
similarity index 100%
rename from src/antlion/unit_tests/controllers/pdu_lib/synaccess/__init__.py
rename to packages/antlion/unit_tests/controllers/pdu_lib/synaccess/__init__.py
diff --git a/src/antlion/unit_tests/controllers/pdu_lib/synaccess/np02b_test.py b/packages/antlion/unit_tests/controllers/pdu_lib/synaccess/np02b_test.py
similarity index 96%
rename from src/antlion/unit_tests/controllers/pdu_lib/synaccess/np02b_test.py
rename to packages/antlion/unit_tests/controllers/pdu_lib/synaccess/np02b_test.py
index e5ee543..b852fe0 100644
--- a/src/antlion/unit_tests/controllers/pdu_lib/synaccess/np02b_test.py
+++ b/packages/antlion/unit_tests/controllers/pdu_lib/synaccess/np02b_test.py
@@ -19,7 +19,7 @@
 from unittest.mock import patch
 
 from antlion.controllers.pdu import PduError
-from antlion.controllers.pdu_lib.synaccess.np02b import _TNHelperNP02B, PduDevice
+from antlion.controllers.pdu_lib.synaccess.np02b import PduDevice, _TNHelperNP02B
 
 # Test Constants
 HOST = "192.168.1.2"
@@ -50,7 +50,7 @@
         """cmd should strip whitespace and encode in ASCII."""
         tnhelper = _TNHelperNP02B(HOST)
         telnetlib_mock.Telnet().read_until.return_value = VALID_RESPONSE_BYTE_STR
-        res = tnhelper.cmd(VALID_COMMAND_STR)
+        tnhelper.cmd(VALID_COMMAND_STR)
         telnetlib_mock.Telnet().write.assert_called_with(VALID_COMMAND_BYTE_STR)
 
     @patch("antlion.controllers.pdu_lib.synaccess.np02b.time.sleep")
@@ -84,7 +84,7 @@
             INVALID_COMMAND_OUTPUT_BYTE_STR
         )
         with self.assertRaises(PduError):
-            res = tnhelper.cmd("Some invalid command.")
+            tnhelper.cmd("Some invalid command.")
 
 
 class NP02BPduDeviceTest(unittest.TestCase):
diff --git a/src/antlion/unit_tests/controllers/sl4a_lib/__init__.py b/packages/antlion/unit_tests/controllers/sl4a_lib/__init__.py
similarity index 100%
rename from src/antlion/unit_tests/controllers/sl4a_lib/__init__.py
rename to packages/antlion/unit_tests/controllers/sl4a_lib/__init__.py
diff --git a/src/antlion/unit_tests/controllers/sl4a_lib/rpc_client_test.py b/packages/antlion/unit_tests/controllers/sl4a_lib/rpc_client_test.py
similarity index 100%
rename from src/antlion/unit_tests/controllers/sl4a_lib/rpc_client_test.py
rename to packages/antlion/unit_tests/controllers/sl4a_lib/rpc_client_test.py
diff --git a/src/antlion/unit_tests/controllers/sl4a_lib/rpc_connection_test.py b/packages/antlion/unit_tests/controllers/sl4a_lib/rpc_connection_test.py
similarity index 96%
rename from src/antlion/unit_tests/controllers/sl4a_lib/rpc_connection_test.py
rename to packages/antlion/unit_tests/controllers/sl4a_lib/rpc_connection_test.py
index d76368f..34b6384 100755
--- a/src/antlion/unit_tests/controllers/sl4a_lib/rpc_connection_test.py
+++ b/packages/antlion/unit_tests/controllers/sl4a_lib/rpc_connection_test.py
@@ -13,9 +13,10 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-import mock
 import unittest
 
+import mock
+
 from antlion.controllers.sl4a_lib import rpc_client, rpc_connection
 
 MOCK_RESP = b'{"id": 0, "result": 123, "error": null, "status": 1, "uid": 1}'
@@ -62,7 +63,7 @@
 
         def pass_on_init(start_command):
             if not start_command == rpc_connection.Sl4aConnectionCommand.INIT:
-                self.fail('Must call "init". Called "%s" instead.' % start_command)
+                self.fail(f'Must call "init". Called "{start_command}" instead.')
 
         connection = self.mock_rpc_connection()
         connection._initiate_handshake = pass_on_init
@@ -76,7 +77,7 @@
 
         def pass_on_continue(start_command):
             if start_command != rpc_connection.Sl4aConnectionCommand.CONTINUE:
-                self.fail('Must call "continue". Called "%s" instead.' % start_command)
+                self.fail(f'Must call "continue". Called "{start_command}" instead.')
 
         connection = self.mock_rpc_connection(uid=1)
         connection._initiate_handshake = pass_on_continue
diff --git a/src/antlion/unit_tests/controllers/sl4a_lib/sl4a_manager_test.py b/packages/antlion/unit_tests/controllers/sl4a_lib/sl4a_manager_test.py
similarity index 99%
rename from src/antlion/unit_tests/controllers/sl4a_lib/sl4a_manager_test.py
rename to packages/antlion/unit_tests/controllers/sl4a_lib/sl4a_manager_test.py
index 783cee2..6d42989 100755
--- a/src/antlion/unit_tests/controllers/sl4a_lib/sl4a_manager_test.py
+++ b/packages/antlion/unit_tests/controllers/sl4a_lib/sl4a_manager_test.py
@@ -13,11 +13,11 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-import mock
 import unittest
 
-from antlion.controllers.sl4a_lib import sl4a_manager
-from antlion.controllers.sl4a_lib import rpc_client
+import mock
+
+from antlion.controllers.sl4a_lib import rpc_client, sl4a_manager
 
 
 class Sl4aManagerFactoryTest(unittest.TestCase):
diff --git a/src/antlion/unit_tests/controllers/sl4a_lib/sl4a_session_test.py b/packages/antlion/unit_tests/controllers/sl4a_lib/sl4a_session_test.py
similarity index 98%
rename from src/antlion/unit_tests/controllers/sl4a_lib/sl4a_session_test.py
rename to packages/antlion/unit_tests/controllers/sl4a_lib/sl4a_session_test.py
index e812313..c28ba5b 100755
--- a/src/antlion/unit_tests/controllers/sl4a_lib/sl4a_session_test.py
+++ b/packages/antlion/unit_tests/controllers/sl4a_lib/sl4a_session_test.py
@@ -14,15 +14,15 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 import errno
-import mock
-from socket import timeout
-from socket import error as socket_error
 import unittest
+from socket import error as socket_error
+from socket import timeout
+
+import mock
 from mock import patch
 
 from antlion.controllers.adb_lib.error import AdbError
-from antlion.controllers.sl4a_lib import sl4a_ports
-from antlion.controllers.sl4a_lib import rpc_client
+from antlion.controllers.sl4a_lib import rpc_client, sl4a_ports
 from antlion.controllers.sl4a_lib.rpc_client import Sl4aStartError
 from antlion.controllers.sl4a_lib.sl4a_session import Sl4aSession
 
diff --git a/src/antlion/unit_tests/event/__init__.py b/packages/antlion/unit_tests/event/__init__.py
similarity index 100%
rename from src/antlion/unit_tests/event/__init__.py
rename to packages/antlion/unit_tests/event/__init__.py
diff --git a/packages/antlion/unit_tests/event/decorators_test.py b/packages/antlion/unit_tests/event/decorators_test.py
new file mode 100755
index 0000000..e1542b5
--- /dev/null
+++ b/packages/antlion/unit_tests/event/decorators_test.py
@@ -0,0 +1,50 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import unittest
+from unittest import TestCase
+
+from mock import Mock
+
+from antlion.event.decorators import subscribe_static
+from antlion.event.subscription_handle import SubscriptionHandle
+
+
+class DecoratorsTest(TestCase):
+    """Tests the decorators found in antlion.event.decorators."""
+
+    def test_subscribe_static_return_type(self):
+        """Tests that the subscribe_static is the correct type."""
+        mock = Mock()
+
+        @subscribe_static(type)
+        def test(_):
+            return mock
+
+        self.assertTrue(isinstance(test, SubscriptionHandle))
+
+    def test_subscribe_static_calling_the_function_returns_normally(self):
+        """Tests that functions decorated by subscribe_static can be called."""
+        static_mock = Mock()
+
+        @subscribe_static(type)
+        def test(_):
+            return static_mock
+
+        self.assertEqual(test(Mock()), static_mock)
+
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/src/antlion/unit_tests/event/event_bus_test.py b/packages/antlion/unit_tests/event/event_bus_test.py
similarity index 99%
rename from src/antlion/unit_tests/event/event_bus_test.py
rename to packages/antlion/unit_tests/event/event_bus_test.py
index 5df19b8..f7657ce 100755
--- a/src/antlion/unit_tests/event/event_bus_test.py
+++ b/packages/antlion/unit_tests/event/event_bus_test.py
@@ -16,8 +16,7 @@
 import unittest
 from unittest import TestCase
 
-from mock import Mock
-from mock import patch
+from mock import Mock, patch
 
 from antlion.event import event_bus
 from antlion.event.event import Event
diff --git a/src/antlion/unit_tests/event/event_subscription_test.py b/packages/antlion/unit_tests/event/event_subscription_test.py
similarity index 100%
rename from src/antlion/unit_tests/event/event_subscription_test.py
rename to packages/antlion/unit_tests/event/event_subscription_test.py
diff --git a/src/antlion/unit_tests/libs/__init__.py b/packages/antlion/unit_tests/libs/__init__.py
similarity index 100%
rename from src/antlion/unit_tests/libs/__init__.py
rename to packages/antlion/unit_tests/libs/__init__.py
diff --git a/src/antlion/unit_tests/libs/logging/__init__.py b/packages/antlion/unit_tests/libs/logging/__init__.py
similarity index 100%
rename from src/antlion/unit_tests/libs/logging/__init__.py
rename to packages/antlion/unit_tests/libs/logging/__init__.py
diff --git a/src/antlion/unit_tests/libs/logging/log_stream_test.py b/packages/antlion/unit_tests/libs/logging/log_stream_test.py
similarity index 97%
rename from src/antlion/unit_tests/libs/logging/log_stream_test.py
rename to packages/antlion/unit_tests/libs/logging/log_stream_test.py
index 2dc8790..2a4da06 100755
--- a/src/antlion/unit_tests/libs/logging/log_stream_test.py
+++ b/packages/antlion/unit_tests/libs/logging/log_stream_test.py
@@ -21,10 +21,12 @@
 
 from antlion import context
 from antlion.libs.logging import log_stream
-from antlion.libs.logging.log_stream import AlsoToLogHandler
-from antlion.libs.logging.log_stream import InvalidStyleSetError
-from antlion.libs.logging.log_stream import LogStyles
-from antlion.libs.logging.log_stream import _LogStream
+from antlion.libs.logging.log_stream import (
+    AlsoToLogHandler,
+    InvalidStyleSetError,
+    LogStyles,
+    _LogStream,
+)
 
 
 class TestClass(object):
@@ -43,7 +45,7 @@
     @staticmethod
     def patch(imported_name, *args, **kwargs):
         return mock.patch(
-            "antlion.libs.logging.log_stream.%s" % imported_name, *args, **kwargs
+            f"antlion.libs.logging.log_stream.{imported_name}", *args, **kwargs
         )
 
     @classmethod
@@ -346,9 +348,7 @@
             log_stream.create_logger(
                 self._testMethodName, log_styles=info_monolith_log, base_path=base_path
             )
-            expected = os.path.join(
-                base_path, "%s_%s.txt" % (self._testMethodName, "info")
-            )
+            expected = os.path.join(base_path, f"{self._testMethodName}_{'info'}.txt")
             file_handler.assert_called_with(expected)
 
     # __remove_handler
diff --git a/src/antlion/unit_tests/libs/ota/__init__.py b/packages/antlion/unit_tests/libs/ota/__init__.py
similarity index 100%
rename from src/antlion/unit_tests/libs/ota/__init__.py
rename to packages/antlion/unit_tests/libs/ota/__init__.py
diff --git a/src/antlion/unit_tests/libs/ota/dummy_ota_package.zip b/packages/antlion/unit_tests/libs/ota/dummy_ota_package.zip
similarity index 100%
rename from src/antlion/unit_tests/libs/ota/dummy_ota_package.zip
rename to packages/antlion/unit_tests/libs/ota/dummy_ota_package.zip
Binary files differ
diff --git a/src/antlion/unit_tests/libs/ota/ota_runners/__init__.py b/packages/antlion/unit_tests/libs/ota/ota_runners/__init__.py
similarity index 100%
rename from src/antlion/unit_tests/libs/ota/ota_runners/__init__.py
rename to packages/antlion/unit_tests/libs/ota/ota_runners/__init__.py
diff --git a/src/antlion/unit_tests/libs/ota/ota_runners/ota_runner_factory_test.py b/packages/antlion/unit_tests/libs/ota/ota_runners/ota_runner_factory_test.py
similarity index 93%
rename from src/antlion/unit_tests/libs/ota/ota_runners/ota_runner_factory_test.py
rename to packages/antlion/unit_tests/libs/ota/ota_runners/ota_runner_factory_test.py
index 633f577..f99346b 100644
--- a/src/antlion/unit_tests/libs/ota/ota_runners/ota_runner_factory_test.py
+++ b/packages/antlion/unit_tests/libs/ota/ota_runners/ota_runner_factory_test.py
@@ -14,15 +14,13 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+import logging
 import unittest
 
-import logging
 import mock
 
-from antlion.controllers import android_device
-from antlion.libs.ota.ota_runners import ota_runner
-from antlion.libs.ota.ota_runners import ota_runner_factory
-from antlion import config_parser
+from antlion.libs.ota.ota_runners import ota_runner, ota_runner_factory
+from antlion.libs.ota.ota_runners.ota_runner_factory import OtaConfigError
 
 
 class OtaRunnerFactoryTests(unittest.TestCase):
@@ -34,14 +32,14 @@
 
     def test_get_ota_value_from_config_no_map_key_missing(self):
         acts_config = {}
-        with self.assertRaises(config_parser.ActsConfigError):
+        with self.assertRaises(OtaConfigError):
             ota_runner_factory.get_ota_value_from_config(
                 acts_config, "ota_tool", self.device
             )
 
     def test_get_ota_value_from_config_with_map_key_missing(self):
         acts_config = {"ota_map": {"fake_serial": "MockOtaTool"}}
-        with self.assertRaises(config_parser.ActsConfigError):
+        with self.assertRaises(OtaConfigError):
             ota_runner_factory.get_ota_value_from_config(
                 acts_config, "ota_tool", self.device
             )
@@ -63,7 +61,7 @@
         }
         try:
             ota_runner_factory.create_from_configs(acts_config, self.device)
-        except config_parser.ActsConfigError:
+        except OtaConfigError:
             return
         self.fail(
             "create_from_configs did not throw an error when a tool was"
diff --git a/src/antlion/unit_tests/libs/ota/ota_runners/ota_runner_test.py b/packages/antlion/unit_tests/libs/ota/ota_runners/ota_runner_test.py
similarity index 99%
rename from src/antlion/unit_tests/libs/ota/ota_runners/ota_runner_test.py
rename to packages/antlion/unit_tests/libs/ota/ota_runners/ota_runner_test.py
index 114ec9b..37a74c1 100644
--- a/src/antlion/unit_tests/libs/ota/ota_runners/ota_runner_test.py
+++ b/packages/antlion/unit_tests/libs/ota/ota_runners/ota_runner_test.py
@@ -14,13 +14,13 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import unittest
-import mock
 import os
+import unittest
 
-from antlion.libs.ota.ota_tools import ota_tool
+import mock
+
 from antlion.libs.ota.ota_runners import ota_runner
-from antlion.controllers import android_device
+from antlion.libs.ota.ota_tools import ota_tool
 
 
 class MockOtaTool(ota_tool.OtaTool):
diff --git a/src/antlion/unit_tests/libs/ota/ota_tools/__init__.py b/packages/antlion/unit_tests/libs/ota/ota_tools/__init__.py
similarity index 100%
rename from src/antlion/unit_tests/libs/ota/ota_tools/__init__.py
rename to packages/antlion/unit_tests/libs/ota/ota_tools/__init__.py
diff --git a/src/antlion/unit_tests/libs/ota/ota_tools/adb_sideload_ota_tool_test.py b/packages/antlion/unit_tests/libs/ota/ota_tools/adb_sideload_ota_tool_test.py
similarity index 95%
rename from src/antlion/unit_tests/libs/ota/ota_tools/adb_sideload_ota_tool_test.py
rename to packages/antlion/unit_tests/libs/ota/ota_tools/adb_sideload_ota_tool_test.py
index 19e4abf..51daad1 100644
--- a/src/antlion/unit_tests/libs/ota/ota_tools/adb_sideload_ota_tool_test.py
+++ b/packages/antlion/unit_tests/libs/ota/ota_tools/adb_sideload_ota_tool_test.py
@@ -15,14 +15,14 @@
 # limitations under the License.
 
 import logging
-import mock
 import os
 import unittest
 
+import mock
+
 from antlion.controllers import android_device
 from antlion.libs.ota.ota_runners import ota_runner
-from antlion.libs.ota.ota_tools import ota_tool
-from antlion.libs.ota.ota_tools import adb_sideload_ota_tool
+from antlion.libs.ota.ota_tools import adb_sideload_ota_tool, ota_tool
 
 
 def get_mock_android_device(serial="", ssh_connection=None):
diff --git a/src/antlion/unit_tests/libs/ota/ota_tools/ota_tool_factory_test.py b/packages/antlion/unit_tests/libs/ota/ota_tools/ota_tool_factory_test.py
similarity index 99%
rename from src/antlion/unit_tests/libs/ota/ota_tools/ota_tool_factory_test.py
rename to packages/antlion/unit_tests/libs/ota/ota_tools/ota_tool_factory_test.py
index 1bf516a..4769171 100644
--- a/src/antlion/unit_tests/libs/ota/ota_tools/ota_tool_factory_test.py
+++ b/packages/antlion/unit_tests/libs/ota/ota_tools/ota_tool_factory_test.py
@@ -15,6 +15,7 @@
 # limitations under the License.
 
 import unittest
+
 from antlion.libs.ota.ota_tools import ota_tool_factory
 
 
diff --git a/src/antlion/unit_tests/libs/ota/ota_tools/ota_tool_test.py b/packages/antlion/unit_tests/libs/ota/ota_tools/ota_tool_test.py
similarity index 99%
rename from src/antlion/unit_tests/libs/ota/ota_tools/ota_tool_test.py
rename to packages/antlion/unit_tests/libs/ota/ota_tools/ota_tool_test.py
index b9e236b..09ddad2 100644
--- a/src/antlion/unit_tests/libs/ota/ota_tools/ota_tool_test.py
+++ b/packages/antlion/unit_tests/libs/ota/ota_tools/ota_tool_test.py
@@ -15,6 +15,7 @@
 # limitations under the License.
 
 import unittest
+
 from antlion.libs.ota.ota_tools import ota_tool
 
 
diff --git a/src/antlion/unit_tests/libs/ota/ota_tools/update_device_ota_tool_test.py b/packages/antlion/unit_tests/libs/ota/ota_tools/update_device_ota_tool_test.py
similarity index 99%
rename from src/antlion/unit_tests/libs/ota/ota_tools/update_device_ota_tool_test.py
rename to packages/antlion/unit_tests/libs/ota/ota_tools/update_device_ota_tool_test.py
index fc54452..6e8f3d0 100644
--- a/src/antlion/unit_tests/libs/ota/ota_tools/update_device_ota_tool_test.py
+++ b/packages/antlion/unit_tests/libs/ota/ota_tools/update_device_ota_tool_test.py
@@ -15,10 +15,11 @@
 # limitations under the License.
 
 import logging
-import mock
 import os
 import unittest
 
+import mock
+
 from antlion.controllers import android_device
 from antlion.libs.ota.ota_runners import ota_runner
 from antlion.libs.ota.ota_tools import update_device_ota_tool
diff --git a/src/antlion/unit_tests/libs/ota/ota_updater_test.py b/packages/antlion/unit_tests/libs/ota/ota_updater_test.py
similarity index 99%
rename from src/antlion/unit_tests/libs/ota/ota_updater_test.py
rename to packages/antlion/unit_tests/libs/ota/ota_updater_test.py
index ac92019..763500e 100644
--- a/src/antlion/unit_tests/libs/ota/ota_updater_test.py
+++ b/packages/antlion/unit_tests/libs/ota/ota_updater_test.py
@@ -14,8 +14,10 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import mock
 import unittest
+
+import mock
+
 from antlion.libs.ota import ota_updater
 from antlion.libs.ota.ota_runners import ota_runner
 
diff --git a/src/antlion/unit_tests/libs/proc/__init__.py b/packages/antlion/unit_tests/libs/proc/__init__.py
similarity index 100%
rename from src/antlion/unit_tests/libs/proc/__init__.py
rename to packages/antlion/unit_tests/libs/proc/__init__.py
diff --git a/src/antlion/unit_tests/libs/proc/process_test.py b/packages/antlion/unit_tests/libs/proc/process_test.py
similarity index 97%
rename from src/antlion/unit_tests/libs/proc/process_test.py
rename to packages/antlion/unit_tests/libs/proc/process_test.py
index e83ab46..d362c9b 100644
--- a/src/antlion/unit_tests/libs/proc/process_test.py
+++ b/packages/antlion/unit_tests/libs/proc/process_test.py
@@ -16,10 +16,10 @@
 import subprocess
 import unittest
 
-from antlion.libs.proc.process import Process
-from antlion.libs.proc.process import ProcessError
 import mock
 
+from antlion.libs.proc.process import Process, ProcessError
+
 
 class FakeThread(object):
     def __init__(self, target=None):
@@ -52,9 +52,7 @@
 
     @staticmethod
     def patch(imported_name, *args, **kwargs):
-        return mock.patch(
-            "antlion.libs.proc.process.%s" % imported_name, *args, **kwargs
-        )
+        return mock.patch(f"antlion.libs.proc.process.{imported_name}", *args, **kwargs)
 
     # set_on_output_callback
 
diff --git a/src/antlion/unit_tests/mock_controller.py b/packages/antlion/unit_tests/mock_controller.py
similarity index 100%
rename from src/antlion/unit_tests/mock_controller.py
rename to packages/antlion/unit_tests/mock_controller.py
diff --git a/src/antlion/unit_tests/test_data/1k_2k.raw b/packages/antlion/unit_tests/test_data/1k_2k.raw
similarity index 100%
rename from src/antlion/unit_tests/test_data/1k_2k.raw
rename to packages/antlion/unit_tests/test_data/1k_2k.raw
Binary files differ
diff --git a/src/antlion/unit_tests/test_suite.py b/packages/antlion/unit_tests/test_suite.py
similarity index 89%
rename from src/antlion/unit_tests/test_suite.py
rename to packages/antlion/unit_tests/test_suite.py
index 3194587..ad578d5 100755
--- a/src/antlion/unit_tests/test_suite.py
+++ b/packages/antlion/unit_tests/test_suite.py
@@ -58,7 +58,7 @@
     results = []
 
     for index, test in enumerate(suite._tests):
-        output_file = os.path.join(output_dir, "test_%s.output" % index)
+        output_file = os.path.join(output_dir, f"test_{index}.output")
 
         test_result = subprocess.Popen(
             [sys.executable, test_files[index]],
@@ -72,14 +72,14 @@
         try:
             failures = result.test_result.wait(timeout=60)
             if failures:
-                print("Failure logs for %s:" % result.test_filename, file=sys.stderr)
+                print(f"Failure logs for {result.test_filename}:", file=sys.stderr)
                 with open(result.output_file, "r") as out_file:
                     print(out_file.read(), file=sys.stderr)
-                all_failures.append(result.test_filename + " (failed)")
+                all_failures.append(f"{result.test_filename} (failed)")
         except subprocess.TimeoutExpired:
-            all_failures.append(result.test_filename + " (timed out)")
+            all_failures.append(f"{result.test_filename} (timed out)")
             print(
-                "The following test timed out: %r" % result.test_filename,
+                f"The following test timed out: {result.test_filename!r}",
                 file=sys.stderr,
             )
             with open(result.output_file, "r") as out_file:
diff --git a/packages/antlion/utils.py b/packages/antlion/utils.py
new file mode 100755
index 0000000..cf72e09
--- /dev/null
+++ b/packages/antlion/utils.py
@@ -0,0 +1,1164 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import annotations
+
+import concurrent.futures
+import datetime
+import ipaddress
+import json
+import logging
+import os
+import platform
+import random
+import re
+import signal
+import socket
+import string
+import subprocess
+import time
+import traceback
+import zipfile
+from concurrent.futures import ThreadPoolExecutor
+from dataclasses import dataclass
+from typing import TYPE_CHECKING
+
+from mobly import signals
+
+from antlion.libs.proc import job
+from antlion.runner import CalledProcessError, Runner
+
+if TYPE_CHECKING:
+    from antlion.controllers.android_device import AndroidDevice
+    from antlion.controllers.fuchsia_device import FuchsiaDevice
+    from antlion.controllers.utils_lib.ssh.connection import SshConnection
+
+# File name length is limited to 255 chars on some OS, so we need to make sure
+# the file names we output fits within the limit.
+MAX_FILENAME_LEN = 255
+
+# All Fuchsia devices use this suffix for link-local mDNS host names.
+FUCHSIA_MDNS_TYPE = "_fuchsia._udp.local."
+
+# Default max seconds it takes to Duplicate Address Detection to finish before
+# assigning an IPv6 address.
+DAD_TIMEOUT_SEC = 30
+
+
+class ActsUtilsError(Exception):
+    """Generic error raised for exceptions in ACTS utils."""
+
+
+ascii_letters_and_digits = string.ascii_letters + string.digits
+valid_filename_chars = f"-_.{ascii_letters_and_digits}"
+
+models = (
+    "sprout",
+    "occam",
+    "hammerhead",
+    "bullhead",
+    "razor",
+    "razorg",
+    "shamu",
+    "angler",
+    "volantis",
+    "volantisg",
+    "mantaray",
+    "fugu",
+    "ryu",
+    "marlin",
+    "sailfish",
+)
+
+manufacture_name_to_model = {
+    "flo": "razor",
+    "flo_lte": "razorg",
+    "flounder": "volantis",
+    "flounder_lte": "volantisg",
+    "dragon": "ryu",
+}
+
+GMT_to_olson = {
+    "GMT-9": "America/Anchorage",
+    "GMT-8": "US/Pacific",
+    "GMT-7": "US/Mountain",
+    "GMT-6": "US/Central",
+    "GMT-5": "US/Eastern",
+    "GMT-4": "America/Barbados",
+    "GMT-3": "America/Buenos_Aires",
+    "GMT-2": "Atlantic/South_Georgia",
+    "GMT-1": "Atlantic/Azores",
+    "GMT+0": "Africa/Casablanca",
+    "GMT+1": "Europe/Amsterdam",
+    "GMT+2": "Europe/Athens",
+    "GMT+3": "Europe/Moscow",
+    "GMT+4": "Asia/Baku",
+    "GMT+5": "Asia/Oral",
+    "GMT+6": "Asia/Almaty",
+    "GMT+7": "Asia/Bangkok",
+    "GMT+8": "Asia/Hong_Kong",
+    "GMT+9": "Asia/Tokyo",
+    "GMT+10": "Pacific/Guam",
+    "GMT+11": "Pacific/Noumea",
+    "GMT+12": "Pacific/Fiji",
+    "GMT+13": "Pacific/Tongatapu",
+    "GMT-11": "Pacific/Midway",
+    "GMT-10": "Pacific/Honolulu",
+}
+
+
+def abs_path(path):
+    """Resolve the '.' and '~' in a path to get the absolute path.
+
+    Args:
+        path: The path to expand.
+
+    Returns:
+        The absolute path of the input path.
+    """
+    return os.path.abspath(os.path.expanduser(path))
+
+
+def get_current_epoch_time():
+    """Current epoch time in milliseconds.
+
+    Returns:
+        An integer representing the current epoch time in milliseconds.
+    """
+    return int(round(time.time() * 1000))
+
+
+def get_current_human_time():
+    """Returns the current time in human readable format.
+
+    Returns:
+        The current time stamp in Month-Day-Year Hour:Min:Sec format.
+    """
+    return time.strftime("%m-%d-%Y %H:%M:%S ")
+
+
+def epoch_to_human_time(epoch_time):
+    """Converts an epoch timestamp to human readable time.
+
+    This essentially converts an output of get_current_epoch_time to an output
+    of get_current_human_time
+
+    Args:
+        epoch_time: An integer representing an epoch timestamp in milliseconds.
+
+    Returns:
+        A time string representing the input time.
+        None if input param is invalid.
+    """
+    if isinstance(epoch_time, int):
+        try:
+            d = datetime.datetime.fromtimestamp(epoch_time / 1000)
+            return d.strftime("%m-%d-%Y %H:%M:%S ")
+        except ValueError:
+            return None
+
+
+def get_timezone_olson_id():
+    """Return the Olson ID of the local (non-DST) timezone.
+
+    Returns:
+        A string representing one of the Olson IDs of the local (non-DST)
+        timezone.
+    """
+    tzoffset = int(time.timezone / 3600)
+    gmt = None
+    if tzoffset <= 0:
+        gmt = f"GMT+{-tzoffset}"
+    else:
+        gmt = f"GMT-{tzoffset}"
+    return GMT_to_olson[gmt]
+
+
+def get_next_device(test_bed_controllers, used_devices):
+    """Gets the next device in a list of testbed controllers
+
+    Args:
+        test_bed_controllers: A list of testbed controllers of a particular
+            type, for example a list ACTS Android devices.
+        used_devices: A list of devices that have been used. This can be a
+            mix of devices, for example a fuchsia device and an Android device.
+    Returns:
+        The next device in the test_bed_controllers list or None if there are
+        no items that are not in the used devices list.
+    """
+    if test_bed_controllers:
+        device_list = test_bed_controllers
+    else:
+        raise ValueError("test_bed_controllers is empty.")
+    for used_device in used_devices:
+        if used_device in device_list:
+            device_list.remove(used_device)
+    if device_list:
+        return device_list[0]
+    else:
+        return None
+
+
+def find_files(paths, file_predicate):
+    """Locate files whose names and extensions match the given predicate in
+    the specified directories.
+
+    Args:
+        paths: A list of directory paths where to find the files.
+        file_predicate: A function that returns True if the file name and
+          extension are desired.
+
+    Returns:
+        A list of files that match the predicate.
+    """
+    file_list = []
+    if not isinstance(paths, list):
+        paths = [paths]
+    for path in paths:
+        p = abs_path(path)
+        for dirPath, subdirList, fileList in os.walk(p):
+            for fname in fileList:
+                name, ext = os.path.splitext(fname)
+                if file_predicate(name, ext):
+                    file_list.append((dirPath, name, ext))
+    return file_list
+
+
+def load_config(file_full_path, log_errors=True):
+    """Loads a JSON config file.
+
+    Returns:
+        A JSON object.
+    """
+    with open(file_full_path, "r") as f:
+        try:
+            return json.load(f)
+        except Exception as e:
+            if log_errors:
+                logging.error("Exception error to load %s: %s", f, e)
+            raise
+
+
+def rand_ascii_str(length: int) -> str:
+    """Generates a random string of specified length, composed of ascii letters
+    and digits.
+
+    Args:
+        length: The number of characters in the string.
+
+    Returns:
+        The random string generated.
+    """
+    letters = [random.choice(ascii_letters_and_digits) for i in range(length)]
+    return "".join(letters)
+
+
+def rand_hex_str(length: int) -> str:
+    """Generates a random string of specified length, composed of hex digits
+
+    Args:
+        length: The number of characters in the string.
+
+    Returns:
+        The random string generated.
+    """
+    letters = [random.choice(string.hexdigits) for i in range(length)]
+    return "".join(letters)
+
+
+# Thread/Process related functions.
+def concurrent_exec(func, param_list):
+    """Executes a function with different parameters pseudo-concurrently.
+
+    This is basically a map function. Each element (should be an iterable) in
+    the param_list is unpacked and passed into the function. Due to Python's
+    GIL, there's no true concurrency. This is suited for IO-bound tasks.
+
+    Args:
+        func: The function that performs a task.
+        param_list: A list of iterables, each being a set of params to be
+            passed into the function.
+
+    Returns:
+        A list of return values from each function execution. If an execution
+        caused an exception, the exception object will be the corresponding
+        result.
+    """
+    with concurrent.futures.ThreadPoolExecutor(max_workers=30) as executor:
+        # Start the load operations and mark each future with its params
+        future_to_params = {executor.submit(func, *p): p for p in param_list}
+        return_vals = []
+        for future in concurrent.futures.as_completed(future_to_params):
+            params = future_to_params[future]
+            try:
+                return_vals.append(future.result())
+            except Exception as exc:
+                print(f"{params} generated an exception: {traceback.format_exc()}")
+                return_vals.append(exc)
+        return return_vals
+
+
+def exe_cmd(*cmds):
+    """Executes commands in a new shell.
+
+    Args:
+        cmds: A sequence of commands and arguments.
+
+    Returns:
+        The output of the command run.
+
+    Raises:
+        OSError is raised if an error occurred during the command execution.
+    """
+    cmd = " ".join(cmds)
+    proc = subprocess.Popen(
+        cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True
+    )
+    (out, err) = proc.communicate()
+    if not err:
+        return out
+    raise OSError(err)
+
+
+def require_sl4a(android_devices):
+    """Makes sure sl4a connection is established on the given AndroidDevice
+    objects.
+
+    Args:
+        android_devices: A list of AndroidDevice objects.
+
+    Raises:
+        AssertionError is raised if any given android device does not have SL4A
+        connection established.
+    """
+    for ad in android_devices:
+        msg = f"SL4A connection not established properly on {ad.serial}."
+        assert ad.droid, msg
+
+
+def _assert_subprocess_running(proc):
+    """Checks if a subprocess has terminated on its own.
+
+    Args:
+        proc: A subprocess returned by subprocess.Popen.
+
+    Raises:
+        ActsUtilsError is raised if the subprocess has stopped.
+    """
+    ret = proc.poll()
+    if ret is not None:
+        out, err = proc.communicate()
+        raise ActsUtilsError(
+            "Process %d has terminated. ret: %d, stderr: %s,"
+            " stdout: %s" % (proc.pid, ret, err, out)
+        )
+
+
+def start_standing_subprocess(cmd, check_health_delay=0, shell=True):
+    """Starts a long-running subprocess.
+
+    This is not a blocking call and the subprocess started by it should be
+    explicitly terminated with stop_standing_subprocess.
+
+    For short-running commands, you should use exe_cmd, which blocks.
+
+    You can specify a health check after the subprocess is started to make sure
+    it did not stop prematurely.
+
+    Args:
+        cmd: string, the command to start the subprocess with.
+        check_health_delay: float, the number of seconds to wait after the
+                            subprocess starts to check its health. Default is 0,
+                            which means no check.
+
+    Returns:
+        The subprocess that got started.
+    """
+    proc = subprocess.Popen(
+        cmd,
+        stdout=subprocess.PIPE,
+        stderr=subprocess.PIPE,
+        shell=shell,
+        preexec_fn=os.setpgrp,
+    )
+    logging.debug("Start standing subprocess with cmd: %s", cmd)
+    if check_health_delay > 0:
+        time.sleep(check_health_delay)
+        _assert_subprocess_running(proc)
+    return proc
+
+
+def stop_standing_subprocess(proc, kill_signal=signal.SIGTERM):
+    """Stops a subprocess started by start_standing_subprocess.
+
+    Before killing the process, we check if the process is running, if it has
+    terminated, ActsUtilsError is raised.
+
+    Catches and ignores the PermissionError which only happens on Macs.
+
+    Args:
+        proc: Subprocess to terminate.
+    """
+    pid = proc.pid
+    logging.debug("Stop standing subprocess %d", pid)
+    _assert_subprocess_running(proc)
+    try:
+        os.killpg(pid, kill_signal)
+    except PermissionError:
+        pass
+
+
+def wait_for_standing_subprocess(proc, timeout=None):
+    """Waits for a subprocess started by start_standing_subprocess to finish
+    or times out.
+
+    Propagates the exception raised by the subprocess.wait(.) function.
+    The subprocess.TimeoutExpired exception is raised if the process timed-out
+    rather than terminating.
+
+    If no exception is raised: the subprocess terminated on its own. No need
+    to call stop_standing_subprocess() to kill it.
+
+    If an exception is raised: the subprocess is still alive - it did not
+    terminate. Either call stop_standing_subprocess() to kill it, or call
+    wait_for_standing_subprocess() to keep waiting for it to terminate on its
+    own.
+
+    Args:
+        proc: Subprocess to wait for.
+        timeout: An integer number of seconds to wait before timing out.
+    """
+    proc.wait(timeout)
+
+
+def sync_device_time(ad):
+    """Sync the time of an android device with the current system time.
+
+    Both epoch time and the timezone will be synced.
+
+    Args:
+        ad: The android device to sync time on.
+    """
+    ad.adb.shell("settings put global auto_time 0", ignore_status=True)
+    ad.adb.shell("settings put global auto_time_zone 0", ignore_status=True)
+    droid = ad.droid
+    droid.setTimeZone(get_timezone_olson_id())
+    droid.setTime(get_current_epoch_time())
+
+
+def set_ambient_display(ad, new_state):
+    """Set "Ambient Display" in Settings->Display
+
+    Args:
+        ad: android device object.
+        new_state: new state for "Ambient Display". True or False.
+    """
+    ad.adb.shell(f"settings put secure doze_enabled {1 if new_state else 0}")
+
+
+def set_location_service(ad, new_state):
+    """Set Location service on/off in Settings->Location
+
+    Args:
+        ad: android device object.
+        new_state: new state for "Location service".
+            If new_state is False, turn off location service.
+            If new_state if True, set location service to "High accuracy".
+    """
+    ad.adb.shell(
+        "content insert --uri "
+        " content://com.google.settings/partner --bind "
+        "name:s:network_location_opt_in --bind value:s:1"
+    )
+    ad.adb.shell(
+        "content insert --uri "
+        " content://com.google.settings/partner --bind "
+        "name:s:use_location_for_services --bind value:s:1"
+    )
+    if new_state:
+        ad.adb.shell("settings put secure location_mode 3")
+    else:
+        ad.adb.shell("settings put secure location_mode 0")
+
+
+def parse_ping_ouput(ad, count, out, loss_tolerance=20):
+    """Ping Parsing util.
+
+    Args:
+        ad: Android Device Object.
+        count: Number of ICMP packets sent
+        out: shell output text of ping operation
+        loss_tolerance: Threshold after which flag test as false
+    Returns:
+        False: if packet loss is more than loss_tolerance%
+        True: if all good
+    """
+    result = re.search(
+        r"(\d+) packets transmitted, (\d+) received, (\d+)% packet loss", out
+    )
+    if not result:
+        ad.log.info("Ping failed with %s", out)
+        return False
+
+    packet_loss = int(result.group(3))
+    packet_xmit = int(result.group(1))
+    packet_rcvd = int(result.group(2))
+    min_packet_xmit_rcvd = (100 - loss_tolerance) * 0.01
+    if (
+        packet_loss > loss_tolerance
+        or packet_xmit < count * min_packet_xmit_rcvd
+        or packet_rcvd < count * min_packet_xmit_rcvd
+    ):
+        ad.log.error(
+            "%s, ping failed with loss more than tolerance %s%%",
+            result.group(0),
+            loss_tolerance,
+        )
+        return False
+    ad.log.info("Ping succeed with %s", result.group(0))
+    return True
+
+
+def adb_shell_ping(
+    ad: AndroidDevice,
+    dest_ip: str,
+    count: int = 120,
+    timeout: int = 200,
+    loss_tolerance: int = 20,
+) -> bool:
+    """Ping utility using adb shell.
+
+    Args:
+        ad: Android Device Object.
+        count: Number of ICMP packets to send
+        dest_ip: hostname or IP address to ping
+        loss_tolerance: Threshold after which flag test as false
+        timeout: timeout for icmp pings to complete.
+    """
+    ping_cmd = "ping -W 1"
+    if count:
+        ping_cmd += f" -c {count}"
+    if dest_ip:
+        ping_cmd += f" {dest_ip}"
+    try:
+        ad.log.info("Starting ping test to %s using adb command %s", dest_ip, ping_cmd)
+        out = ad.adb.shell(ping_cmd, timeout=timeout, ignore_status=True)
+        if not parse_ping_ouput(ad, count, out, loss_tolerance):
+            return False
+        return True
+    except Exception as e:
+        ad.log.warning("Ping Test to %s failed with exception %s", dest_ip, e)
+        return False
+
+
+def zip_directory(zip_name, src_dir):
+    """Compress a directory to a .zip file.
+
+    This implementation is thread-safe.
+
+    Args:
+        zip_name: str, name of the generated archive
+        src_dir: str, path to the source directory
+    """
+    with zipfile.ZipFile(zip_name, "w", zipfile.ZIP_DEFLATED) as zip:
+        for root, dirs, files in os.walk(src_dir):
+            for file in files:
+                path = os.path.join(root, file)
+                zip.write(path, os.path.relpath(path, src_dir))
+
+
+def unzip_maintain_permissions(zip_path, extract_location):
+    """Unzip a .zip file while maintaining permissions.
+
+    Args:
+        zip_path: The path to the zipped file.
+        extract_location: the directory to extract to.
+    """
+    with zipfile.ZipFile(zip_path, "r") as zip_file:
+        for info in zip_file.infolist():
+            _extract_file(zip_file, info, extract_location)
+
+
+def _extract_file(zip_file, zip_info, extract_location):
+    """Extracts a single entry from a ZipFile while maintaining permissions.
+
+    Args:
+        zip_file: A zipfile.ZipFile.
+        zip_info: A ZipInfo object from zip_file.
+        extract_location: The directory to extract to.
+    """
+    out_path = zip_file.extract(zip_info.filename, path=extract_location)
+    perm = zip_info.external_attr >> 16
+    os.chmod(out_path, perm)
+
+
+def get_command_uptime(command_regex: str):
+    """Returns the uptime for a given command.
+
+    Args:
+        command_regex: A regex that matches the command line given. Must be
+            pgrep compatible.
+    """
+    pid = job.run(f"pgrep -f {command_regex}").stdout.decode("utf-8")
+    runtime = ""
+    if pid:
+        runtime = job.run(f'ps -o etime= -p "{pid}"').stdout.decode("utf-8")
+    return runtime
+
+
+def get_device_process_uptime(adb, process):
+    """Returns the uptime of a device process."""
+    pid = adb.shell(f"pidof {process}", ignore_status=True)
+    runtime = ""
+    if pid:
+        runtime = adb.shell(f'ps -o etime= -p "{pid}"')
+    return runtime
+
+
+def is_valid_ipv4_address(address):
+    try:
+        socket.inet_pton(socket.AF_INET, address)
+    except AttributeError:  # no inet_pton here, sorry
+        try:
+            socket.inet_aton(address)
+        except socket.error:
+            return False
+        return address.count(".") == 3
+    except socket.error:  # not a valid address
+        return False
+
+    return True
+
+
+def is_valid_ipv6_address(address):
+    if "%" in address:
+        address = address.split("%")[0]
+    try:
+        socket.inet_pton(socket.AF_INET6, address)
+    except socket.error:  # not a valid address
+        return False
+    return True
+
+
+def get_interface_ip_addresses(
+    comm_channel: AndroidDevice | SshConnection | FuchsiaDevice,
+    interface: str,
+) -> dict[str, list[str]]:
+    """Gets all of the ip addresses, ipv4 and ipv6, associated with a
+       particular interface name.
+
+    Args:
+        comm_channel: How to send commands to a device.  Can be ssh, adb serial,
+            etc.  Must have the run function implemented.
+        interface: The interface name on the device, ie eth0
+
+    Returns:
+        A dictionary mapping address type to a list of IP addresses:
+            ipv4_private: Any 192.168, 172.16, 10, or 169.254 addresses
+            ipv4_public: Any IPv4 public addresses
+            ipv6_link_local: Any fe80:: addresses
+            ipv6_private_local: Any fd00:: addresses
+            ipv6_public: Any publicly routable addresses
+    """
+    # Local imports are used here to prevent cyclic dependency.
+    from antlion.controllers.android_device import AndroidDevice
+    from antlion.controllers.fuchsia_device import FuchsiaDevice
+    from antlion.controllers.utils_lib.ssh.connection import SshConnection
+
+    addrs: list[str] = []
+
+    if isinstance(comm_channel, AndroidDevice):
+        addrs = comm_channel.adb.shell(
+            f'ip -o addr show {interface} | awk \'{{gsub("/", " "); print $4}}\''
+        ).splitlines()
+    elif isinstance(comm_channel, SshConnection):
+        ip = comm_channel.run(["ip", "-o", "addr", "show", interface])
+        addrs = [
+            addr.replace("/", " ").split()[3]
+            for addr in ip.stdout.decode("utf-8").splitlines()
+        ]
+    elif isinstance(comm_channel, FuchsiaDevice):
+        for iface in comm_channel.honeydew_fd.netstack.list_interfaces():
+            if iface.name != interface:
+                continue
+            for ipv4_address in iface.ipv4_addresses:
+                addrs.append(str(ipv4_address))
+            for ipv6_address in iface.ipv6_addresses:
+                addrs.append(str(ipv6_address))
+    else:
+        raise ValueError("Unsupported method to send command to device.")
+
+    ipv4_private_local_addresses = []
+    ipv4_public_addresses = []
+    ipv6_link_local_addresses = []
+    ipv6_private_local_addresses = []
+    ipv6_public_addresses = []
+
+    for addr in addrs:
+        on_device_ip = ipaddress.ip_address(addr)
+        if on_device_ip.version == 4:
+            if on_device_ip.is_private:
+                ipv4_private_local_addresses.append(str(on_device_ip))
+            elif on_device_ip.is_global or (
+                # Carrier private doesn't have a property, so we check if
+                # all other values are left unset.
+                not on_device_ip.is_reserved
+                and not on_device_ip.is_unspecified
+                and not on_device_ip.is_link_local
+                and not on_device_ip.is_loopback
+                and not on_device_ip.is_multicast
+            ):
+                ipv4_public_addresses.append(str(on_device_ip))
+        elif on_device_ip.version == 6:
+            if on_device_ip.is_link_local:
+                ipv6_link_local_addresses.append(str(on_device_ip))
+            elif on_device_ip.is_private:
+                ipv6_private_local_addresses.append(str(on_device_ip))
+            elif on_device_ip.is_global:
+                ipv6_public_addresses.append(str(on_device_ip))
+
+    return {
+        "ipv4_private": ipv4_private_local_addresses,
+        "ipv4_public": ipv4_public_addresses,
+        "ipv6_link_local": ipv6_link_local_addresses,
+        "ipv6_private_local": ipv6_private_local_addresses,
+        "ipv6_public": ipv6_public_addresses,
+    }
+
+
+class AddressTimeout(signals.TestError):
+    pass
+
+
+class MultipleAddresses(signals.TestError):
+    pass
+
+
+def get_addr(
+    comm_channel: AndroidDevice | SshConnection | FuchsiaDevice,
+    interface: str,
+    addr_type: str = "ipv4_private",
+    timeout_sec: int | None = None,
+) -> str:
+    """Get the requested type of IP address for an interface; if an address is
+    not available, retry until the timeout has been reached.
+
+    Args:
+        addr_type: Type of address to get as defined by the return value of
+            utils.get_interface_ip_addresses.
+        timeout_sec: Seconds to wait to acquire an address if there isn't one
+            already available. If fetching an IPv4 address, the default is 3
+            seconds. If IPv6, the default is 30 seconds for Duplicate Address
+            Detection.
+
+    Returns:
+        A string containing the requested address.
+
+    Raises:
+        TestAbortClass: timeout_sec is None and invalid addr_type
+        AddressTimeout: No address is available after timeout_sec
+        MultipleAddresses: Several addresses are available
+    """
+    if not timeout_sec:
+        if "ipv4" in addr_type:
+            timeout_sec = 3
+        elif "ipv6" in addr_type:
+            timeout_sec = DAD_TIMEOUT_SEC
+        else:
+            raise signals.TestAbortClass(f'Unknown addr_type "{addr_type}"')
+
+    timeout = time.time() + timeout_sec
+    while time.time() < timeout:
+        ip_addrs = get_interface_ip_addresses(comm_channel, interface)[addr_type]
+        if len(ip_addrs) > 1:
+            raise MultipleAddresses(
+                f'Expected only one "{addr_type}" address, got {ip_addrs}'
+            )
+        elif len(ip_addrs) == 1:
+            return ip_addrs[0]
+
+    raise AddressTimeout(f'No available "{addr_type}" address after {timeout_sec}s')
+
+
+def get_interface_based_on_ip(runner: Runner, desired_ip_address: str) -> str:
+    """Gets the interface for a particular IP
+
+    Args:
+        runner: How to send commands to a device.  Can be ssh, adb serial,
+            etc.  Must have the run function implemented.
+        desired_ip_address: The IP address that is being looked for on a device.
+
+    Returns:
+        The name of the test interface.
+
+    Raises:
+        RuntimeError: when desired_ip_address is not found
+    """
+
+    desired_ip_address = desired_ip_address.split("%", 1)[0]
+    ip = runner.run(["ip", "-o", "addr", "show"])
+    for line in ip.stdout.decode("utf-8").splitlines():
+        if desired_ip_address in line:
+            return line.split()[1]
+    raise RuntimeError(
+        f'IP "{desired_ip_address}" not found in list:\n{ip.stdout.decode("utf-8")}'
+    )
+
+
+def renew_linux_ip_address(runner: Runner, interface: str) -> None:
+    runner.run(f"sudo ip link set {interface} down")
+    runner.run(f"sudo ip link set {interface} up")
+    runner.run(f"sudo dhclient -r {interface}")
+    runner.run(f"sudo dhclient {interface}")
+
+
+def get_ping_command(
+    dest_ip: str,
+    count: int = 3,
+    interval: int = 1000,
+    timeout: int = 1000,
+    size: int = 56,
+    os_type: str = "Linux",
+    additional_ping_params: str = "",
+) -> str:
+    """Builds ping command string based on address type, os, and params.
+
+    Args:
+        dest_ip: string, address to ping (ipv4 or ipv6)
+        count: int, number of requests to send
+        interval: int, time in milliseconds between requests
+        timeout: int, time in milliseconds to wait for response
+        size: int, number of bytes to send,
+        os_type: string, os type of the source device (supports 'Linux',
+            'Darwin')
+        additional_ping_params: string, command option flags to
+            append to the command string
+
+    Returns:
+        The ping command.
+    """
+    if is_valid_ipv4_address(dest_ip):
+        ping_binary = "ping"
+    elif is_valid_ipv6_address(dest_ip):
+        ping_binary = "ping6"
+    else:
+        raise ValueError(f"Invalid ip addr: {dest_ip}")
+
+    if os_type == "Darwin":
+        if is_valid_ipv6_address(dest_ip):
+            # ping6 on MacOS doesn't support timeout
+            logging.debug("Ignoring timeout, as ping6 on MacOS does not support it.")
+            timeout_flag = []
+        else:
+            timeout_flag = ["-t", str(timeout / 1000)]
+    elif os_type == "Linux":
+        timeout_flag = ["-W", str(timeout / 1000)]
+    else:
+        raise ValueError("Invalid OS.  Only Linux and MacOS are supported.")
+
+    ping_cmd = [
+        ping_binary,
+        *timeout_flag,
+        "-c",
+        str(count),
+        "-i",
+        str(interval / 1000),
+        "-s",
+        str(size),
+        additional_ping_params,
+        dest_ip,
+    ]
+    return " ".join(ping_cmd)
+
+
+def ping(
+    comm_channel: Runner,
+    dest_ip: str,
+    count: int = 3,
+    interval: int = 1000,
+    timeout: int = 1000,
+    size: int = 56,
+    additional_ping_params: str = "",
+) -> PingResult:
+    """Generic linux ping function, supports local (acts.libs.proc.job) and
+    SshConnections (acts.libs.proc.job over ssh) to Linux based OSs and MacOS.
+
+    NOTES: This will work with Android over SSH, but does not function over ADB
+    as that has a unique return format.
+
+    Args:
+        comm_channel: communication channel over which to send ping command.
+            Must have 'run' function that returns at least command, stdout,
+            stderr, and exit_status (see acts.libs.proc.job)
+        dest_ip: address to ping (ipv4 or ipv6)
+        count: int, number of packets to send
+        interval: int, time in milliseconds between pings
+        timeout: int, time in milliseconds to wait for response
+        size: int, size of packets in bytes
+        additional_ping_params: string, command option flags to
+            append to the command string
+
+    Returns:
+        Dict containing:
+            command: string
+            exit_status: int (0 or 1)
+            stdout: string
+            stderr: string
+            transmitted: int, number of packets transmitted
+            received: int, number of packets received
+            packet_loss: int, percentage packet loss
+            time: int, time of ping command execution (in milliseconds)
+            rtt_min: float, minimum round trip time
+            rtt_avg: float, average round trip time
+            rtt_max: float, maximum round trip time
+            rtt_mdev: float, round trip time standard deviation
+
+        Any values that cannot be parsed are left as None
+    """
+    from antlion.controllers.utils_lib.ssh.connection import SshConnection
+
+    is_local = comm_channel == job
+    os_type = platform.system() if is_local else "Linux"
+    ping_cmd = get_ping_command(
+        dest_ip,
+        count=count,
+        interval=interval,
+        timeout=timeout,
+        size=size,
+        os_type=os_type,
+        additional_ping_params=additional_ping_params,
+    )
+
+    if isinstance(comm_channel, SshConnection) or is_local:
+        logging.debug(
+            "Running ping with parameters (count: %s, interval: %s, "
+            "timeout: %s, size: %s)",
+            count,
+            interval,
+            timeout,
+            size,
+        )
+        try:
+            ping_result: subprocess.CompletedProcess[
+                bytes
+            ] | CalledProcessError = comm_channel.run(ping_cmd)
+        except CalledProcessError as e:
+            ping_result = e
+    else:
+        raise ValueError(f"Unsupported comm_channel: {type(comm_channel)}")
+
+    summary = re.search(
+        "([0-9]+) packets transmitted.*?([0-9]+) received.*?([0-9]+)% packet "
+        "loss.*?time ([0-9]+)",
+        ping_result.stdout.decode("utf-8"),
+    )
+    rtt_stats = re.search(
+        "= ([0-9.]+)/([0-9.]+)/([0-9.]+)/([0-9.]+)", ping_result.stdout.decode("utf-8")
+    )
+    return PingResult(
+        exit_status=ping_result.returncode,
+        stdout=ping_result.stdout.decode("utf-8"),
+        stderr=ping_result.stderr.decode("utf-8"),
+        transmitted=int(summary.group(1)) if summary else None,
+        received=int(summary.group(2)) if summary else None,
+        time_ms=float(summary.group(4)) / 1000 if summary else None,
+        rtt_min_ms=float(rtt_stats.group(1)) if rtt_stats else None,
+        rtt_avg_ms=float(rtt_stats.group(2)) if rtt_stats else None,
+        rtt_max_ms=float(rtt_stats.group(3)) if rtt_stats else None,
+        rtt_mdev_ms=float(rtt_stats.group(4)) if rtt_stats else None,
+    )
+
+
+@dataclass
+class PingResult:
+    exit_status: int
+    stdout: str
+    stderr: str
+    transmitted: int | None
+    received: int | None
+    time_ms: float | None
+    rtt_min_ms: float | None
+    rtt_avg_ms: float | None
+    rtt_max_ms: float | None
+    rtt_mdev_ms: float | None
+
+    @property
+    def success(self) -> bool:
+        return self.exit_status == 0
+
+
+def ip_in_subnet(ip, subnet):
+    """Validate that ip is in a given subnet.
+
+    Args:
+        ip: string, ip address to verify (eg. '192.168.42.158')
+        subnet: string, subnet to check (eg. '192.168.42.0/24')
+
+    Returns:
+        True, if ip in subnet, else False
+    """
+    return ipaddress.ip_address(ip) in ipaddress.ip_network(subnet)
+
+
+def mac_address_str_to_list(mac_addr_str):
+    """Converts mac address string to list of decimal octets.
+
+    Args:
+        mac_addr_str: string, mac address
+            e.g. '12:34:56:78:9a:bc'
+
+    Returns
+        list, representing mac address octets in decimal
+            e.g. [18, 52, 86, 120, 154, 188]
+    """
+    return [int(octet, 16) for octet in mac_addr_str.split(":")]
+
+
+def mac_address_list_to_str(mac_addr_list: bytes) -> str:
+    """Converts list of decimal octets representing mac address to string.
+
+    Args:
+        mac_addr_list: list, representing mac address octets in decimal
+            e.g. [18, 52, 86, 120, 154, 188]
+
+    Returns:
+        string, mac address
+            e.g. '12:34:56:78:9a:bc'
+    """
+    # Print each octet as hex, right justified, width of 2, and fill with "0".
+    return ":".join([f"{octet:0>2x}" for octet in mac_addr_list])
+
+
+def get_fuchsia_mdns_ipv6_address(device_mdns_name):
+    """Finds the IPv6 link-local address of a Fuchsia device matching a mDNS
+    name.
+
+    Args:
+        device_mdns_name: name of Fuchsia device (e.g. gig-clone-sugar-slash)
+
+    Returns:
+        string, IPv6 link-local address
+    """
+    import psutil
+    from zeroconf import IPVersion, Zeroconf
+
+    if not device_mdns_name:
+        return None
+
+    def mdns_query(interface, address):
+        logging.info(
+            f'Sending mDNS query for device "{device_mdns_name}" using "{address}"'
+        )
+        try:
+            zeroconf = Zeroconf(ip_version=IPVersion.V6Only, interfaces=[address])
+        except RuntimeError as e:
+            if "No adapter found for IP address" in e.args[0]:
+                # Most likely, a device went offline and its control
+                # interface was deleted. This is acceptable since the
+                # device that went offline isn't guaranteed to be the
+                # device we're searching for.
+                logging.warning(f'No adapter found for "{address}"')
+                return None
+            raise
+
+        device_records = zeroconf.get_service_info(
+            FUCHSIA_MDNS_TYPE, f"{device_mdns_name}.{FUCHSIA_MDNS_TYPE}"
+        )
+
+        if device_records:
+            for device_address in device_records.parsed_addresses():
+                device_ip_address = ipaddress.ip_address(device_address)
+                scoped_address = f"{device_address}%{interface}"
+                if (
+                    device_ip_address.version == 6
+                    and device_ip_address.is_link_local
+                    and ping(job, dest_ip=scoped_address).success
+                ):
+                    logging.info(
+                        f'Found device "{device_mdns_name}" at "{scoped_address}"'
+                    )
+                    zeroconf.close()
+                    del zeroconf
+                    return scoped_address
+
+        zeroconf.close()
+        del zeroconf
+        return None
+
+    with ThreadPoolExecutor() as executor:
+        futures = []
+
+        interfaces = psutil.net_if_addrs()
+        for interface in interfaces:
+            for addr in interfaces[interface]:
+                address = addr.address.split("%")[0]
+                if (
+                    addr.family == socket.AF_INET6
+                    and ipaddress.ip_address(address).is_link_local
+                    and address != "fe80::1"
+                ):
+                    futures.append(executor.submit(mdns_query, interface, address))
+
+        for future in futures:
+            addr = future.result()
+            if addr:
+                return addr
+
+    logging.error(f'Unable to find IP address for device "{device_mdns_name}"')
+    return None
+
+
+def get_device(devices, device_type):
+    """Finds a unique device with the specified "device_type" attribute from a
+    list. If none is found, defaults to the first device in the list.
+
+    Example:
+        get_device(android_devices, device_type="DUT")
+        get_device(fuchsia_devices, device_type="DUT")
+        get_device(android_devices + fuchsia_devices, device_type="DUT")
+
+    Args:
+        devices: A list of device controller objects.
+        device_type: (string) Type of device to find, specified by the
+            "device_type" attribute.
+
+    Returns:
+        The matching device controller object, or the first device in the list
+        if not found.
+
+    Raises:
+        ValueError is raised if none or more than one device is
+        matched.
+    """
+    if not devices:
+        raise ValueError("No devices available")
+
+    matches = [
+        d for d in devices if hasattr(d, "device_type") and d.device_type == device_type
+    ]
+
+    if len(matches) == 0:
+        # No matches for the specified "device_type", use the first device
+        # declared.
+        return devices[0]
+    if len(matches) > 1:
+        # Specifying multiple devices with the same "device_type" is a
+        # configuration error.
+        raise ValueError(
+            f'More than one device matching "device_type" == "{device_type}"'
+        )
+
+    return matches[0]
diff --git a/packages/antlion/validation.py b/packages/antlion/validation.py
new file mode 100644
index 0000000..d574fa1
--- /dev/null
+++ b/packages/antlion/validation.py
@@ -0,0 +1,169 @@
+#!/usr/bin/env python3
+#
+# Copyright 2023 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import annotations
+
+from typing import Collection, Literal, Mapping, TypeGuard, TypeVar, overload
+
+from mobly import signals
+
+
+class ValidatorError(signals.TestAbortClass):
+    pass
+
+
+class FieldNotFoundError(ValidatorError):
+    pass
+
+
+class FieldTypeError(ValidatorError):
+    pass
+
+
+T = TypeVar("T")
+
+
+class _NO_DEFAULT:
+    pass
+
+
+class MapValidator:
+    def __init__(self, map: Mapping[str, object]) -> None:
+        self.map = map
+
+    @overload
+    def get(self, type: type[T], key: str, default: None) -> T | None:
+        ...
+
+    @overload
+    def get(
+        self, type: type[T], key: str, default: T | _NO_DEFAULT = _NO_DEFAULT()
+    ) -> T:
+        ...
+
+    def get(
+        self, type: type[T], key: str, default: T | None | _NO_DEFAULT = _NO_DEFAULT()
+    ) -> T | None:
+        """Access the map requiring a value type at the specified key.
+
+        If default is set and the map does not contain the specified key, the
+        default will be returned.
+
+        Args:
+            type: Expected type of the value
+            key: Key to index into the map with
+            default: Default value when the map does not contain key
+
+        Returns:
+            Value of the expected type, or None if default is None.
+
+        Raises:
+            FieldNotFoundError: when default is not set and the map does not contain
+                the specified key
+            FieldTypeError: when the value at the specified key is not the
+                expected type
+        """
+        if key not in self.map:
+            if isinstance(default, type) or default is None:
+                return default
+            raise FieldNotFoundError(
+                f'Required field "{key}" is missing; expected {type.__name__}'
+            )
+        val = self.map[key]
+        if val is None and default is None:
+            return None
+        if not isinstance(val, type):
+            raise FieldTypeError(
+                f'Expected "{key}" to be {type.__name__}, got {describe_type(val)}'
+            )
+        return val
+
+    @overload
+    def list(self, key: str) -> ListValidator:
+        ...
+
+    @overload
+    def list(self, key: str, optional: Literal[False]) -> ListValidator:
+        ...
+
+    @overload
+    def list(self, key: str, optional: Literal[True]) -> ListValidator | None:
+        ...
+
+    def list(self, key: str, optional: bool = False) -> ListValidator | None:
+        """Access the map requiring a list at the specified key.
+
+        If optional is True and the map does not contain the specified key, None
+        will be returned.
+
+        Args:
+            key: Key to index into the map with
+            optional: If True, will return None if the map does not contain key
+
+        Returns:
+            ListValidator or None if optional is True.
+
+        Raises:
+            FieldNotFoundError: when optional is False and the map does not contain
+                the specified key
+            FieldTypeError: when the value at the specified key is not a list
+        """
+        if optional:
+            val = self.get(list, key, None)
+        else:
+            val = self.get(list, key)
+        return None if val is None else ListValidator(key, val)
+
+
+class ListValidator:
+    def __init__(self, name: str, val: list[object]) -> None:
+        self.name = name
+        self.val = val
+
+    def all(self, type: type[T]) -> list[T]:
+        """Access the list requiring all elements to be the specified type.
+
+        Args:
+            type: Expected type of all elements
+
+        Raises:
+            FieldTypeError: when an element is not the expected type
+        """
+        if not is_list_of(self.val, type):
+            raise FieldTypeError(
+                f'Expected "{self.name}" to be list[{type.__name__}], '
+                f"got {describe_type(self.val)}"
+            )
+        return self.val
+
+
+def describe_type(o: object) -> str:
+    """Describe the complete type of the object.
+
+    Different from type() by recursing when a mapping or collection is found.
+    """
+    if isinstance(o, Mapping):
+        keys = set([describe_type(k) for k in o.keys()])
+        values = set([describe_type(v) for v in o.values()])
+        return f'dict[{" | ".join(keys)}, {" | ".join(values)}]'
+    if isinstance(o, Collection) and not isinstance(o, str):
+        elements = set([describe_type(x) for x in o])
+        return f'list[{" | ".join(elements)}]'
+    return type(o).__name__
+
+
+def is_list_of(val: list[object], type: type[T]) -> TypeGuard[list[T]]:
+    return all(isinstance(x, type) for x in val)
diff --git a/pyproject.toml b/pyproject.toml
index b385122..d099de1 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -8,38 +8,35 @@
 name = "antlion"
 description = "Host-driven, hardware-agnostic Fuchsia connectivity tests"
 requires-python = ">=3.8"
-license = {text = "Apache-2.0"}
+license = { text = "Apache-2.0" }
 dynamic = ["version"]
 readme = "README.md"
-dependencies = [
-    "mobly==1.12.0",
-    "pyyaml>=5.1",
-    "tenacity~=8.0",
-]
+dependencies = ["mobly==1.12.2", "pyyaml>=5.1", "tenacity~=8.0"]
 
 [project.optional-dependencies]
 # Required to support development tools
 dev = [
-    "mock",    # required for unit tests
-    "shiv",    # packaging python
-    "toml",    # required for pyproject.toml
-    "vulture", # finds unused code
-    "black",   # code formatting
+    "autoflake~=2.1", # remove unused code
+    "black~=23.3",    # code formatting
+    "isort~=5.12",    # import sorting
+    "mock~=5.0",      # required for unit tests
+    "mypy~=1.8",      # static type checking
+    "shiv~=1.0",      # packaging python
+    "toml==0.10.2",   # required for pyproject.toml
+    "vulture~=2.11",   # finds unused code
+
+    # Library stubs for type checking
+    "types-PyYAML~=6.0",
+    "types-mock~=5.0",
+    "types-psutil~=5.9",
 ]
 digital_loggers_pdu = ["dlipower"]
 html_graphing = ["bokeh"]
-flash = ["usbinfo"]
 mdns = ["psutil", "zeroconf"]
-android = [
-    "numpy",
-    "scapy",
-]
-
-[project.scripts]
-antlion = "antlion.bin.act:main"
+android = ["numpy", "scapy"]
 
 [tool.setuptools.packages.find]
-where = ["src"]
+where = ["packages"]
 
 [tool.autoflake]
 imports = [
@@ -51,11 +48,100 @@
     "numpy",
     "scapy",
     "tenacity",
-    "usbinfo",
     "zeroconf",
 ]
 
-[tools.vulture]
-paths = ["src"]
+[tool.isort]
+profile = "black"
+known_local_folder = ["antlion"]
+
+[tool.mypy]
+mypy_path = "stubs, $FUCHSIA_DIR/src/testing/end_to_end/honeydew, $FUCHSIA_DIR/src/developer/ffx/lib/fuchsia-controller/python"
+python_version = "3.11"
+
+#
+# Disallow dynamic typing
+#
+
+#disallow_any_unimported = true
+#disallow_any_expr = true
+#disallow_any_decorated = true
+#disallow_any_explicit = true
+#disallow_any_generics = true
+disallow_subclassing_any = true
+
+#
+# Untyped definitions and calls
+#
+
+#disallow_untyped_calls = true
+#disallow_untyped_defs = true
+#disallow_incomplete_defs = true
+check_untyped_defs = true
+disallow_untyped_decorators = true
+
+#
+# Configuring warnings
+#
+
+warn_redundant_casts = true
+#warn_unused_ignores = true
+warn_no_return = true
+#warn_return_any = true
+#warn_unreachable = true
+
+#
+# Miscellaneous strictness flags
+#
+
+strict_equality = true
+
+exclude = [
+    # TODO(http://b/285950835): Fix typing of relevant Fuchsia-related utilities and
+    # tests
+    "packages/antlion/unit_tests/",
+
+    # TODO(http://b/274619290): Remove the following files when the migration from ACTS
+    # to Mobly is complete.
+    "packages/antlion/base_test.py",
+    "packages/antlion/context.py",
+    "packages/antlion/libs/yaml_writer.py",
+
+    # TODO(http://b/285950976): Fix typing of non-Fuchsia related controllers and test
+    # utilities, or remove if no longer relevant.
+    "packages/antlion/controllers/adb.py",
+    "packages/antlion/controllers/android_device.py",
+    "packages/antlion/controllers/pdu_lib/digital_loggers/webpowerswitch.py",
+    "packages/antlion/controllers/sl4a_lib/event_dispatcher.py",
+    "packages/antlion/controllers/sl4a_lib/rpc_client.py",
+    "packages/antlion/controllers/sl4a_lib/sl4a_manager.py",
+    "packages/antlion/controllers/sniffer_lib/local/tcpdump.py",
+    "packages/antlion/controllers/sniffer_lib/local/tshark.py",
+    "packages/antlion/libs/logging/log_stream.py",
+    "packages/antlion/libs/ota/ota_runners/ota_runner_factory.py",
+    "packages/antlion/libs/ota/ota_tools/ota_tool_factory.py",
+    "setup.py",
+
+    "stubs/mobly/",
+]
+
+[[tool.mypy.overrides]]
+module = [
+    "png",
+    "fidl.*",
+    "fuchsia_controller_internal",
+    "fuchsia_inspect",
+]
+ignore_missing_imports = true
+
+[[tool.mypy.overrides]]
+module = [
+    "fidl.*",
+    "fuchsia_controller_py",
+]
+ignore_errors = true
+
+[tool.vulture]
+paths = ["packages", "tests"]
 sort_by_size = true
 min_confidence = 80
diff --git a/runner/BUILD.gn b/runner/BUILD.gn
index d405592..1986b59 100644
--- a/runner/BUILD.gn
+++ b/runner/BUILD.gn
@@ -1,13 +1,19 @@
+# Copyright 2024 The Fuchsia Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
 import("//build/rust/rustc_binary.gni")
 import("//build/rust/rustc_test.gni")
 
+assert(is_host, "antlion-runner only supported on the host toolchain")
+
 rustc_binary("runner") {
   output_name = "antlion-runner"
   edition = "2021"
   with_unit_tests = true
 
   deps = [
-    "//src/developer/ffx/lib/netext:lib($host_toolchain)",
+    "//src/developer/ffx/lib/netext:lib",
     "//src/lib/mdns/rust:mdns",
     "//src/lib/network/packet",
     "//third_party/rust_crates:anyhow",
@@ -15,10 +21,12 @@
     "//third_party/rust_crates:home",
     "//third_party/rust_crates:itertools",
     "//third_party/rust_crates:lazy_static",
+    "//third_party/rust_crates:libc",
     "//third_party/rust_crates:nix",
     "//third_party/rust_crates:serde",
     "//third_party/rust_crates:serde_json",
     "//third_party/rust_crates:serde_yaml",
+    "//third_party/rust_crates:signal-hook",
     "//third_party/rust_crates:socket2",
     "//third_party/rust_crates:thiserror",
   ]
@@ -46,5 +54,5 @@
 
 group("tests") {
   testonly = true
-  deps = [ ":runner_test($host_toolchain)" ]
+  deps = [ ":runner_test" ]
 }
diff --git a/runner/README.md b/runner/README.md
index 45c926a..169394a 100644
--- a/runner/README.md
+++ b/runner/README.md
@@ -10,33 +10,41 @@
 can spot simple mistakes before code review!
 
 1. Build Fuchsia with antlion support
+
    ```sh
    jiri update -gc # if you haven't updated in a while
    fx set workstation_eng_paused.qemu-x64 \
       --with-host //third_party/antlion:e2e_tests \
-      --with-host //third_party/antlion:tests \
-      --with //src/testing/sl4f
+      --with-host //third_party/antlion:tests
    fx build # if you haven't built in a while
    ```
+
 2. Start the package server. Keep this running in the background.
+
    ```sh
    fx serve
    ```
+
 3. In a separate terminal, start the emulator with access to external networks.
+
    ```sh
-   ffx emu stop && ffx emu start -H --net tap && ffx log
+   fx ffx emu stop && fx ffx emu start -H --net tap && fx ffx log
    ```
+
 4. In a separate terminal, run a test
+
    ```sh
    fx test --e2e --output //third_party/antlion:sl4f_sanity_test
    ```
 
-> Note: Local testing with auxiliary devices is not supported by antlion runner.
-Use antlion directly for these cases; see the antlion [README](../README.md).
+## Using a specified config file
+
+```sh
+fx test --e2e --output //third_party/antlion:sl4f_sanity_test -- --config-override $(pwd)/config.yaml
+```
 
 ## Testing
 
 ```sh
-fx set core.qemu-x64 --with //vendor/google/build/python/antlion/runner:tests
-fx test --output //vendor/google/build/python/antlion/runner:runner_test
+fx test --output //third_party/antlion/runner:runner_test
 ```
diff --git a/runner/src/config.rs b/runner/src/config.rs
index 571a8ab..c2afce0 100644
--- a/runner/src/config.rs
+++ b/runner/src/config.rs
@@ -65,15 +65,19 @@
 #[derive(Clone, Debug, Serialize)]
 /// A Fuchsia device for use with antlion as defined by [fuchsia_device.py].
 ///
-/// [fuchsia_device.py]: https://cs.opensource.google/fuchsia/fuchsia/+/main:third_party/antlion/src/antlion/controllers/fuchsia_device.py
+/// [fuchsia_device.py]: https://cs.opensource.google/fuchsia/fuchsia/+/main:third_party/antlion/packages/antlion/controllers/fuchsia_device.py
 pub(crate) struct Fuchsia {
     pub mdns_name: String,
     pub ip: IpAddr,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub ssh_port: Option<u16>,
     pub take_bug_report_on_fail: bool,
     pub ssh_binary_path: PathBuf,
     #[serde(skip_serializing_if = "Option::is_none")]
     pub ssh_config: Option<PathBuf>,
     pub ffx_binary_path: PathBuf,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub ffx_subtools_search_path: Option<PathBuf>,
     pub ssh_priv_key: PathBuf,
     #[serde(rename = "PduDevice", skip_serializing_if = "Option::is_none")]
     pub pdu_device: Option<PduRef>,
@@ -98,7 +102,7 @@
 #[derive(Clone, Debug, Serialize)]
 /// Declares an access point for use with antlion as defined by [access_point.py].
 ///
-/// [access_point.py]: https://cs.opensource.google/fuchsia/fuchsia/+/main:third_party/antlion/src/antlion/controllers/access_point.py
+/// [access_point.py]: https://cs.opensource.google/fuchsia/fuchsia/+/main:third_party/antlion/packages/antlion/controllers/access_point.py
 pub(crate) struct AccessPoint {
     pub wan_interface: String,
     pub ssh_config: SshConfig,
@@ -112,6 +116,8 @@
 pub(crate) struct SshConfig {
     pub ssh_binary_path: PathBuf,
     pub host: IpAddr,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub port: Option<u16>,
     pub user: String,
     pub identity_file: PathBuf,
 }
@@ -132,7 +138,7 @@
 #[serde(rename_all = "PascalCase")]
 /// Declares an attenuator for use with antlion as defined by [attenuator.py].
 ///
-/// [access_point.py]: https://cs.opensource.google/fuchsia/fuchsia/+/main:third_party/antlion/src/antlion/controllers/attenuator.py
+/// [access_point.py]: https://cs.opensource.google/fuchsia/fuchsia/+/main:third_party/antlion/packages/antlion/controllers/attenuator.py
 pub(crate) struct Attenuator {
     pub model: String,
     pub instrument_count: u8,
@@ -144,7 +150,7 @@
 #[derive(Clone, Debug, Eq, PartialEq, Serialize)]
 /// Declares a power distribution unit for use with antlion as defined by [pdu.py].
 ///
-/// [pdu.py]: https://cs.opensource.google/fuchsia/fuchsia/+/main:third_party/antlion/src/antlion/controllers/pdu.py
+/// [pdu.py]: https://cs.opensource.google/fuchsia/fuchsia/+/main:third_party/antlion/packages/antlion/controllers/pdu.py
 pub(crate) struct Pdu {
     pub device: String,
     pub host: IpAddr,
@@ -153,7 +159,7 @@
 #[derive(Clone, Debug, Serialize)]
 /// Declares an iPerf3 server for use with antlion as defined by [iperf_server.py].
 ///
-/// [iperf_server.py]: https://cs.opensource.google/fuchsia/fuchsia/+/main:third_party/antlion/src/antlion/controllers/iperf_server.py
+/// [iperf_server.py]: https://cs.opensource.google/fuchsia/fuchsia/+/main:third_party/antlion/packages/antlion/controllers/iperf_server.py
 pub(crate) struct IPerfServer {
     pub ssh_config: SshConfig,
     pub port: u16,
diff --git a/runner/src/driver/infra.rs b/runner/src/driver/infra.rs
index ceff26e..424f014 100644
--- a/runner/src/driver/infra.rs
+++ b/runner/src/driver/infra.rs
@@ -2,8 +2,7 @@
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
 
-use crate::config::PduRef;
-use crate::config::{self, Config};
+use crate::config::{self, Config, PduRef};
 use crate::driver::Driver;
 use crate::env::Environment;
 use crate::net::IpAddr;
@@ -57,6 +56,7 @@
         env: E,
         ssh_binary: PathBuf,
         ffx_binary: PathBuf,
+        ffx_subtools_search_path: Option<PathBuf>,
     ) -> Result<Self, InfraDriverError> {
         let config_path = match env.var(ENV_TESTBED_CONFIG) {
             Ok(p) => PathBuf::from(p),
@@ -101,7 +101,13 @@
 
         Ok(InfraDriver {
             output_dir: output_dir.clone(),
-            config: InfraDriver::parse_targets(targets, ssh_binary, ffx_binary, output_dir)?,
+            config: InfraDriver::parse_targets(
+                targets,
+                ssh_binary,
+                ffx_binary,
+                ffx_subtools_search_path,
+                output_dir,
+            )?,
         })
     }
 
@@ -109,6 +115,7 @@
         targets: Vec<InfraTarget>,
         ssh_binary: PathBuf,
         ffx_binary: PathBuf,
+        ffx_subtools_search_path: Option<PathBuf>,
         output_dir: PathBuf,
     ) -> Result<Config, InfraDriverError> {
         let mut fuchsia_devices: Vec<config::Fuchsia> = vec![];
@@ -149,7 +156,7 @@
         let mut register_pdu = |p: Option<PduRef>| -> Result<(), InfraDriverError> {
             if let Some(PduRef { device, ip, port }) = p {
                 register_port(ip.clone(), port)?;
-                let new = config::Pdu { device: device.clone(), host: ip.clone() };
+                let new = config::Pdu { device, host: ip.clone() };
                 if let Some(old) = pdus.insert(ip.clone(), new.clone()) {
                     if old != new {
                         return Err(ConfigError::DuplicateIp { ip }.into());
@@ -199,11 +206,13 @@
                     fuchsia_devices.push(config::Fuchsia {
                         mdns_name: nodename.clone(),
                         ip: ip.clone(),
+                        ssh_port: None,
                         take_bug_report_on_fail: true,
                         ssh_binary_path: ssh_binary.clone(),
                         // TODO(http://b/244747218): Remove when ssh_config is refactored away
                         ssh_config: None,
                         ffx_binary_path: ffx_binary.clone(),
+                        ffx_subtools_search_path: ffx_subtools_search_path.clone(),
                         ssh_priv_key: ssh_key.clone(),
                         pdu_device: pdu.clone(),
                         hard_reboot_on_fail: true,
@@ -219,6 +228,7 @@
                         ssh_config: config::SshConfig {
                             ssh_binary_path: ssh_binary.clone(),
                             host: ip.clone(),
+                            port: None,
                             user: "root".to_string(),
                             identity_file: ssh_key.clone(),
                         },
@@ -241,6 +251,7 @@
                         ssh_config: config::SshConfig {
                             ssh_binary_path: ssh_binary.clone(),
                             host: ip.clone(),
+                            port: None,
                             user: user.to_string(),
                             identity_file: ssh_key.clone(),
                         },
@@ -362,9 +373,8 @@
 mod test {
     use super::*;
 
-    use crate::run;
-    use crate::runner::Runner;
-    use crate::{env::Environment, runner::ExitStatus};
+    use crate::generate_config_and_run;
+    use crate::runner::{ExitStatus, Runner};
 
     use std::ffi::OsStr;
 
@@ -428,7 +438,7 @@
         let ffx = NamedTempFile::new().unwrap();
         let env = MockEnvironment { config: None, out_dir: None };
 
-        let got = InfraDriver::new(env, ssh.path().to_path_buf(), ffx.path().to_path_buf());
+        let got = InfraDriver::new(env, ssh.path().to_path_buf(), ffx.path().to_path_buf(), None);
         assert_matches!(got, Err(InfraDriverError::NotDetected(_)));
     }
 
@@ -439,7 +449,7 @@
         let out_dir = TempDir::new().unwrap();
         let env = MockEnvironment { config: None, out_dir: Some(out_dir.path().to_path_buf()) };
 
-        let got = InfraDriver::new(env, ssh.path().to_path_buf(), ffx.path().to_path_buf());
+        let got = InfraDriver::new(env, ssh.path().to_path_buf(), ffx.path().to_path_buf(), None);
         assert_matches!(got, Err(InfraDriverError::NotDetected(v)) if v == ENV_TESTBED_CONFIG);
     }
 
@@ -465,7 +475,7 @@
         let env =
             MockEnvironment { config: Some(testbed_config.path().to_path_buf()), out_dir: None };
 
-        let got = InfraDriver::new(env, ssh.path().to_path_buf(), ffx.path().to_path_buf());
+        let got = InfraDriver::new(env, ssh.path().to_path_buf(), ffx.path().to_path_buf(), None);
         assert_matches!(got, Err(InfraDriverError::NotDetected(v)) if v == ENV_OUT_DIR);
     }
 
@@ -483,7 +493,7 @@
             out_dir: Some(out_dir.path().to_path_buf()),
         };
 
-        let got = InfraDriver::new(env, ssh.path().to_path_buf(), ffx.path().to_path_buf());
+        let got = InfraDriver::new(env, ssh.path().to_path_buf(), ffx.path().to_path_buf(), None);
         assert_matches!(got, Err(_));
     }
 
@@ -491,6 +501,7 @@
     fn infra() {
         let ssh = NamedTempFile::new().unwrap();
         let ssh_key = NamedTempFile::new().unwrap();
+        let ffx_subtools = TempDir::new().unwrap();
         let ffx = NamedTempFile::new().unwrap();
         let out_dir = TempDir::new().unwrap();
 
@@ -512,15 +523,21 @@
             config: Some(testbed_config.path().to_path_buf()),
             out_dir: Some(out_dir.path().to_path_buf()),
         };
-        let driver =
-            InfraDriver::new(env, ssh.path().to_path_buf(), ffx.path().to_path_buf()).unwrap();
-        run(runner, driver, None).unwrap();
+        let driver = InfraDriver::new(
+            env,
+            ssh.path().to_path_buf(),
+            ffx.path().to_path_buf(),
+            Some(ffx_subtools.path().to_path_buf()),
+        )
+        .unwrap();
+        generate_config_and_run(runner, driver, None).unwrap();
 
         let got = fs::read_to_string(out_dir.path().join("config.yaml")).unwrap();
 
         let ssh_path = ssh.path().display().to_string();
         let ssh_key_path = ssh_key.path().display().to_string();
         let ffx_path = ffx.path().display().to_string();
+        let ffx_subtools_path = ffx_subtools.path().display();
         let out_path = out_dir.path().display();
         let want = formatdoc! {r#"
         TestBeds:
@@ -532,6 +549,7 @@
               take_bug_report_on_fail: true
               ssh_binary_path: {ssh_path}
               ffx_binary_path: {ffx_path}
+              ffx_subtools_search_path: {ffx_subtools_path}
               ssh_priv_key: {ssh_key_path}
               hard_reboot_on_fail: true
         MoblyParams:
@@ -546,6 +564,7 @@
         let ssh = NamedTempFile::new().unwrap();
         let ssh_key = NamedTempFile::new().unwrap();
         let ffx = NamedTempFile::new().unwrap();
+        let ffx_subtools = TempDir::new().unwrap();
         let out_dir = TempDir::new().unwrap();
 
         let testbed_config = NamedTempFile::new().unwrap();
@@ -572,21 +591,27 @@
             config: Some(testbed_config.path().to_path_buf()),
             out_dir: Some(out_dir.path().to_path_buf()),
         };
-        let driver =
-            InfraDriver::new(env, ssh.path().to_path_buf(), ffx.path().to_path_buf()).unwrap();
+        let driver = InfraDriver::new(
+            env,
+            ssh.path().to_path_buf(),
+            ffx.path().to_path_buf(),
+            Some(ffx_subtools.path().to_path_buf()),
+        )
+        .unwrap();
         let params = "
             sl4f_sanity_test_params:
                 merged_with: true
                 can_overwrite: true
         ";
         let params = serde_yaml::from_str(params).unwrap();
-        run(runner, driver, Some(params)).unwrap();
+        generate_config_and_run(runner, driver, Some(params)).unwrap();
 
         let got = fs::read_to_string(out_dir.path().join("config.yaml")).unwrap();
 
         let ssh_path = ssh.path().display().to_string();
         let ssh_key_path = ssh_key.path().display().to_string();
         let ffx_path = ffx.path().display().to_string();
+        let ffx_subtools_path = ffx_subtools.path().display();
         let out_path = out_dir.path().display();
         let want = formatdoc! {r#"
         TestBeds:
@@ -598,6 +623,7 @@
               take_bug_report_on_fail: true
               ssh_binary_path: {ssh_path}
               ffx_binary_path: {ffx_path}
+              ffx_subtools_search_path: {ffx_subtools_path}
               ssh_priv_key: {ssh_key_path}
               hard_reboot_on_fail: true
           TestParams:
@@ -627,6 +653,7 @@
         let ssh = NamedTempFile::new().unwrap();
         let ssh_key = NamedTempFile::new().unwrap();
         let ffx = NamedTempFile::new().unwrap();
+        let ffx_subtools = TempDir::new().unwrap();
         let out_dir = TempDir::new().unwrap();
 
         let testbed_config = NamedTempFile::new().unwrap();
@@ -674,15 +701,21 @@
             config: Some(testbed_config.path().to_path_buf()),
             out_dir: Some(out_dir.path().to_path_buf()),
         };
-        let driver =
-            InfraDriver::new(env, ssh.path().to_path_buf(), ffx.path().to_path_buf()).unwrap();
-        run(runner, driver, None).unwrap();
+        let driver = InfraDriver::new(
+            env,
+            ssh.path().to_path_buf(),
+            ffx.path().to_path_buf(),
+            Some(ffx_subtools.path().to_path_buf()),
+        )
+        .unwrap();
+        generate_config_and_run(runner, driver, None).unwrap();
 
         let got = std::fs::read_to_string(out_dir.path().join("config.yaml")).unwrap();
 
         let ssh_path = ssh.path().display().to_string();
         let ssh_key_path = ssh_key.path().display().to_string();
         let ffx_path = ffx.path().display().to_string();
+        let ffx_subtools_path = ffx_subtools.path().display();
         let out_path = out_dir.path().display();
         let want = formatdoc! {r#"
         TestBeds:
@@ -694,6 +727,7 @@
               take_bug_report_on_fail: true
               ssh_binary_path: {ssh_path}
               ffx_binary_path: {ffx_path}
+              ffx_subtools_search_path: {ffx_subtools_path}
               ssh_priv_key: {ssh_key_path}
               PduDevice:
                 device: synaccess.np02b
@@ -787,7 +821,7 @@
             config: Some(testbed_config.path().to_path_buf()),
             out_dir: Some(out_dir.path().to_path_buf()),
         };
-        let got = InfraDriver::new(env, ssh.path().to_path_buf(), ffx.path().to_path_buf());
+        let got = InfraDriver::new(env, ssh.path().to_path_buf(), ffx.path().to_path_buf(), None);
         assert_matches!(got,
             Err(InfraDriverError::Config(ConfigError::DuplicatePort { ip, port }))
                 if ip == pdu_ip && port == pdu_port
@@ -835,7 +869,7 @@
             out_dir: Some(out_dir.path().to_path_buf()),
         };
         assert_matches!(
-            InfraDriver::new(env, ssh.path().to_path_buf(), ffx.path().to_path_buf()),
+            InfraDriver::new(env, ssh.path().to_path_buf(), ffx.path().to_path_buf(), None),
             Err(InfraDriverError::Config(ConfigError::DuplicateIp { ip }))
                 if ip == duplicate_ip
         );
@@ -871,7 +905,7 @@
             config: Some(testbed_config.path().to_path_buf()),
             out_dir: Some(out_dir.path().to_path_buf()),
         };
-        let got = InfraDriver::new(env, ssh.path().to_path_buf(), ffx.path().to_path_buf());
+        let got = InfraDriver::new(env, ssh.path().to_path_buf(), ffx.path().to_path_buf(), None);
         assert_matches!(got,
             Err(InfraDriverError::Config(ConfigError::DuplicateIp { ip }))
                 if ip == duplicate_ip
diff --git a/runner/src/driver/local.rs b/runner/src/driver/local.rs
index 983a6a7..e067034 100644
--- a/runner/src/driver/local.rs
+++ b/runner/src/driver/local.rs
@@ -7,6 +7,7 @@
 use crate::finder::{Answer, Finder};
 use crate::net::IpAddr;
 
+use anyhow::format_err;
 use std::path::{Path, PathBuf};
 
 use anyhow::{ensure, Context, Result};
@@ -19,18 +20,25 @@
 /// for testing changes locally in a development environment.
 pub(crate) struct LocalDriver {
     target: LocalTarget,
+    access_point: Option<LocalAccessPoint>,
     output_dir: PathBuf,
     ssh_binary: PathBuf,
     ffx_binary: PathBuf,
+    ffx_subtools_search_path: Option<PathBuf>,
 }
 
 impl LocalDriver {
     pub fn new<F>(
+        finder: F,
         device: Option<String>,
         ssh_binary: PathBuf,
         ssh_key: Option<PathBuf>,
         ffx_binary: PathBuf,
+        ffx_subtools_search_path: Option<PathBuf>,
         out_dir: Option<PathBuf>,
+        ap_ip: Option<String>,
+        ap_ssh_port: Option<u16>,
+        ap_ssh_key: Option<PathBuf>,
     ) -> Result<Self>
     where
         F: Finder,
@@ -39,20 +47,87 @@
             Some(p) => Ok(p),
             None => std::env::current_dir().context("Failed to get current working directory"),
         }?;
+
+        let target = LocalTarget::new(finder, device, ssh_key)?;
+
+        // If an access point IP has been provided, try to derive other AP-related parameters
+        let access_point = if let Some(ip_str) = ap_ip {
+            let ssh_port = ap_ssh_port.unwrap_or_else(|| {
+                let default_ssh_port = 22;
+                println!("AP IP provided without AP SSH port, assuming {default_ssh_port}");
+                default_ssh_port
+            });
+            let ssh_key = match ap_ssh_key {
+                Some(path) => Ok(path),
+                None => match find_ap_ssh_key() {
+                    Ok(path) => {
+                        println!("Using AP SSH key found at {}", path.display());
+                        Ok(path)
+                    }
+                    Err(e) => Err(e),
+                },
+            }?;
+            Some(LocalAccessPoint {
+                ip: ip_str.parse::<IpAddr>().expect("Failed to parse AP IP address"),
+                ssh_port: Some(ssh_port),
+                ssh_key,
+            })
+        } else {
+            None
+        };
+
         Ok(Self {
-            target: LocalTarget::new::<F>(device, ssh_key)?,
+            target,
+            access_point,
             output_dir,
             ssh_binary,
             ffx_binary,
+            ffx_subtools_search_path,
         })
     }
 }
 
+fn find_ap_ssh_key() -> Result<PathBuf> {
+    // Look for the SSH key at some known paths
+    let home_dir = std::env::var("HOME").map_err(|_| {
+        format_err!(
+            "AP IP was provided, but AP SSH key not provided and could not be automatically found"
+        )
+    })?;
+    let home_dir = Path::new(&home_dir);
+    let ssh_key_search_paths =
+        [home_dir.join(".ssh/onhub_testing_rsa"), home_dir.join(".ssh/testing_rsa")];
+    for path in ssh_key_search_paths.clone() {
+        if path.exists() {
+            return Ok(path);
+        }
+    }
+    let ssh_key_search_paths =
+        ssh_key_search_paths.map(|p| p.to_string_lossy().into_owned()).join(", ");
+    return Err(format_err!("AP IP is provided, but AP SSH key was not provided, and not found in default locations: [{}]", ssh_key_search_paths));
+}
+
 impl Driver for LocalDriver {
     fn output_path(&self) -> &Path {
         self.output_dir.as_path()
     }
     fn config(&self) -> config::Config {
+        let mut access_points = vec![];
+        if let Some(ref ap) = self.access_point {
+            access_points.push(config::AccessPoint {
+                wan_interface: "eth0".to_string(),
+                ssh_config: config::SshConfig {
+                    ssh_binary_path: self.ssh_binary.clone(),
+                    host: ap.ip.clone(),
+                    port: ap.ssh_port,
+                    user: "root".to_string(),
+                    identity_file: ap.ssh_key.clone(),
+                },
+                pdu_device: None,
+                attenuators: None,
+            });
+        }
+
         config::Config {
             testbeds: vec![config::Testbed {
                 name: TESTBED_NAME.to_string(),
@@ -61,14 +136,17 @@
                         mdns_name: self.target.name.clone(),
                         ip: self.target.ip.clone(),
                         take_bug_report_on_fail: true,
+                        ssh_port: self.target.ssh_port.clone(),
                         ssh_binary_path: self.ssh_binary.clone(),
                         // TODO(http://b/244747218): Remove when ssh_config is refactored away
                         ssh_config: None,
                         ffx_binary_path: self.ffx_binary.clone(),
+                        ffx_subtools_search_path: self.ffx_subtools_search_path.clone(),
                         ssh_priv_key: self.target.ssh_key.clone(),
                         pdu_device: None,
-                        hard_reboot_on_fail: true,
+                        hard_reboot_on_fail: false,
                     }],
+                    access_points: access_points,
                     ..Default::default()
                 },
                 test_params: None,
@@ -85,40 +163,28 @@
     }
 }
 
+struct LocalAccessPoint {
+    ip: IpAddr,
+    ssh_port: Option<u16>,
+    ssh_key: PathBuf,
+}
+
 /// LocalTargetInfo performs best-effort discovery of target information from
 /// standard Fuchsia environmental variables.
 struct LocalTarget {
     name: String,
     ip: IpAddr,
+    ssh_port: Option<u16>,
     ssh_key: PathBuf,
 }
 
 impl LocalTarget {
-    fn new<F>(device: Option<String>, ssh_key: Option<PathBuf>) -> Result<Self>
-    where
-        F: Finder,
-    {
-        let device_name = device.or_else(|| match std::env::var("FUCHSIA_DIR") {
-            Ok(dir) => match std::fs::read_to_string(format!("{dir}/out/default.device")) {
-                Ok(name) => Some(name.trim().to_string()),
-                Err(_) => {
-                    println!("A default device using \"fx set-device\" has not been set");
-                    println!("Using the first Fuchsia device discovered via mDNS");
-                    None
-                }
-            },
-            Err(_) => {
-                println!("Neither --device nor FUCHSIA_DIR has been set");
-                println!("Using the first Fuchsia device discovered via mDNS");
-                None
-            }
-        });
-
-        let Answer { name, ip } = F::find_device(device_name)?;
+    fn new<F: Finder>(finder: F, device: Option<String>, ssh_key: Option<PathBuf>) -> Result<Self> {
+        let Answer { name, ip, ssh_port } = finder.find_device(device)?;
 
         // TODO: Move this validation out to Args
         let ssh_key = ssh_key
-            .or_else(|| home_dir().map(|p| p.join(".ssh/fuchsia_ed25519").to_path_buf()))
+            .or_else(|| home_dir().map(|p| p.join(".ssh/fuchsia_ed25519")))
             .context("Failed to detect the private Fuchsia SSH key")?;
 
         ensure!(
@@ -130,7 +196,7 @@
             ssh_key.display()
         );
 
-        Ok(LocalTarget { name, ip, ssh_key })
+        Ok(LocalTarget { name, ip, ssh_port, ssh_key })
     }
 }
 
@@ -138,8 +204,7 @@
 mod test {
     use super::*;
 
-    use crate::finder::{Answer, Finder};
-    use crate::run;
+    use crate::generate_config_and_run;
     use crate::runner::{ExitStatus, Runner};
 
     use indoc::formatdoc;
@@ -147,16 +212,30 @@
     use tempfile::{NamedTempFile, TempDir};
 
     const FUCHSIA_NAME: &'static str = "fuchsia-1234-5678-9abc";
-    const FUCHSIA_ADDR: &'static str = "fe80::1%2";
+    const FUCHSIA_ADDR: &'static str = "fe80::1%eth0";
     const FUCHSIA_IP: &'static str = "fe80::1";
-    const SCOPE_ID: u32 = 2;
+    const FUCHSIA_IPV4: &'static str = "127.0.0.1";
+    const FUCHSIA_SSH_PORT: u16 = 5002;
+    const SCOPE_ID: &'static str = "eth0";
 
     struct MockFinder;
     impl Finder for MockFinder {
-        fn find_device(_: Option<String>) -> Result<Answer> {
+        fn find_device(&self, _: Option<String>) -> Result<Answer> {
             Ok(Answer {
                 name: FUCHSIA_NAME.to_string(),
-                ip: IpAddr::V6(FUCHSIA_IP.parse().unwrap(), Some(SCOPE_ID)),
+                ip: IpAddr::V6(FUCHSIA_IP.parse().unwrap(), Some(SCOPE_ID.to_string())),
+                ssh_port: None,
+            })
+        }
+    }
+
+    struct MockFinderWithSsh;
+    impl Finder for MockFinderWithSsh {
+        fn find_device(&self, _: Option<String>) -> Result<Answer> {
+            Ok(Answer {
+                name: FUCHSIA_NAME.to_string(),
+                ip: IpAddr::V4(FUCHSIA_IPV4.parse().unwrap()),
+                ssh_port: Some(FUCHSIA_SSH_PORT),
             })
         }
     }
@@ -178,12 +257,17 @@
         let ffx = NamedTempFile::new().unwrap();
         let out_dir = TempDir::new().unwrap();
 
-        assert!(LocalDriver::new::<MockFinder>(
+        assert!(LocalDriver::new(
+            MockFinder {},
             None,
             ssh.path().to_path_buf(),
             Some(PathBuf::new()),
             ffx.path().to_path_buf(),
+            None,
             Some(out_dir.path().to_path_buf()),
+            None,
+            None,
+            None,
         )
         .is_err());
     }
@@ -193,25 +277,32 @@
         let ssh = NamedTempFile::new().unwrap();
         let ssh_key = NamedTempFile::new().unwrap();
         let ffx = NamedTempFile::new().unwrap();
+        let ffx_subtools = TempDir::new().unwrap();
         let out_dir = TempDir::new().unwrap();
 
         let runner = MockRunner::default();
-        let driver = LocalDriver::new::<MockFinder>(
+        let driver = LocalDriver::new(
+            MockFinder {},
             None,
             ssh.path().to_path_buf(),
             Some(ssh_key.path().to_path_buf()),
             ffx.path().to_path_buf(),
+            Some(ffx_subtools.path().to_path_buf()),
             Some(out_dir.path().to_path_buf()),
+            None,
+            None,
+            None,
         )
         .unwrap();
 
-        run(runner, driver, None).unwrap();
+        generate_config_and_run(runner, driver, None).unwrap();
 
         let got = std::fs::read_to_string(out_dir.path().join("config.yaml")).unwrap();
 
         let ssh_path = ssh.path().display();
         let ssh_key_path = ssh_key.path().display();
         let ffx_path = ffx.path().display();
+        let ffx_subtools_path = ffx_subtools.path().display();
         let out_path = out_dir.path().display();
         let want = formatdoc! {r#"
         TestBeds:
@@ -223,8 +314,62 @@
               take_bug_report_on_fail: true
               ssh_binary_path: {ssh_path}
               ffx_binary_path: {ffx_path}
+              ffx_subtools_search_path: {ffx_subtools_path}
               ssh_priv_key: {ssh_key_path}
-              hard_reboot_on_fail: true
+              hard_reboot_on_fail: false
+        MoblyParams:
+          LogPath: {out_path}
+        "#};
+
+        assert_eq!(got, want);
+    }
+
+    #[test]
+    fn local_with_ssh_port() {
+        let ssh = NamedTempFile::new().unwrap();
+        let ssh_key = NamedTempFile::new().unwrap();
+        let ffx = NamedTempFile::new().unwrap();
+        let ffx_subtools = TempDir::new().unwrap();
+        let out_dir = TempDir::new().unwrap();
+
+        let runner = MockRunner::default();
+        let driver = LocalDriver::new(
+            MockFinderWithSsh {},
+            None,
+            ssh.path().to_path_buf(),
+            Some(ssh_key.path().to_path_buf()),
+            ffx.path().to_path_buf(),
+            Some(ffx_subtools.path().to_path_buf()),
+            Some(out_dir.path().to_path_buf()),
+            None,
+            None,
+            None,
+        )
+        .unwrap();
+
+        generate_config_and_run(runner, driver, None).unwrap();
+
+        let got = std::fs::read_to_string(out_dir.path().join("config.yaml")).unwrap();
+
+        let ssh_path = ssh.path().display();
+        let ssh_key_path = ssh_key.path().display();
+        let ffx_path = ffx.path().display();
+        let ffx_subtools_path = ffx_subtools.path().display();
+        let out_path = out_dir.path().display();
+        let want = formatdoc! {r#"
+        TestBeds:
+        - Name: {TESTBED_NAME}
+          Controllers:
+            FuchsiaDevice:
+            - mdns_name: {FUCHSIA_NAME}
+              ip: {FUCHSIA_IPV4}
+              ssh_port: {FUCHSIA_SSH_PORT}
+              take_bug_report_on_fail: true
+              ssh_binary_path: {ssh_path}
+              ffx_binary_path: {ffx_path}
+              ffx_subtools_search_path: {ffx_subtools_path}
+              ssh_priv_key: {ssh_key_path}
+              hard_reboot_on_fail: false
         MoblyParams:
           LogPath: {out_path}
         "#};
@@ -237,15 +382,21 @@
         let ssh = NamedTempFile::new().unwrap();
         let ssh_key = NamedTempFile::new().unwrap();
         let ffx = NamedTempFile::new().unwrap();
+        let ffx_subtools = TempDir::new().unwrap();
         let out_dir = TempDir::new().unwrap();
 
         let runner = MockRunner::default();
-        let driver = LocalDriver::new::<MockFinder>(
+        let driver = LocalDriver::new(
+            MockFinder {},
             None,
             ssh.path().to_path_buf(),
             Some(ssh_key.path().to_path_buf()),
             ffx.path().to_path_buf(),
+            Some(ffx_subtools.path().to_path_buf()),
             Some(out_dir.path().to_path_buf()),
+            None,
+            None,
+            None,
         )
         .unwrap();
 
@@ -255,13 +406,14 @@
         ";
         let params = serde_yaml::from_str(params_yaml).unwrap();
 
-        run(runner, driver, Some(params)).unwrap();
+        generate_config_and_run(runner, driver, Some(params)).unwrap();
 
         let got = std::fs::read_to_string(out_dir.path().join("config.yaml")).unwrap();
 
         let ssh_path = ssh.path().display().to_string();
         let ssh_key_path = ssh_key.path().display().to_string();
         let ffx_path = ffx.path().display().to_string();
+        let ffx_subtools_path = ffx_subtools.path().display();
         let out_path = out_dir.path().display();
         let want = formatdoc! {r#"
         TestBeds:
@@ -273,8 +425,9 @@
               take_bug_report_on_fail: true
               ssh_binary_path: {ssh_path}
               ffx_binary_path: {ffx_path}
+              ffx_subtools_search_path: {ffx_subtools_path}
               ssh_priv_key: {ssh_key_path}
-              hard_reboot_on_fail: true
+              hard_reboot_on_fail: false
           TestParams:
             sl4f_sanity_test_params:
               foo: bar
@@ -284,4 +437,68 @@
 
         assert_eq!(got, want);
     }
+
+    #[test]
+    fn local_with_ap() {
+        let ssh = NamedTempFile::new().unwrap();
+        let ssh_key = NamedTempFile::new().unwrap();
+        let ffx = NamedTempFile::new().unwrap();
+        let ffx_subtools = TempDir::new().unwrap();
+        let out_dir = TempDir::new().unwrap();
+        let ap_ssh_key = NamedTempFile::new().unwrap();
+        let ap_ssh_port: u16 = 1245;
+        let ap_ip = "192.168.1.1".to_string();
+
+        let runner = MockRunner::default();
+        let driver = LocalDriver::new(
+            MockFinder {},
+            None,
+            ssh.path().to_path_buf(),
+            Some(ssh_key.path().to_path_buf()),
+            ffx.path().to_path_buf(),
+            Some(ffx_subtools.path().to_path_buf()),
+            Some(out_dir.path().to_path_buf()),
+            Some(ap_ip.clone()),
+            Some(ap_ssh_port),
+            Some(ap_ssh_key.path().to_path_buf()),
+        )
+        .unwrap();
+
+        generate_config_and_run(runner, driver, None).unwrap();
+
+        let got = std::fs::read_to_string(out_dir.path().join("config.yaml")).unwrap();
+
+        let ssh_path = ssh.path().display();
+        let ssh_key_path = ssh_key.path().display();
+        let ap_ssh_key_path = ap_ssh_key.path().display();
+        let ffx_path = ffx.path().display();
+        let ffx_subtools_path = ffx_subtools.path().display();
+        let out_path = out_dir.path().display();
+        let want = formatdoc! {r#"
+        TestBeds:
+        - Name: {TESTBED_NAME}
+          Controllers:
+            FuchsiaDevice:
+            - mdns_name: {FUCHSIA_NAME}
+              ip: {FUCHSIA_ADDR}
+              take_bug_report_on_fail: true
+              ssh_binary_path: {ssh_path}
+              ffx_binary_path: {ffx_path}
+              ffx_subtools_search_path: {ffx_subtools_path}
+              ssh_priv_key: {ssh_key_path}
+              hard_reboot_on_fail: false
+            AccessPoint:
+            - wan_interface: eth0
+              ssh_config:
+                ssh_binary_path: {ssh_path}
+                host: {ap_ip}
+                port: {ap_ssh_port}
+                user: root
+                identity_file: {ap_ssh_key_path}
+        MoblyParams:
+          LogPath: {out_path}
+        "#};
+
+        assert_eq!(got, want);
+    }
 }
diff --git a/runner/src/finder.rs b/runner/src/finder.rs
index c381b36..9cb2032 100644
--- a/runner/src/finder.rs
+++ b/runner/src/finder.rs
@@ -4,12 +4,14 @@
 
 use crate::net::IpAddr;
 
-use std::io;
+use itertools::Itertools;
 use std::net::{Ipv6Addr, SocketAddr, SocketAddrV6, UdpSocket};
-use std::str;
+use std::path::PathBuf;
+use std::process::Command;
 use std::time::{Duration, Instant};
+use std::{io, str};
 
-use anyhow::{bail, Context, Result};
+use anyhow::{bail, format_err, Context, Result};
 use mdns::protocol as dns;
 use netext::{get_mcast_interfaces, IsLocalAddr, McastInterface};
 use packet::{InnerPacketBuilder, ParseBuffer};
@@ -27,7 +29,7 @@
 /// Find Fuchsia devices.
 pub(crate) trait Finder {
     /// Find a Fuchsia device, preferring `device_name` if specified.
-    fn find_device(device_name: Option<String>) -> Result<Answer>;
+    fn find_device(&self, device_name: Option<String>) -> Result<Answer>;
 }
 
 /// Answer from a Finder.
@@ -36,14 +38,63 @@
     pub name: String,
     /// IP address of the Fuchsia device.
     pub ip: IpAddr,
+    /// Port of the Fuchsia device for SSH.
+    pub ssh_port: Option<u16>,
 }
 
+pub(crate) struct FfxDevice {
+    pub ffx_binary: PathBuf,
+}
 pub(crate) struct MulticastDns {}
 
+impl Finder for FfxDevice {
+    /// Queries FFX for a registered device
+    fn find_device(&self, device_name: Option<String>) -> Result<Answer> {
+        let program = self.ffx_binary.clone().into_os_string().into_string().unwrap();
+        let mut args: Vec<&str> = vec!["--machine", "json"];
+        if device_name.is_some() {
+            args.push("-t");
+            args.push(device_name.as_ref().unwrap());
+        }
+        args.push("target");
+        args.push("show");
+
+        println!("Querying FFX for device parameters: {} {}", program, args.iter().format(" "));
+
+        let output = Command::new(program).args(args).output().expect("failed to execute process");
+        if output.status.success() {
+            let output_str = String::from_utf8(output.stdout).unwrap();
+            let output_json: serde_json::Value = serde_json::from_str(&output_str).unwrap();
+            let target = output_json["target"].as_object().unwrap();
+            let name = target["name"].as_str().unwrap();
+            let ssh_address = target["ssh_address"].as_object().unwrap();
+            let host = ssh_address["host"].as_str().unwrap();
+            let port = ssh_address["port"].as_u64().unwrap();
+            let ip = host
+                .replace("[", "") // FFX returns IPv6 addresses wrapped in brackets, which doesn't work with `.parse()`
+                .replace("]", "")
+                .parse()
+                .context(format!("Attempting to parse string into IP address: {}", host))
+                .unwrap();
+
+            let answer = Answer { name: name.to_string(), ip, ssh_port: Some(port as u16) };
+            println!("Device {} at {}:{:?}", answer.name, answer.ip, port);
+            Ok(answer)
+        } else {
+            return Err(format_err!(
+                "FFX exited with status {}: {} {}",
+                output.status,
+                String::from_utf8(output.stdout).unwrap(),
+                String::from_utf8(output.stderr).unwrap()
+            ));
+        }
+    }
+}
+
 impl Finder for MulticastDns {
     /// Find a Fuchsia device using mDNS. If `device_name` is not specified, the
     /// first device will be used.
-    fn find_device(device_name: Option<String>) -> Result<Answer> {
+    fn find_device(&self, device_name: Option<String>) -> Result<Answer> {
         let interfaces =
             get_mcast_interfaces().context("Failed to list multicast-enabled interfaces")?;
         let interface_names =
@@ -62,8 +113,8 @@
         send_queries(&socket, interfaces.iter()).context("Failed to send mDNS queries")?;
         let answer = listen_for_answers(socket, device_name)?;
 
-        println!("Device {} found at {}", answer.name, answer.ip);
-        Ok(answer)
+        println!("Device {} found at {}", answer.name, answer.ip,);
+        Ok(Answer { name: answer.name, ip: answer.ip, ssh_port: None })
     }
 }
 
@@ -145,7 +196,12 @@
     Ok(())
 }
 
-fn listen_for_answers(socket: Socket, device_name: Option<String>) -> Result<Answer> {
+struct MdnsAnswer {
+    name: String,
+    ip: IpAddr,
+}
+
+fn listen_for_answers(socket: Socket, device_name: Option<String>) -> Result<MdnsAnswer> {
     let s: UdpSocket = socket.into();
     let mut buf = [0; 1500];
 
@@ -171,7 +227,7 @@
                                         .to_string()
                                         .trim_end_matches(".local")
                                         .to_string();
-                                    let scope_id = src_v6.scope_id();
+                                    let scope_id = scope_id_to_name_checked(src_v6.scope_id())?;
 
                                     if let Some(ref device) = device_name {
                                         if &name != device {
@@ -180,7 +236,7 @@
                                         }
                                     }
 
-                                    return Ok(Answer {
+                                    return Ok(MdnsAnswer {
                                         name,
                                         ip: IpAddr::V6(addr, Some(scope_id)),
                                     });
@@ -198,3 +254,13 @@
 
     bail!("device {device_name:?} not found")
 }
+
+fn scope_id_to_name_checked(scope_id: u32) -> Result<String> {
+    let mut buf = vec![0; libc::IF_NAMESIZE];
+    let res = unsafe { libc::if_indextoname(scope_id, buf.as_mut_ptr() as *mut libc::c_char) };
+    if res.is_null() {
+        bail!("{scope_id} is not a valid network interface ID")
+    } else {
+        Ok(String::from_utf8_lossy(&buf.split(|&c| c == 0u8).next().unwrap_or(&[0u8])).to_string())
+    }
+}
diff --git a/runner/src/main.rs b/runner/src/main.rs
index 4252694..d2e89dc 100644
--- a/runner/src/main.rs
+++ b/runner/src/main.rs
@@ -13,13 +13,13 @@
 use crate::driver::infra::{InfraDriver, InfraDriverError};
 use crate::runner::ExitStatus;
 
+use std::fs;
 use std::fs::File;
 use std::path::PathBuf;
-use std::{fs, process::ExitCode};
+use std::process::ExitCode;
 
 use anyhow::{Context, Result};
 use argh::FromArgs;
-use serde_yaml;
 use serde_yaml::Value;
 
 #[derive(FromArgs)]
@@ -43,6 +43,10 @@
     #[argh(option, from_str_fn(parse_file))]
     ffx_binary: PathBuf,
 
+    /// search path for FFX subtools used to communicate with Fuchsia
+    #[argh(option, from_str_fn(parse_directory))]
+    ffx_subtools_search_path: Option<PathBuf>,
+
     /// path to the python interpreter binary (e.g. /bin/python3.9)
     #[argh(option)]
     python_bin: String,
@@ -60,6 +64,27 @@
     /// "test_params" key in the antlion config
     #[argh(option, from_str_fn(parse_file))]
     test_params: Option<PathBuf>,
+
+    /// list of test cases to run; defaults to all test cases
+    #[argh(positional)]
+    test_cases: Vec<String>,
+
+    /// user-defined configuration for the test; overrides all other options related to the test
+    /// configuration. By default, a config file will be generated based on the other parameters.
+    #[argh(option, from_str_fn(parse_file))]
+    config_override: Option<PathBuf>,
+
+    /// ip of the AP
+    #[argh(option)]
+    ap_ip: Option<String>,
+
+    /// ssh port of the AP
+    #[argh(option)]
+    ap_ssh_port: Option<u16>,
+
+    /// path to the SSH private key used to communicate with the AP
+    #[argh(option, from_str_fn(parse_file))]
+    ap_ssh_key: Option<PathBuf>,
 }
 
 fn parse_file(s: &str) -> Result<PathBuf, String> {
@@ -78,7 +103,25 @@
     Ok(path)
 }
 
-fn run<R, D>(runner: R, driver: D, test_params: Option<Value>) -> Result<ExitCode>
+fn run_with_config<R>(runner: R, config_path: PathBuf) -> Result<ExitCode>
+where
+    R: runner::Runner,
+{
+    let exit_code = runner.run(config_path).context("Failed to run antlion")?;
+    match exit_code {
+        ExitStatus::Ok => println!("Antlion successfully exited"),
+        ExitStatus::Err(code) => eprintln!("Antlion failed with status code {}", code),
+        ExitStatus::Interrupt(Some(code)) => eprintln!("Antlion interrupted by signal {}", code),
+        ExitStatus::Interrupt(None) => eprintln!("Antlion interrupted by signal"),
+    };
+    Ok(exit_code.into())
+}
+
+fn generate_config_and_run<R, D>(
+    runner: R,
+    driver: D,
+    test_params: Option<Value>,
+) -> Result<ExitCode>
 where
     R: runner::Runner,
     D: driver::Driver,
@@ -93,26 +136,24 @@
 
     let output_path = driver.output_path().to_path_buf();
     let config_path = output_path.join("config.yaml");
-    println!("Writing {}", config_path.display());
+    println!("Generating config {}", config_path.display());
     println!("\n{yaml}\n");
     fs::write(&config_path, yaml).context("Failed to write config to file")?;
 
-    let exit_code = runner.run(config_path).context("Failed to run antlion")?;
-    match exit_code {
-        ExitStatus::Ok => println!("Antlion successfully exited"),
-        ExitStatus::Err(code) => eprintln!("Antlion failed with status code {}", code),
-        ExitStatus::Interrupt(Some(code)) => eprintln!("Antlion interrupted by signal {}", code),
-        ExitStatus::Interrupt(None) => eprintln!("Antlion interrupted by signal"),
-    };
+    let result = run_with_config(runner, config_path);
     driver.teardown().context("Failed to teardown environment")?;
-    Ok(exit_code.into())
+
+    result
 }
 
 fn main() -> Result<ExitCode> {
     let args: Args = argh::from_env();
     let env = env::LocalEnvironment;
-    let runner =
-        runner::ProcessRunner { python_bin: args.python_bin, antlion_pyz: args.antlion_pyz };
+    let runner = runner::ProcessRunner {
+        python_bin: args.python_bin,
+        antlion_pyz: args.antlion_pyz,
+        test_cases: args.test_cases,
+    };
 
     let test_params = match args.test_params {
         Some(path) => {
@@ -125,8 +166,18 @@
         None => None,
     };
 
-    match InfraDriver::new(env, args.ssh_binary.clone(), args.ffx_binary.clone()) {
-        Ok(env) => return run(runner, env, test_params),
+    if let Some(config_path) = args.config_override {
+        println!("Using config at {}", config_path.display());
+        return run_with_config(runner, config_path);
+    }
+
+    match InfraDriver::new(
+        env,
+        args.ssh_binary.clone(),
+        args.ffx_binary.clone(),
+        args.ffx_subtools_search_path.clone(),
+    ) {
+        Ok(env) => return generate_config_and_run(runner, env, test_params),
         Err(InfraDriverError::NotDetected(_)) => {}
         Err(InfraDriverError::Config(e)) => {
             return Err(anyhow::Error::from(e).context("Config validation"))
@@ -136,14 +187,39 @@
         }
     };
 
-    let env = driver::local::LocalDriver::new::<finder::MulticastDns>(
+    let ffx_finder = finder::FfxDevice { ffx_binary: args.ffx_binary.clone() };
+    let driver_via_ffx_discovery = driver::local::LocalDriver::new(
+        ffx_finder,
         args.device.clone(),
         args.ssh_binary.clone(),
         args.ssh_key.clone(),
         args.ffx_binary.clone(),
+        args.ffx_subtools_search_path.clone(),
         args.out_dir.clone(),
-    )
-    .context("Failed to detect local environment")?;
+        args.ap_ip.clone(),
+        args.ap_ssh_port,
+        args.ap_ssh_key.clone(),
+    );
+    match driver_via_ffx_discovery {
+        Ok(driver) => return generate_config_and_run(runner, driver, test_params),
+        Err(e) => {
+            println!("Failed to generate device config via FFX: {:?}", e);
+            println!("Falling back to mDNS discovery");
+        }
+    };
 
-    run(runner, env, test_params)
+    let driver = driver::local::LocalDriver::new(
+        finder::MulticastDns {},
+        args.device.clone(),
+        args.ssh_binary.clone(),
+        args.ssh_key.clone(),
+        args.ffx_binary.clone(),
+        args.ffx_subtools_search_path.clone(),
+        args.out_dir.clone(),
+        args.ap_ip.clone(),
+        args.ap_ssh_port,
+        args.ap_ssh_key.clone(),
+    )
+    .context("Failed to generate config for local environment")?;
+    generate_config_and_run(runner, driver, test_params)
 }
diff --git a/runner/src/net.rs b/runner/src/net.rs
index 70db2eb..35dc07a 100644
--- a/runner/src/net.rs
+++ b/runner/src/net.rs
@@ -7,17 +7,16 @@
 use std::net::{Ipv4Addr, Ipv6Addr};
 
 use netext::IsLocalAddr;
-use nix::net::if_::if_nametoindex;
 use serde::{Deserialize, Serialize};
 use thiserror::Error;
 
 /// IP address with support for IPv6 scope identifiers as defined in RFC 4007.
-#[derive(Copy, Clone, Eq, PartialEq, Hash, PartialOrd, Ord)]
+#[derive(Clone, Eq, PartialEq, Hash, PartialOrd, Ord)]
 pub enum IpAddr {
     /// An IPv4 address.
     V4(Ipv4Addr),
     /// An IPv6 address with optional scope identifier.
-    V6(Ipv6Addr, Option<u32>),
+    V6(Ipv6Addr, Option<String>),
 }
 
 impl Into<std::net::IpAddr> for IpAddr {
@@ -80,13 +79,10 @@
                 if !ip.is_link_local_addr() {
                     return Err(AddrParseError::ScopeNotSupported);
                 }
-                if let Ok(index) = scope.parse::<u32>() {
-                    return Ok(IpAddr::V6(ip, Some(index)));
+                if scope.len() == 0 {
+                    return Err(AddrParseError::InterfaceNotFound(scope.to_string()))
                 }
-                match if_nametoindex(scope) {
-                    Ok(index) => Ok(IpAddr::V6(ip, Some(index))),
-                    Err(_) => Err(AddrParseError::InterfaceNotFound(scope.to_string())),
-                }
+                Ok(IpAddr::V6(ip, Some(scope.to_string())))
             }
         }
     }
@@ -207,7 +203,7 @@
             "fe80::1%1".parse::<IpAddr>(),
             Ok(IpAddr::V6(ip, Some(scope)))
                 if ip == "fe80::1".parse::<std::net::Ipv6Addr>().unwrap()
-                && scope == 1
+                && scope == "1"
         );
     }
 
@@ -219,13 +215,5 @@
             Err(AddrParseError::InterfaceNotFound(name))
                 if name == ""
         );
-
-        // The trailing '%' forces a failed lookup. At the time of writing, no
-        // OS supports this character as part of interface names.
-        assert_matches!(
-            "fe80::1%eth0%".parse::<IpAddr>(),
-            Err(AddrParseError::InterfaceNotFound(name))
-                if name == "eth0%"
-        );
     }
 }
diff --git a/runner/src/runner.rs b/runner/src/runner.rs
index c40e05d..9f4519e 100644
--- a/runner/src/runner.rs
+++ b/runner/src/runner.rs
@@ -2,14 +2,26 @@
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
 
+use signal_hook::consts::signal::{SIGINT, SIGQUIT, SIGTERM};
 #[cfg(unix)]
 use std::os::unix::process::ExitStatusExt;
-use std::process::Command;
-use std::{path::PathBuf, process::ExitCode};
+use std::path::PathBuf;
+use std::process::{Command, ExitCode};
+use std::sync::atomic::{AtomicUsize, Ordering};
+use std::sync::Arc;
+use std::time::{Duration, Instant};
 
-use anyhow::{Context, Result};
+use anyhow::{bail, Context, Result};
 use itertools::Itertools;
 
+// Time to wait for antlion to cleanup after receiving a termination signal. If
+// the process is unable to terminate within this time, the process will be
+// killed without further warning.
+const TERM_TIMEOUT_SEC: u64 = 3;
+
+// Busy-wait sleep duration between polling for antlion termination.
+const TERM_CHECK_INTERVAL_MS: u64 = 100;
+
 /// Runner for dispatching antlion.
 pub(crate) trait Runner {
     /// Run antlion using the provided config and output directory.
@@ -20,28 +32,108 @@
 pub(crate) struct ProcessRunner {
     pub python_bin: String,
     pub antlion_pyz: PathBuf,
+    pub test_cases: Vec<String>,
+}
+
+// TODO(http://b/401318909): Remove this once Fuchsia Controller no longer panics during teardown.
+fn test_affected_by_b_401318909(test_name: String) -> bool {
+    let test_substrings_affected_by_b_401318909 = [
+        "channel_switch_test",
+        "deprecated_configuration_test"
+    ];
+
+    for test in test_substrings_affected_by_b_401318909 {
+        if test_name.contains(test) {
+            return true;
+        }
+    }
+
+    false
 }
 
 impl Runner for ProcessRunner {
     fn run(&self, config: PathBuf) -> Result<ExitStatus> {
-        let args = [
-            &self.antlion_pyz.clone().into_os_string().into_string().unwrap(),
-            "--config",
-            &config.into_os_string().into_string().unwrap(),
+        let mut args = vec![
+            self.antlion_pyz.clone().into_os_string().into_string().unwrap(),
+            "--config".to_string(),
+            config.into_os_string().into_string().unwrap(),
         ];
 
+        if !self.test_cases.is_empty() {
+            args.push("--test_case".to_string());
+            for test_case in self.test_cases.iter() {
+                args.push(test_case.clone());
+            }
+        }
+
         println!(
             "Launching antlion to run: \"{} {}\"\n",
             &self.python_bin,
             args.iter().format(" "),
         );
 
-        let status = Command::new(&self.python_bin)
-            .args(args)
-            .status()
-            .context("Failed to execute antlion")?;
+        let mut child =
+            Command::new(&self.python_bin).args(args).spawn().context("Failed to spawn antlion")?;
 
-        Ok(ExitStatus::from(status))
+        // Start monitoring for termination signals.
+        let term = Arc::new(AtomicUsize::new(0));
+        signal_hook::flag::register_usize(SIGINT, term.clone(), SIGINT as usize)?;
+        signal_hook::flag::register_usize(SIGTERM, term.clone(), SIGTERM as usize)?;
+        signal_hook::flag::register_usize(SIGQUIT, term.clone(), SIGQUIT as usize)?;
+
+        loop {
+            if let Some(exit_status) =
+                child.try_wait().context("Failed waiting for antlion to finish")?
+            {
+                if exit_status.core_dumped() {
+                    if test_affected_by_b_401318909(
+                        self.antlion_pyz.clone().into_os_string().into_string().unwrap()
+                    ) {
+                        eprintln!(
+                            "Received expected core dump after running test. \
+                            Remove this once http://b/401318909 has been resolved."
+                        );
+                        return Ok(ExitStatus::Ok);
+                    } else {
+                        bail!(
+                            "Expected core dump after running test, but didn't receive one. \
+                            Perhaps http://b/401318909 has been resolved? If so, remove this failure."
+                        );
+                    }
+                }
+
+                return Ok(ExitStatus::from(exit_status));
+            }
+
+            let signal = term.load(Ordering::Relaxed) as i32;
+            if signal != 0 {
+                println!("Forwarding signal {signal} to antlion");
+                nix::sys::signal::kill(
+                    nix::unistd::Pid::from_raw(
+                        child.id().try_into().context("Failed to convert pid to i32")?,
+                    ),
+                    Some(signal.try_into().context("Failed to convert signal")?),
+                )
+                .context("Failed to forward signal to antlion")?;
+
+                println!("Waiting {} seconds for antlion to terminate", TERM_TIMEOUT_SEC);
+                let timeout = Instant::now() + Duration::from_secs(TERM_TIMEOUT_SEC);
+                while Instant::now() < timeout {
+                    if let Some(_) =
+                        child.try_wait().context("Failed waiting for antlion to finish")?
+                    {
+                        return Ok(ExitStatus::Interrupt(Some(signal)));
+                    }
+                    std::thread::sleep(std::time::Duration::from_millis(TERM_CHECK_INTERVAL_MS));
+                }
+
+                eprintln!("antlion is unresponsive, killing process");
+                child.kill().context("Failed to kill antlion process")?;
+                return Ok(ExitStatus::Interrupt(Some(signal)));
+            }
+
+            std::thread::sleep(std::time::Duration::from_millis(TERM_CHECK_INTERVAL_MS));
+        }
     }
 }
 
diff --git a/setup.py b/setup.py
index 28f080f..f6b0241 100644
--- a/setup.py
+++ b/setup.py
@@ -14,16 +14,15 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from setuptools import setup, find_packages
+from setuptools import find_packages, setup
 
 install_requires = [
-    "mobly==1.12.0",
+    "mobly==1.12.2",
     "pyyaml>=5.1",
     "tenacity~=8.0",
     # TODO(b/240443856): Remove these dependencies once antlion runs in
     # Fuchsia's LUCI infrastructure. These are needed for flashing and using
     # mDNS discovery, which are unnecessary in the future infrastructure.
-    "usbinfo",
     "psutil",
     "zeroconf",
 ]
@@ -34,9 +33,9 @@
     description="Host-driven, hardware-agnostic Fuchsia connectivity tests",
     license="Apache-2.0",
     packages=find_packages(
-        where="src",
+        where="packages",
     ),
-    package_dir={"": "src"},
+    package_dir={"": "packages"},
     include_package_data=True,
     tests_require=[],
     install_requires=install_requires,
diff --git a/src/antlion/base_test.py b/src/antlion/base_test.py
deleted file mode 100755
index 5033552..0000000
--- a/src/antlion/base_test.py
+++ /dev/null
@@ -1,976 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import fnmatch
-import functools
-import importlib
-import logging
-import os
-import traceback
-from concurrent.futures import ThreadPoolExecutor
-
-from antlion import error
-from antlion import keys
-from antlion import logger
-from antlion import records
-from antlion import signals
-from antlion import tracelogger
-from antlion import utils
-from antlion.event import event_bus
-from antlion.event import subscription_bundle
-from antlion.event.decorators import subscribe_static
-from antlion.event.event import TestCaseBeginEvent
-from antlion.event.event import TestCaseEndEvent
-from antlion.event.event import TestClassBeginEvent
-from antlion.event.event import TestClassEndEvent
-from antlion.event.subscription_bundle import SubscriptionBundle
-
-from mobly import asserts
-from mobly.base_test import BaseTestClass as MoblyBaseTest
-from mobly.records import ExceptionRecord
-
-# Macro strings for test result reporting
-TEST_CASE_TOKEN = "[Test Case]"
-RESULT_LINE_TEMPLATE = TEST_CASE_TOKEN + " %s %s"
-
-
-@subscribe_static(TestCaseBeginEvent)
-def _logcat_log_test_begin(event):
-    """Ensures that logcat is running. Write a logcat line indicating test case
-    begin."""
-    test_instance = event.test_class
-    try:
-        for ad in getattr(test_instance, "android_devices", []):
-            if not ad.is_adb_logcat_on:
-                ad.start_adb_logcat()
-            # Write test start token to adb log if android device is attached.
-            if not ad.skip_sl4a and ad.droid:
-                ad.droid.logV("%s BEGIN %s" % (TEST_CASE_TOKEN, event.test_case_name))
-
-    except error.ActsError as e:
-        test_instance.results.error.append(
-            ExceptionRecord(e, "Logcat for test begin: %s" % event.test_case_name)
-        )
-        test_instance.log.error("BaseTest setup_test error: %s" % e.details)
-    except Exception as e:
-        test_instance.log.warning("Unable to send BEGIN log command to all devices.")
-        test_instance.log.warning("Error: %s" % e)
-
-
-@subscribe_static(TestCaseEndEvent)
-def _logcat_log_test_end(event):
-    """Write a logcat line indicating test case end."""
-    test_instance = event.test_class
-    try:
-        # Write test end token to adb log if android device is attached.
-        for ad in getattr(test_instance, "android_devices", []):
-            if not ad.skip_sl4a and ad.droid:
-                ad.droid.logV("%s END %s" % (TEST_CASE_TOKEN, event.test_case_name))
-
-    except error.ActsError as e:
-        test_instance.results.error.append(
-            ExceptionRecord(e, "Logcat for test end: %s" % event.test_case_name)
-        )
-        test_instance.log.error("BaseTest teardown_test error: %s" % e.details)
-    except Exception as e:
-        test_instance.log.warning("Unable to send END log command to all devices.")
-        test_instance.log.warning("Error: %s" % e)
-
-
-@subscribe_static(TestCaseBeginEvent)
-def _syslog_log_test_begin(event):
-    """This adds a BEGIN log message with the test name to the syslog of any
-    Fuchsia device"""
-    test_instance = event.test_class
-    try:
-        for fd in getattr(test_instance, "fuchsia_devices", []):
-            if hasattr(fd, "_sl4f"):
-                fd.sl4f.logging_lib.logI(
-                    "%s BEGIN %s" % (TEST_CASE_TOKEN, event.test_case_name)
-                )
-
-    except Exception as e:
-        test_instance.log.warning("Unable to send BEGIN log command to all devices.")
-        test_instance.log.warning("Error: %s" % e)
-
-
-@subscribe_static(TestCaseEndEvent)
-def _syslog_log_test_end(event):
-    """This adds a END log message with the test name to the syslog of any
-    Fuchsia device"""
-    test_instance = event.test_class
-    try:
-        for fd in getattr(test_instance, "fuchsia_devices", []):
-            if hasattr(fd, "_sl4f"):
-                fd.sl4f.logging_lib.logI(
-                    "%s END %s" % (TEST_CASE_TOKEN, event.test_case_name)
-                )
-
-    except Exception as e:
-        test_instance.log.warning("Unable to send END log command to all devices.")
-        test_instance.log.warning("Error: %s" % e)
-
-
-event_bus.register_subscription(_logcat_log_test_begin.subscription)
-event_bus.register_subscription(_logcat_log_test_end.subscription)
-event_bus.register_subscription(_syslog_log_test_begin.subscription)
-event_bus.register_subscription(_syslog_log_test_end.subscription)
-
-
-class Error(Exception):
-    """Raised for exceptions that occured in BaseTestClass."""
-
-
-class BaseTestClass(MoblyBaseTest):
-    """Base class for all test classes to inherit from. Inherits some
-    functionality from Mobly's base test class.
-
-    This class gets all the controller objects from test_runner and executes
-    the test cases requested within itself.
-
-    Most attributes of this class are set at runtime based on the configuration
-    provided.
-
-    Attributes:
-        tests: A list of strings, each representing a test case name.
-        TAG: A string used to refer to a test class. Default is the test class
-             name.
-        log: A logger object used for logging.
-        results: A records.TestResult object for aggregating test results from
-                 the execution of test cases.
-        controller_configs: A dict of controller configs provided by the user
-                            via the testbed config.
-        consecutive_failures: Tracks the number of consecutive test case
-                              failures within this class.
-        consecutive_failure_limit: Number of consecutive test failures to allow
-                                   before blocking remaining tests in the same
-                                   test class.
-        size_limit_reached: True if the size of the log directory has reached
-                            its limit.
-        current_test_name: A string that's the name of the test case currently
-                           being executed. If no test is executing, this should
-                           be None.
-    """
-
-    TAG = None
-
-    def __init__(self, configs):
-        """Initializes a BaseTestClass given a TestRunConfig, which provides
-        all of the config information for this test class.
-
-        Args:
-            configs: A config_parser.TestRunConfig object.
-        """
-        super().__init__(configs)
-
-        self.__handle_file_user_params()
-
-        self.class_subscriptions = SubscriptionBundle()
-        self.class_subscriptions.register()
-        self.all_subscriptions = [self.class_subscriptions]
-
-        self.current_test_name = None
-        self.log = tracelogger.TraceLogger(logging.getLogger())
-        # TODO: remove after converging log path definitions with mobly
-        self.log_path = configs.log_path
-
-        self.consecutive_failures = 0
-        self.consecutive_failure_limit = self.user_params.get(
-            "consecutive_failure_limit", -1
-        )
-        self.size_limit_reached = False
-        self.retryable_exceptions = signals.TestFailure
-
-    def _import_builtin_controllers(self):
-        """Import built-in controller modules.
-
-        Go through the testbed configs, find any built-in controller configs
-        and import the corresponding controller module from antlion.controllers
-        package.
-
-        Returns:
-            A list of controller modules.
-        """
-        builtin_controllers = []
-        for ctrl_name in keys.Config.builtin_controller_names.value:
-            if ctrl_name in self.controller_configs:
-                module_name = keys.get_module_name(ctrl_name)
-                module = importlib.import_module("antlion.controllers.%s" % module_name)
-                builtin_controllers.append(module)
-        return builtin_controllers
-
-    def __handle_file_user_params(self):
-        """For backwards compatibility, moves all contents of the "files" dict
-        into the root level of user_params.
-
-        This allows existing tests to run with the new Mobly-style format
-        without needing to make changes.
-        """
-        for key, value in self.user_params.items():
-            if key.endswith("files") and isinstance(value, dict):
-                new_user_params = dict(value)
-                new_user_params.update(self.user_params)
-                self.user_params = new_user_params
-                break
-
-    @staticmethod
-    def get_module_reference_name(a_module):
-        """Returns the module's reference name.
-
-        This is largely for backwards compatibility with log parsing. If the
-        module defines ACTS_CONTROLLER_REFERENCE_NAME, it will return that
-        value, or the module's submodule name.
-
-        Args:
-            a_module: Any module. Ideally, a controller module.
-        Returns:
-            A string corresponding to the module's name.
-        """
-        if hasattr(a_module, "ACTS_CONTROLLER_REFERENCE_NAME"):
-            return a_module.ACTS_CONTROLLER_REFERENCE_NAME
-        else:
-            return a_module.__name__.split(".")[-1]
-
-    def register_controller(self, controller_module, required=True, builtin=False):
-        """Registers an ACTS controller module for a test class. Invokes Mobly's
-        implementation of register_controller.
-
-        An ACTS controller module is a Python lib that can be used to control
-        a device, service, or equipment. To be ACTS compatible, a controller
-        module needs to have the following members:
-
-            def create(configs):
-                [Required] Creates controller objects from configurations.
-                Args:
-                    configs: A list of serialized data like string/dict. Each
-                             element of the list is a configuration for a
-                             controller object.
-                Returns:
-                    A list of objects.
-
-            def destroy(objects):
-                [Required] Destroys controller objects created by the create
-                function. Each controller object shall be properly cleaned up
-                and all the resources held should be released, e.g. memory
-                allocation, sockets, file handlers etc.
-                Args:
-                    A list of controller objects created by the create function.
-
-            def get_info(objects):
-                [Optional] Gets info from the controller objects used in a test
-                run. The info will be included in test_result_summary.json under
-                the key "ControllerInfo". Such information could include unique
-                ID, version, or anything that could be useful for describing the
-                test bed and debugging.
-                Args:
-                    objects: A list of controller objects created by the create
-                             function.
-                Returns:
-                    A list of json serializable objects, each represents the
-                    info of a controller object. The order of the info object
-                    should follow that of the input objects.
-        Registering a controller module declares a test class's dependency the
-        controller. If the module config exists and the module matches the
-        controller interface, controller objects will be instantiated with
-        corresponding configs. The module should be imported first.
-
-        Args:
-            controller_module: A module that follows the controller module
-                interface.
-            required: A bool. If True, failing to register the specified
-                controller module raises exceptions. If False, returns None upon
-                failures.
-            builtin: Specifies that the module is a builtin controller module in
-                ACTS. If true, adds itself to test attributes.
-        Returns:
-            A list of controller objects instantiated from controller_module, or
-            None.
-
-        Raises:
-            When required is True, ControllerError is raised if no corresponding
-            config can be found.
-            Regardless of the value of "required", ControllerError is raised if
-            the controller module has already been registered or any other error
-            occurred in the registration process.
-        """
-        module_ref_name = self.get_module_reference_name(controller_module)
-        module_config_name = controller_module.MOBLY_CONTROLLER_CONFIG_NAME
-
-        # Get controller objects from Mobly's register_controller
-        controllers = self._controller_manager.register_controller(
-            controller_module, required=required
-        )
-        if not controllers:
-            return None
-
-        # Log controller information
-        # Implementation of "get_info" is optional for a controller module.
-        if hasattr(controller_module, "get_info"):
-            controller_info = controller_module.get_info(controllers)
-            self.log.info("Controller %s: %s", module_config_name, controller_info)
-
-        if builtin:
-            setattr(self, module_ref_name, controllers)
-        return controllers
-
-    def _setup_class(self):
-        """Proxy function to guarantee the base implementation of setup_class
-        is called.
-        """
-        event_bus.post(TestClassBeginEvent(self))
-        # Import and register the built-in controller modules specified
-        # in testbed config.
-        for module in self._import_builtin_controllers():
-            self.register_controller(module, builtin=True)
-        return self.setup_class()
-
-    def _teardown_class(self):
-        """Proxy function to guarantee the base implementation of teardown_class
-        is called.
-        """
-        super()._teardown_class()
-        event_bus.post(TestClassEndEvent(self, self.results))
-
-    def _setup_test(self, test_name):
-        """Proxy function to guarantee the base implementation of setup_test is
-        called.
-        """
-        self.current_test_name = test_name
-
-        # Skip the test if the consecutive test case failure limit is reached.
-        if self.consecutive_failures == self.consecutive_failure_limit:
-            raise signals.TestError("Consecutive test failure")
-
-        return self.setup_test()
-
-    def setup_test(self):
-        """Setup function that will be called every time before executing each
-        test case in the test class.
-
-        To signal setup failure, return False or raise an exception. If
-        exceptions were raised, the stack trace would appear in log, but the
-        exceptions would not propagate to upper levels.
-
-        Implementation is optional.
-        """
-        return True
-
-    def _teardown_test(self, test_name):
-        """Proxy function to guarantee the base implementation of teardown_test
-        is called.
-        """
-        self.log.debug("Tearing down test %s" % test_name)
-        self.teardown_test()
-
-    def _on_fail(self, record):
-        """Proxy function to guarantee the base implementation of on_fail is
-        called.
-
-        Args:
-            record: The records.TestResultRecord object for the failed test
-                    case.
-        """
-        self.consecutive_failures += 1
-        if record.details:
-            self.log.error(record.details)
-        self.log.info(RESULT_LINE_TEMPLATE, record.test_name, record.result)
-        self.on_fail(record.test_name, record.begin_time)
-
-    def on_fail(self, test_name, begin_time):
-        """A function that is executed upon a test case failure.
-
-        User implementation is optional.
-
-        Args:
-            test_name: Name of the test that triggered this function.
-            begin_time: Logline format timestamp taken when the test started.
-        """
-
-    def _on_pass(self, record):
-        """Proxy function to guarantee the base implementation of on_pass is
-        called.
-
-        Args:
-            record: The records.TestResultRecord object for the passed test
-                    case.
-        """
-        self.consecutive_failures = 0
-        msg = record.details
-        if msg:
-            self.log.info(msg)
-        self.log.info(RESULT_LINE_TEMPLATE, record.test_name, record.result)
-        self.on_pass(record.test_name, record.begin_time)
-
-    def on_pass(self, test_name, begin_time):
-        """A function that is executed upon a test case passing.
-
-        Implementation is optional.
-
-        Args:
-            test_name: Name of the test that triggered this function.
-            begin_time: Logline format timestamp taken when the test started.
-        """
-
-    def _on_skip(self, record):
-        """Proxy function to guarantee the base implementation of on_skip is
-        called.
-
-        Args:
-            record: The records.TestResultRecord object for the skipped test
-                    case.
-        """
-        self.log.info(RESULT_LINE_TEMPLATE, record.test_name, record.result)
-        self.log.info("Reason to skip: %s", record.details)
-        self.on_skip(record.test_name, record.begin_time)
-
-    def on_skip(self, test_name, begin_time):
-        """A function that is executed upon a test case being skipped.
-
-        Implementation is optional.
-
-        Args:
-            test_name: Name of the test that triggered this function.
-            begin_time: Logline format timestamp taken when the test started.
-        """
-
-    def _on_exception(self, record):
-        """Proxy function to guarantee the base implementation of on_exception
-        is called.
-
-        Args:
-            record: The records.TestResultRecord object for the failed test
-                    case.
-        """
-        self.log.exception(record.details)
-        self.on_exception(record.test_name, record.begin_time)
-
-    def on_exception(self, test_name, begin_time):
-        """A function that is executed upon an unhandled exception from a test
-        case.
-
-        Implementation is optional.
-
-        Args:
-            test_name: Name of the test that triggered this function.
-            begin_time: Logline format timestamp taken when the test started.
-        """
-
-    def on_retry(self):
-        """Function to run before retrying a test through get_func_with_retry.
-
-        This function runs when a test is automatically retried. The function
-        can be used to modify internal test parameters, for example, to retry
-        a test with slightly different input variables.
-        """
-
-    def _exec_procedure_func(self, func, tr_record):
-        """Executes a procedure function like on_pass, on_fail etc.
-
-        This function will alternate the 'Result' of the test's record if
-        exceptions happened when executing the procedure function.
-
-        This will let signals.TestAbortAll through so abort_all works in all
-        procedure functions.
-
-        Args:
-            func: The procedure function to be executed.
-            tr_record: The TestResultRecord object associated with the test
-                       case executed.
-        """
-        try:
-            func(tr_record)
-        except signals.TestAbortAll:
-            raise
-        except Exception as e:
-            self.log.exception(
-                "Exception happened when executing %s for %s.",
-                func.__name__,
-                self.current_test_name,
-            )
-            tr_record.add_error(func.__name__, e)
-
-    def exec_one_testcase(self, test_name, test_func):
-        """Executes one test case and update test results.
-
-        Executes one test case, create a records.TestResultRecord object with
-        the execution information, and add the record to the test class's test
-        results.
-
-        Args:
-            test_name: Name of the test.
-            test_func: The test function.
-        """
-        class_name = self.__class__.__name__
-        tr_record = records.TestResultRecord(test_name, class_name)
-        tr_record.test_begin()
-        self.begin_time = int(tr_record.begin_time)
-        self.log_begin_time = tr_record.log_begin_time
-        self.test_name = tr_record.test_name
-        event_bus.post(TestCaseBeginEvent(self, self.test_name))
-        self.log.info("%s %s", TEST_CASE_TOKEN, test_name)
-
-        # Enable test retry if specified in the ACTS config
-        retry_tests = self.user_params.get("retry_tests", [])
-        full_test_name = "%s.%s" % (class_name, self.test_name)
-        if any(name in retry_tests for name in [class_name, full_test_name]):
-            test_func = self.get_func_with_retry(test_func)
-
-        verdict = None
-        test_signal = None
-        try:
-            try:
-                ret = self._setup_test(self.test_name)
-                asserts.assert_true(
-                    ret is not False, "Setup for %s failed." % test_name
-                )
-                verdict = test_func()
-            finally:
-                try:
-                    self._teardown_test(self.test_name)
-                except signals.TestAbortAll:
-                    raise
-                except Exception as e:
-                    self.log.error(traceback.format_exc())
-                    tr_record.add_error("teardown_test", e)
-        except (signals.TestFailure, AssertionError) as e:
-            test_signal = e
-            if self.user_params.get(
-                keys.Config.key_test_failure_tracebacks.value, False
-            ):
-                self.log.exception(e)
-            tr_record.test_fail(e)
-        except signals.TestSkip as e:
-            # Test skipped.
-            test_signal = e
-            tr_record.test_skip(e)
-        except (signals.TestAbortClass, signals.TestAbortAll) as e:
-            # Abort signals, pass along.
-            test_signal = e
-            tr_record.test_fail(e)
-            raise e
-        except signals.TestPass as e:
-            # Explicit test pass.
-            test_signal = e
-            tr_record.test_pass(e)
-        except Exception as e:
-            test_signal = e
-            self.log.error(traceback.format_exc())
-            # Exception happened during test.
-            tr_record.test_error(e)
-        else:
-            if verdict or (verdict is None):
-                # Test passed.
-                tr_record.test_pass()
-                return
-            tr_record.test_fail()
-        finally:
-            tr_record.update_record()
-            try:
-                # Execute post-test procedures
-                result = tr_record.result
-                if result == records.TestResultEnums.TEST_RESULT_PASS:
-                    self._exec_procedure_func(self._on_pass, tr_record)
-                elif result == records.TestResultEnums.TEST_RESULT_FAIL:
-                    self._exec_procedure_func(self._on_fail, tr_record)
-                elif result == records.TestResultEnums.TEST_RESULT_SKIP:
-                    self._exec_procedure_func(self._on_skip, tr_record)
-                elif result == records.TestResultEnums.TEST_RESULT_ERROR:
-                    self._exec_procedure_func(self._on_exception, tr_record)
-                    self._exec_procedure_func(self._on_fail, tr_record)
-            finally:
-                self.results.add_record(tr_record)
-                self.summary_writer.dump(
-                    tr_record.to_dict(), records.TestSummaryEntryType.RECORD
-                )
-                self.current_test_name = None
-                event_bus.post(TestCaseEndEvent(self, self.test_name, test_signal))
-
-    def get_func_with_retry(self, func, attempts=2):
-        """Returns a wrapped test method that re-runs after failure. Return test
-        result upon success. If attempt limit reached, collect all failure
-        messages and raise a TestFailure signal.
-
-        Params:
-            func: The test method
-            attempts: Number of attempts to run test
-
-        Returns: result of the test method
-        """
-        exceptions = self.retryable_exceptions
-
-        def wrapper(*args, **kwargs):
-            error_msgs = []
-            extras = {}
-            retry = False
-            for i in range(attempts):
-                try:
-                    if retry:
-                        self.teardown_test()
-                        self.setup_test()
-                        self.on_retry()
-                    return func(*args, **kwargs)
-                except exceptions as e:
-                    retry = True
-                    msg = "Failure on attempt %d: %s" % (i + 1, e.details)
-                    self.log.warning(msg)
-                    error_msgs.append(msg)
-                    if e.extras:
-                        extras["Attempt %d" % (i + 1)] = e.extras
-            raise signals.TestFailure("\n".join(error_msgs), extras)
-
-        return wrapper
-
-    def run_generated_testcases(
-        self,
-        test_func,
-        settings,
-        args=None,
-        kwargs=None,
-        tag="",
-        name_func=None,
-        format_args=False,
-    ):
-        """Deprecated. Please use pre_run and generate_tests.
-
-        Generated test cases are not written down as functions, but as a list
-        of parameter sets. This way we reduce code repetition and improve
-        test case scalability.
-
-        Args:
-            test_func: The common logic shared by all these generated test
-                       cases. This function should take at least one argument,
-                       which is a parameter set.
-            settings: A list of strings representing parameter sets. These are
-                      usually json strings that get loaded in the test_func.
-            args: Iterable of additional position args to be passed to
-                  test_func.
-            kwargs: Dict of additional keyword args to be passed to test_func
-            tag: Name of this group of generated test cases. Ignored if
-                 name_func is provided and operates properly.
-            name_func: A function that takes a test setting and generates a
-                       proper test name. The test name should be shorter than
-                       utils.MAX_FILENAME_LEN. Names over the limit will be
-                       truncated.
-            format_args: If True, args will be appended as the first argument
-                         in the args list passed to test_func.
-
-        Returns:
-            A list of settings that did not pass.
-        """
-        args = args or ()
-        kwargs = kwargs or {}
-        failed_settings = []
-
-        for setting in settings:
-            test_name = "{} {}".format(tag, setting)
-
-            if name_func:
-                try:
-                    test_name = name_func(setting, *args, **kwargs)
-                except:
-                    self.log.exception(
-                        (
-                            "Failed to get test name from "
-                            "test_func. Fall back to default %s"
-                        ),
-                        test_name,
-                    )
-
-            self.results.requested.append(test_name)
-
-            if len(test_name) > utils.MAX_FILENAME_LEN:
-                test_name = test_name[: utils.MAX_FILENAME_LEN]
-
-            previous_success_cnt = len(self.results.passed)
-
-            if format_args:
-                self.exec_one_testcase(
-                    test_name,
-                    functools.partial(test_func, *(args + (setting,)), **kwargs),
-                )
-            else:
-                self.exec_one_testcase(
-                    test_name,
-                    functools.partial(test_func, *((setting,) + args), **kwargs),
-                )
-
-            if len(self.results.passed) - previous_success_cnt != 1:
-                failed_settings.append(setting)
-
-        return failed_settings
-
-    def _exec_func(self, func, *args):
-        """Executes a function with exception safeguard.
-
-        This will let signals.TestAbortAll through so abort_all works in all
-        procedure functions.
-
-        Args:
-            func: Function to be executed.
-            args: Arguments to be passed to the function.
-
-        Returns:
-            Whatever the function returns, or False if unhandled exception
-            occured.
-        """
-        try:
-            return func(*args)
-        except signals.TestAbortAll:
-            raise
-        except:
-            self.log.exception(
-                "Exception happened when executing %s in %s.", func.__name__, self.TAG
-            )
-            return False
-
-    def _block_all_test_cases(self, tests, reason="Failed class setup"):
-        """
-        Block all passed in test cases.
-        Args:
-            tests: The tests to block.
-            reason: Message describing the reason that the tests are blocked.
-                Default is 'Failed class setup'
-        """
-        for test_name, test_func in tests:
-            signal = signals.TestError(reason)
-            record = records.TestResultRecord(test_name, self.TAG)
-            record.test_begin()
-            if hasattr(test_func, "gather"):
-                signal.extras = test_func.gather()
-            record.test_error(signal)
-            self.results.add_record(record)
-            self.summary_writer.dump(
-                record.to_dict(), records.TestSummaryEntryType.RECORD
-            )
-            self._on_skip(record)
-
-    def run(self, test_names=None):
-        """Runs test cases within a test class by the order they appear in the
-        execution list.
-
-        One of these test cases lists will be executed, shown here in priority
-        order:
-        1. The test_names list, which is passed from cmd line.
-        2. The self.tests list defined in test class. Invalid names are
-           ignored.
-        3. All function that matches test case naming convention in the test
-           class.
-
-        Args:
-            test_names: A list of string that are test case names/patterns
-             requested in cmd line.
-
-        Returns:
-            The test results object of this class.
-        """
-        # Executes pre-setup procedures, like generating test methods.
-        if not self._pre_run():
-            return self.results
-
-        self.register_test_class_event_subscriptions()
-        self.log.info("==========> %s <==========", self.TAG)
-        # Devise the actual test cases to run in the test class.
-        if self.tests:
-            # Specified by run list in class.
-            valid_tests = list(self.tests)
-        else:
-            # No test case specified by user, gather the run list automatically.
-            valid_tests = self.get_existing_test_names()
-        if test_names:
-            # Match test cases with any of the user-specified patterns
-            matches = []
-            for test_name in test_names:
-                for valid_test in valid_tests:
-                    if (
-                        fnmatch.fnmatch(valid_test, test_name)
-                        and valid_test not in matches
-                    ):
-                        matches.append(valid_test)
-        else:
-            matches = valid_tests
-        self.results.requested = matches
-        self.summary_writer.dump(
-            self.results.requested_test_names_dict(),
-            records.TestSummaryEntryType.TEST_NAME_LIST,
-        )
-        tests = self._get_test_methods(matches)
-
-        # Setup for the class.
-        setup_fail = False
-        try:
-            if self._setup_class() is False:
-                self.log.error("Failed to setup %s.", self.TAG)
-                self._block_all_test_cases(tests)
-                setup_fail = True
-        except signals.TestAbortClass:
-            self.log.exception("Test class %s aborted" % self.TAG)
-            setup_fail = True
-        except Exception as e:
-            self.log.exception("Failed to setup %s.", self.TAG)
-            self._block_all_test_cases(tests)
-            setup_fail = True
-        if setup_fail:
-            self._exec_func(self._teardown_class)
-            self.log.info(
-                "Summary for test class %s: %s", self.TAG, self.results.summary_str()
-            )
-            return self.results
-
-        # Run tests in order.
-        test_case_iterations = self.user_params.get(
-            keys.Config.key_test_case_iterations.value, 1
-        )
-        if any(
-            [
-                substr in self.__class__.__name__
-                for substr in ["Preflight", "Postflight"]
-            ]
-        ):
-            test_case_iterations = 1
-        try:
-            for test_name, test_func in tests:
-                for _ in range(test_case_iterations):
-                    self.exec_one_testcase(test_name, test_func)
-            return self.results
-        except signals.TestAbortClass:
-            self.log.exception("Test class %s aborted" % self.TAG)
-            return self.results
-        except signals.TestAbortAll as e:
-            # Piggy-back test results on this exception object so we don't lose
-            # results from this test class.
-            setattr(e, "results", self.results)
-            raise e
-        finally:
-            self._exec_func(self._teardown_class)
-            self.log.info(
-                "Summary for test class %s: %s", self.TAG, self.results.summary_str()
-            )
-
-    def _ad_take_bugreport(self, ad, test_name, begin_time):
-        for i in range(3):
-            try:
-                ad.take_bug_report(test_name, begin_time)
-                return True
-            except Exception as e:
-                ad.log.error("bugreport attempt %s error: %s", i + 1, e)
-
-    def _ad_take_extra_logs(self, ad, test_name, begin_time):
-        result = True
-        if getattr(ad, "qxdm_log", False):
-            # Gather qxdm log modified 3 minutes earlier than test start time
-            if begin_time:
-                qxdm_begin_time = begin_time - 1000 * 60 * 3
-            else:
-                qxdm_begin_time = None
-            try:
-                ad.get_qxdm_logs(test_name, qxdm_begin_time)
-            except Exception as e:
-                ad.log.error(
-                    "Failed to get QXDM log for %s with error %s", test_name, e
-                )
-                result = False
-
-        try:
-            ad.check_crash_report(test_name, begin_time, log_crash_report=True)
-        except Exception as e:
-            ad.log.error(
-                "Failed to check crash report for %s with error %s", test_name, e
-            )
-            result = False
-        return result
-
-    def _skip_bug_report(self, test_name):
-        """A function to check whether we should skip creating a bug report.
-
-        Args:
-            test_name: The test case name
-
-        Returns: True if bug report is to be skipped.
-        """
-        if "no_bug_report_on_fail" in self.user_params:
-            return True
-
-        # If the current test class or test case is found in the set of
-        # problematic tests, we skip bugreport and other failure artifact
-        # creation.
-        class_name = self.__class__.__name__
-        quiet_tests = self.user_params.get("quiet_tests", [])
-        if class_name in quiet_tests:
-            self.log.info("Skipping bug report, as directed for this test class.")
-            return True
-        full_test_name = "%s.%s" % (class_name, test_name)
-        if full_test_name in quiet_tests:
-            self.log.info("Skipping bug report, as directed for this test case.")
-            return True
-
-        # Once we hit a certain log path size, it's not going to get smaller.
-        # We cache the result so we don't have to keep doing directory walks.
-        if self.size_limit_reached:
-            return True
-        try:
-            max_log_size = int(
-                self.user_params.get("soft_output_size_limit") or "invalid"
-            )
-            log_path = getattr(logging, "log_path", None)
-            if log_path:
-                curr_log_size = utils.get_directory_size(log_path)
-                if curr_log_size > max_log_size:
-                    self.log.info(
-                        "Skipping bug report, as we've reached the size limit."
-                    )
-                    self.size_limit_reached = True
-                    return True
-        except ValueError:
-            pass
-        return False
-
-    def _take_bug_report(self, test_name, begin_time):
-        if self._skip_bug_report(test_name):
-            return
-
-        executor = ThreadPoolExecutor(max_workers=10)
-        for ad in getattr(self, "android_devices", []):
-            executor.submit(self._ad_take_bugreport, ad, test_name, begin_time)
-            executor.submit(self._ad_take_extra_logs, ad, test_name, begin_time)
-        executor.shutdown()
-
-    def _reboot_device(self, ad):
-        ad.log.info("Rebooting device.")
-        ad = ad.reboot()
-
-    def _cleanup_logger_sessions(self):
-        for mylogger, session in self.logger_sessions:
-            self.log.info("Resetting a diagnostic session %s, %s", mylogger, session)
-            mylogger.reset()
-        self.logger_sessions = []
-
-    def _pull_diag_logs(self, test_name, begin_time):
-        for mylogger, session in self.logger_sessions:
-            self.log.info("Pulling diagnostic session %s", mylogger)
-            mylogger.stop(session)
-            diag_path = os.path.join(
-                self.log_path, logger.epoch_to_log_line_timestamp(begin_time)
-            )
-            os.makedirs(diag_path, exist_ok=True)
-            mylogger.pull(session, diag_path)
-
-    def register_test_class_event_subscriptions(self):
-        self.class_subscriptions = subscription_bundle.create_from_instance(self)
-        self.class_subscriptions.register()
-
-    def unregister_test_class_event_subscriptions(self):
-        for package in self.all_subscriptions:
-            package.unregister()
diff --git a/src/antlion/bin/__init__.py b/src/antlion/bin/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/bin/__init__.py
+++ /dev/null
diff --git a/src/antlion/bin/act.py b/src/antlion/bin/act.py
deleted file mode 100755
index 2f78645..0000000
--- a/src/antlion/bin/act.py
+++ /dev/null
@@ -1,272 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import argparse
-import os
-import re
-import signal
-import sys
-import traceback
-
-from mobly import config_parser as mobly_config_parser
-
-from antlion import config_parser
-from antlion import keys
-from antlion import signals
-from antlion import test_runner
-from antlion import utils
-from antlion.config_parser import ActsConfigError
-
-
-def _run_test(parsed_config, test_identifiers, repeat=1):
-    """Instantiate and runs test_runner.TestRunner.
-
-    This is the function to start separate processes with.
-
-    Args:
-        parsed_config: A mobly.config_parser.TestRunConfig that is a set of
-                       configs for one test_runner.TestRunner.
-        test_identifiers: A list of tuples, each identifies what test case to
-                          run on what test class.
-        repeat: Number of times to iterate the specified tests.
-
-    Returns:
-        True if all tests passed without any error, False otherwise.
-    """
-    runner = _create_test_runner(parsed_config, test_identifiers)
-    try:
-        for i in range(repeat):
-            runner.run()
-        return runner.results.is_all_pass
-    except signals.TestAbortAll:
-        return True
-    except:
-        print("Exception when executing %s, iteration %s." % (runner.testbed_name, i))
-        print(traceback.format_exc())
-    finally:
-        runner.stop()
-
-
-def _create_test_runner(parsed_config, test_identifiers):
-    """Instantiates one test_runner.TestRunner object and register termination
-    signal handlers that properly shut down the test_runner.TestRunner run.
-
-    Args:
-        parsed_config: A mobly.config_parser.TestRunConfig that is a set of
-                       configs for one test_runner.TestRunner.
-        test_identifiers: A list of tuples, each identifies what test case to
-                          run on what test class.
-
-    Returns:
-        A test_runner.TestRunner object.
-    """
-    try:
-        t = test_runner.TestRunner(parsed_config, test_identifiers)
-    except:
-        print("Failed to instantiate test runner, abort.")
-        print(traceback.format_exc())
-        sys.exit(1)
-    # Register handler for termination signals.
-    handler = config_parser.gen_term_signal_handler([t])
-    signal.signal(signal.SIGTERM, handler)
-    signal.signal(signal.SIGINT, handler)
-    return t
-
-
-def _run_tests(parsed_configs, test_identifiers, repeat):
-    """Executes requested tests sequentially.
-
-    Requested test runs will commence one after another according to the order
-    of their corresponding configs.
-
-    Args:
-        parsed_configs: A list of mobly.config_parser.TestRunConfig, each is a
-                        set of configs for one test_runner.TestRunner.
-        test_identifiers: A list of tuples, each identifies what test case to
-                          run on what test class.
-        repeat: Number of times to iterate the specified tests.
-
-    Returns:
-        True if all test runs executed successfully, False otherwise.
-    """
-    ok = True
-    for c in parsed_configs:
-        try:
-            ret = _run_test(c, test_identifiers, repeat)
-            ok = ok and ret
-        except Exception as e:
-            print(
-                "Exception occurred when executing test bed %s. %s"
-                % (c.testbed_name, e)
-            )
-    return ok
-
-
-def main():
-    """This is the default implementation of a cli entry point for ACTS test
-    execution.
-
-    Or you could implement your own cli entry point using acts.config_parser
-    functions and acts.test_runner.execute_one_test_class.
-    """
-    parser = argparse.ArgumentParser(
-        description=(
-            "Specify tests to run. If nothing specified, " "run all test cases found."
-        )
-    )
-    parser.add_argument(
-        "-c",
-        "--config",
-        type=str,
-        required=True,
-        metavar="<PATH>",
-        help="Path to the test configuration file.",
-    )
-    parser.add_argument(
-        "-ci",
-        "--campaign_iterations",
-        metavar="<CAMPAIGN_ITERATIONS>",
-        nargs="?",
-        type=int,
-        const=1,
-        default=1,
-        help="Number of times to run the campaign or a group of test cases.",
-    )
-    parser.add_argument(
-        "-tb",
-        "--testbed",
-        nargs="+",
-        type=str,
-        metavar="[<TEST BED NAME1> <TEST BED NAME2> ...]",
-        help="Specify which test beds to run tests on.",
-    )
-    parser.add_argument(
-        "-lp",
-        "--logpath",
-        type=str,
-        metavar="<PATH>",
-        help="Root path under which all logs will be placed.",
-    )
-    parser.add_argument(
-        "-tp",
-        "--testpaths",
-        nargs="*",
-        type=str,
-        metavar="<PATH> <PATH>",
-        help="One or more non-recursive test class search paths.",
-    )
-
-    group = parser.add_mutually_exclusive_group(required=True)
-    group.add_argument(
-        "-tc",
-        "--testclass",
-        nargs="+",
-        type=str,
-        metavar="[TestClass1 TestClass2:test_xxx ...]",
-        help="A list of test classes/cases to run.",
-    )
-    group.add_argument(
-        "-tf",
-        "--testfile",
-        nargs=1,
-        type=str,
-        metavar="<PATH>",
-        help=(
-            "Path to a file containing a comma delimited list of test "
-            "classes to run."
-        ),
-    )
-    parser.add_argument(
-        "-ti",
-        "--test_case_iterations",
-        metavar="<TEST_CASE_ITERATIONS>",
-        nargs="?",
-        type=int,
-        help="Number of times to run every test case.",
-    )
-
-    args = parser.parse_args(sys.argv[1:])
-    test_list = None
-    if args.testfile:
-        test_list = config_parser.parse_test_file(args.testfile[0])
-    elif args.testclass:
-        test_list = args.testclass
-
-    config = args.config
-
-    if config.endswith(".json"):
-        print(
-            "DEPRECATION NOTICE: Converting ACTS JSON to Mobly YAML. ACTS is "
-            + "deprecated. Support will be removed in the next release."
-        )
-        config = utils.acts_json_to_mobly_yaml(config)
-        print(f"Wrote YAML config to {config}")
-
-    parsed_configs = mobly_config_parser.load_test_config_file(config, args.testbed)
-
-    for test_run_config in parsed_configs:
-        if args.testpaths:
-            tp_key = keys.Config.key_test_paths.value
-            test_run_config.controller_configs[tp_key] = args.testpaths
-        if args.logpath:
-            test_run_config.log_path = args.logpath
-        if args.test_case_iterations:
-            ti_key = keys.Config.key_test_case_iterations.value
-            test_run_config.user_params[ti_key] = args.test_case_iterations
-
-        # Sets the --testpaths flag to the default test directory if left unset.
-        testpath_key = keys.Config.key_test_paths.value
-        if (
-            testpath_key not in test_run_config.controller_configs
-            or test_run_config.controller_configs[testpath_key] is None
-        ):
-            test_run_config.controller_configs[testpath_key] = [
-                os.path.join(os.path.dirname(__file__), "../tests/"),
-            ]
-
-        for path in test_run_config.controller_configs[testpath_key]:
-            path = utils.abs_path(path)
-
-        # TODO(markdr): Find a way to merge this with the validation done in
-        # Mobly's load_test_config_file.
-        if not test_run_config.log_path:
-            raise ActsConfigError(
-                "Required key %s missing in test config."
-                % keys.Config.key_log_path.value
-            )
-        test_run_config.log_path = utils.abs_path(test_run_config.log_path)
-
-    # Prepare args for test runs
-    test_identifiers = config_parser.parse_test_list(test_list)
-
-    print(
-        "\n\nDEPRECATION NOTICE: Running antlion tests with act.py is "
-        "deprecated and will be removed in the next release. Please migrate "
-        "by using Mobly YAML configs and executing the test class directly:\n\n"
-    )
-    for test_class, _ in test_identifiers:
-        print(f"   python {test_class}.py -c {config}")
-    print("\n")
-
-    exec_result = _run_tests(parsed_configs, test_identifiers, args.campaign_iterations)
-    if exec_result is False:
-        # return 1 upon test failure.
-        sys.exit(1)
-    sys.exit(0)
-
-
-if __name__ == "__main__":
-    main()
diff --git a/src/antlion/capabilities/ssh.py b/src/antlion/capabilities/ssh.py
deleted file mode 100644
index eeb1e16..0000000
--- a/src/antlion/capabilities/ssh.py
+++ /dev/null
@@ -1,377 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2023 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import subprocess
-import time
-
-from dataclasses import dataclass
-from typing import List, Union, BinaryIO
-
-from antlion import logger
-from antlion import signals
-from antlion.net import wait_for_port
-
-DEFAULT_SSH_PORT: int = 22
-DEFAULT_SSH_TIMEOUT_SEC: int = 60
-DEFAULT_SSH_CONNECT_TIMEOUT_SEC: int = 90
-DEFAULT_SSH_SERVER_ALIVE_INTERVAL: int = 30
-# The default package repository for all components.
-
-
-class SSHResult:
-    """Result of an SSH command."""
-
-    def __init__(
-        self, process: Union[subprocess.CompletedProcess, subprocess.CalledProcessError]
-    ) -> None:
-        self._raw_stdout = process.stdout
-        self._stderr = process.stderr.decode("utf-8", errors="replace")
-        self._exit_status: int = process.returncode
-
-    def __str__(self):
-        if self.exit_status == 0:
-            return self.stdout
-        return f'status {self.exit_status}, stdout: "{self.stdout}", stderr: "{self.stderr}"'
-
-    @property
-    def stdout(self) -> str:
-        if not hasattr(self, "_stdout"):
-            self._stdout = self._raw_stdout.decode("utf-8", errors="replace")
-        return self._stdout
-
-    @property
-    def stderr(self) -> str:
-        return self._stderr
-
-    @property
-    def exit_status(self) -> int:
-        return self._exit_status
-
-    @property
-    def raw_stdout(self) -> bytes:
-        return self._raw_stdout
-
-
-class SSHError(signals.TestError):
-    """A SSH command returned with a non-zero status code."""
-
-    def __init__(self, command: str, result: SSHResult):
-        super().__init__(f'SSH command "{command}" unexpectedly returned {result}')
-        self.result = result
-
-
-class SSHTimeout(signals.TestError):
-    """A SSH command timed out."""
-
-    def __init__(self, err: subprocess.TimeoutExpired):
-        super().__init__(
-            f'SSH command "{err.cmd}" timed out after {err.timeout}s, '
-            f'stdout="{err.stdout}", stderr="{err.stderr}"'
-        )
-
-
-class SSHTransportError(signals.TestError):
-    """Failure to send an SSH command."""
-
-
-@dataclass
-class SSHConfig:
-    """SSH client config."""
-
-    # SSH flags. See ssh(1) for full details.
-    user: str
-    host_name: str
-    identity_file: str
-
-    ssh_binary: str = "ssh"
-    config_file: str = "/dev/null"
-    port: int = 22
-
-    # SSH options. See ssh_config(5) for full details.
-    connect_timeout: int = DEFAULT_SSH_CONNECT_TIMEOUT_SEC
-    server_alive_interval: int = DEFAULT_SSH_SERVER_ALIVE_INTERVAL
-    strict_host_key_checking: bool = False
-    user_known_hosts_file: str = "/dev/null"
-    log_level: str = "ERROR"
-
-    def full_command(self, command: str, force_tty: bool = False) -> List[str]:
-        """Generate the complete command to execute command over SSH.
-
-        Args:
-            command: The command to run over SSH
-            force_tty: Force pseudo-terminal allocation. This can be used to
-                execute arbitrary screen-based programs on a remote machine,
-                which can be very useful, e.g. when implementing menu services.
-
-        Returns:
-            Arguments composing the complete call to SSH.
-        """
-        optional_flags = []
-        if force_tty:
-            # Multiple -t options force tty allocation, even if ssh has no local
-            # tty. This is necessary for launching ssh with subprocess without
-            # shell=True.
-            optional_flags.append("-tt")
-
-        return (
-            [
-                self.ssh_binary,
-                # SSH flags
-                "-i",
-                self.identity_file,
-                "-F",
-                self.config_file,
-                "-p",
-                str(self.port),
-                # SSH configuration options
-                "-o",
-                f"ConnectTimeout={self.connect_timeout}",
-                "-o",
-                f"ServerAliveInterval={self.server_alive_interval}",
-                "-o",
-                f'StrictHostKeyChecking={"yes" if self.strict_host_key_checking else "no"}',
-                "-o",
-                f"UserKnownHostsFile={self.user_known_hosts_file}",
-                "-o",
-                f"LogLevel={self.log_level}",
-            ]
-            + optional_flags
-            + [f"{self.user}@{self.host_name}"]
-            + command.split()
-        )
-
-
-class SSHProvider:
-    """Device-specific provider for SSH clients."""
-
-    def __init__(self, config: SSHConfig) -> None:
-        """
-        Args:
-            config: SSH client config
-        """
-        logger_tag = f"ssh | {config.host_name}"
-        if config.port != DEFAULT_SSH_PORT:
-            logger_tag += f":{config.port}"
-
-        # Check if the private key exists
-
-        self.log = logger.create_tagged_trace_logger(logger_tag)
-        self.config = config
-
-        try:
-            self.wait_until_reachable()
-            self.log.info("sshd is reachable")
-        except Exception as e:
-            raise TimeoutError("sshd is unreachable") from e
-
-    def wait_until_reachable(self) -> None:
-        """Wait for the device to become reachable via SSH.
-
-        Raises:
-            TimeoutError: connect_timeout has expired without a successful SSH
-                connection to the device
-            SSHTransportError: SSH is available on the device but
-                connect_timeout has expired and SSH fails to run
-            SSHTimeout: SSH is available on the device but connect_timeout has
-                expired and SSH takes too long to run a command
-        """
-        timeout_sec = self.config.connect_timeout
-        timeout = time.time() + timeout_sec
-        wait_for_port(self.config.host_name, self.config.port, timeout_sec=timeout_sec)
-
-        while True:
-            try:
-                self._run("echo", timeout_sec, False, None)
-                return
-            except SSHTransportError as e:
-                # Repeat if necessary; _run() can exit prematurely by receiving
-                # SSH transport errors. These errors can be caused by sshd not
-                # being fully initialized yet.
-                if time.time() < timeout:
-                    continue
-                else:
-                    raise e
-
-    def wait_until_unreachable(
-        self, interval_sec: int = 1, timeout_sec: int = DEFAULT_SSH_CONNECT_TIMEOUT_SEC
-    ) -> None:
-        """Wait for the device to become unreachable via SSH.
-
-        Args:
-            interval_sec: Seconds to wait between unreachability attempts
-            timeout_sec: Seconds to wait until raising TimeoutError
-
-        Raises:
-            TimeoutError: when timeout_sec has expired without an unsuccessful
-                SSH connection to the device
-        """
-        timeout = time.time() + timeout_sec
-
-        while True:
-            try:
-                wait_for_port(
-                    self.config.host_name, self.config.port, timeout_sec=interval_sec
-                )
-            except TimeoutError:
-                return
-
-            if time.time() < timeout:
-                raise TimeoutError(
-                    f"Connection to {self.config.host_name} is still reachable "
-                    f"after {timeout_sec}s"
-                )
-
-    def run(
-        self,
-        command: str,
-        timeout_sec: int = DEFAULT_SSH_TIMEOUT_SEC,
-        connect_retries: int = 3,
-        force_tty: bool = False,
-    ) -> SSHResult:
-        """Run a command on the device then exit.
-
-        Args:
-            command: String to send to the device.
-            timeout_sec: Seconds to wait for the command to complete.
-            connect_retries: Amount of times to retry connect on fail.
-            force_tty: Force pseudo-terminal allocation.
-
-        Raises:
-            SSHError: if the SSH command returns a non-zero status code
-            SSHTransportError: if SSH fails to run the command
-            SSHTimeout: if there is no response within timeout_sec
-
-        Returns:
-            SSHResults from the executed command.
-        """
-        return self._run_with_retry(
-            command, timeout_sec, connect_retries, force_tty, stdin=None
-        )
-
-    def _run_with_retry(
-        self,
-        command: str,
-        timeout_sec: int,
-        connect_retries: int,
-        force_tty: bool,
-        stdin: BinaryIO,
-    ) -> SSHResult:
-        err: Exception = ValueError("connect_retries cannot be 0")
-        for i in range(0, connect_retries):
-            try:
-                return self._run(command, timeout_sec, force_tty, stdin)
-            except SSHTransportError as e:
-                err = e
-                self.log.warn(f"Connect failed: {e}")
-        raise err
-
-    def _run(
-        self, command: str, timeout_sec: int, force_tty: bool, stdin: BinaryIO
-    ) -> SSHResult:
-        full_command = self.config.full_command(command, force_tty)
-        self.log.debug(
-            f'Running "{command}" (full command: "{" ".join(full_command)}")'
-        )
-        try:
-            process = subprocess.run(
-                full_command,
-                capture_output=True,
-                timeout=timeout_sec,
-                check=True,
-                stdin=stdin,
-            )
-        except subprocess.CalledProcessError as e:
-            if e.returncode == 255:
-                stderr = e.stderr.decode("utf-8", errors="replace")
-                if (
-                    "Name or service not known" in stderr
-                    or "Host does not exist" in stderr
-                ):
-                    raise SSHTransportError(
-                        f"Hostname {self.config.host_name} cannot be resolved to an address"
-                    ) from e
-                if "Connection timed out" in stderr:
-                    raise SSHTransportError(
-                        f"Failed to establish a connection to {self.config.host_name} within {timeout_sec}s"
-                    ) from e
-                if "Connection refused" in stderr:
-                    raise SSHTransportError(
-                        f"Connection refused by {self.config.host_name}"
-                    ) from e
-
-            raise SSHError(command, SSHResult(e)) from e
-        except subprocess.TimeoutExpired as e:
-            raise SSHTimeout(e) from e
-
-        return SSHResult(process)
-
-    def upload_file(
-        self,
-        local_path: str,
-        remote_path: str,
-        timeout_sec: int = DEFAULT_SSH_TIMEOUT_SEC,
-        connect_retries: int = 3,
-    ) -> None:
-        """Upload a file to the device.
-
-        Args:
-            local_path: Path to the file to upload
-            remote_path: Path on the remote device to place the uploaded file.
-            timeout_sec: Seconds to wait for the command to complete.
-            connect_retries: Amount of times to retry connect on fail.
-
-        Raises:
-            SSHError: if the SSH upload returns a non-zero status code
-            SSHTransportError: if SSH fails to run the upload command
-            SSHTimeout: if there is no response within timeout_sec
-        """
-        file = open(local_path, "rb")
-        self._run_with_retry(
-            f"cat > {remote_path}",
-            timeout_sec,
-            connect_retries,
-            force_tty=False,
-            stdin=file,
-        )
-
-    def download_file(
-        self,
-        remote_path: str,
-        local_path: str,
-        timeout_sec: int = DEFAULT_SSH_TIMEOUT_SEC,
-        connect_retries: int = 3,
-    ) -> None:
-        """Upload a file to the device.
-
-        Args:
-            remote_path: Path on the remote device to download.
-            local_path: Path on the host to the place the downloaded file.
-            timeout_sec: Seconds to wait for the command to complete.
-            connect_retries: Amount of times to retry connect on fail.
-
-        Raises:
-            SSHError: if the SSH command returns a non-zero status code
-            SSHTransportError: if SSH fails to run the command
-            SSHTimeout: if there is no response within timeout_sec
-        """
-        file = open(local_path, "rb")
-        return self._run_with_retry(
-            f"cat > {remote_path}",
-            timeout_sec,
-            connect_retries,
-            force_tty=False,
-            stdin=file,
-        )
diff --git a/src/antlion/config_parser.py b/src/antlion/config_parser.py
deleted file mode 100755
index 7f202ff..0000000
--- a/src/antlion/config_parser.py
+++ /dev/null
@@ -1,250 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import itertools
-import os
-import sys
-
-import mobly.config_parser as mobly_config_parser
-
-from antlion import keys
-from antlion import utils
-
-# An environment variable defining the base location for ACTS logs.
-_ENV_ACTS_LOGPATH = "ACTS_LOGPATH"
-# An environment variable that enables test case failures to log stack traces.
-_ENV_TEST_FAILURE_TRACEBACKS = "ACTS_TEST_FAILURE_TRACEBACKS"
-# An environment variable defining the test search paths for ACTS.
-_ENV_ACTS_TESTPATHS = "ACTS_TESTPATHS"
-_PATH_SEPARATOR = ":"
-
-
-class ActsConfigError(Exception):
-    """Raised when there is a problem in test configuration file."""
-
-
-def _validate_test_config(test_config):
-    """Validates the raw configuration loaded from the config file.
-
-    Making sure all the required fields exist.
-    """
-    for k in keys.Config.reserved_keys.value:
-        # TODO(markdr): Remove this continue after merging this with the
-        # validation done in Mobly's load_test_config_file.
-        if k == keys.Config.key_test_paths.value or k == keys.Config.key_log_path.value:
-            continue
-
-        if k not in test_config:
-            raise ActsConfigError("Required key %s missing in test config." % k)
-
-
-def _validate_testbed_name(name):
-    """Validates the name of a test bed.
-
-    Since test bed names are used as part of the test run id, it needs to meet
-    certain requirements.
-
-    Args:
-        name: The test bed's name specified in config file.
-
-    Raises:
-        If the name does not meet any criteria, ActsConfigError is raised.
-    """
-    if not name:
-        raise ActsConfigError("Test bed names can't be empty.")
-    if not isinstance(name, str):
-        raise ActsConfigError("Test bed names have to be string.")
-    for l in name:
-        if l not in utils.valid_filename_chars:
-            raise ActsConfigError("Char '%s' is not allowed in test bed names." % l)
-
-
-def _validate_testbed_configs(testbed_configs):
-    """Validates the testbed configurations.
-
-    Args:
-        testbed_configs: A list of testbed configuration json objects.
-
-    Raises:
-        If any part of the configuration is invalid, ActsConfigError is raised.
-    """
-    # Cross checks testbed configs for resource conflicts.
-    for name in testbed_configs:
-        _validate_testbed_name(name)
-
-
-def gen_term_signal_handler(test_runners):
-    def termination_sig_handler(signal_num, frame):
-        print("Received sigterm %s." % signal_num)
-        for t in test_runners:
-            t.stop()
-        sys.exit(1)
-
-    return termination_sig_handler
-
-
-def _parse_one_test_specifier(item):
-    """Parse one test specifier from command line input.
-
-    Args:
-        item: A string that specifies a test class or test cases in one test
-            class to run.
-
-    Returns:
-        A tuple of a string and a list of strings. The string is the test class
-        name, the list of strings is a list of test case names. The list can be
-        None.
-    """
-    tokens = item.split(":")
-    if len(tokens) > 2:
-        raise ActsConfigError("Syntax error in test specifier %s" % item)
-    if len(tokens) == 1:
-        # This should be considered a test class name
-        test_cls_name = tokens[0]
-        return test_cls_name, None
-    elif len(tokens) == 2:
-        # This should be considered a test class name followed by
-        # a list of test case names.
-        test_cls_name, test_case_names = tokens
-        clean_names = [elem.strip() for elem in test_case_names.split(",")]
-        return test_cls_name, clean_names
-
-
-def parse_test_list(test_list):
-    """Parse user provided test list into internal format for test_runner.
-
-    Args:
-        test_list: A list of test classes/cases.
-    """
-    result = []
-    for elem in test_list:
-        result.append(_parse_one_test_specifier(elem))
-    return result
-
-
-def load_test_config_file(test_config_path, tb_filters=None):
-    """Processes the test configuration file provided by the user.
-
-    Loads the configuration file into a json object, unpacks each testbed
-    config into its own TestRunConfig object, and validate the configuration in
-    the process.
-
-    Args:
-        test_config_path: Path to the test configuration file.
-        tb_filters: A subset of test bed names to be pulled from the config
-                    file. If None, then all test beds will be selected.
-
-    Returns:
-        A list of mobly.config_parser.TestRunConfig objects to be passed to
-        test_runner.TestRunner.
-    """
-    configs = utils.load_config(test_config_path)
-
-    testbeds = configs[keys.Config.key_testbed.value]
-    if type(testbeds) is list:
-        tb_dict = dict()
-        for testbed in testbeds:
-            tb_dict[testbed[keys.Config.key_testbed_name.value]] = testbed
-        testbeds = tb_dict
-    elif type(testbeds) is dict:
-        # For compatibility, make sure the entry name is the same as
-        # the testbed's "name" entry
-        for name, testbed in testbeds.items():
-            testbed[keys.Config.key_testbed_name.value] = name
-
-    if tb_filters:
-        tbs = {}
-        for name in tb_filters:
-            if name in testbeds:
-                tbs[name] = testbeds[name]
-            else:
-                raise ActsConfigError(
-                    'Expected testbed named "%s", but none was found. Check '
-                    "if you have the correct testbed names." % name
-                )
-        testbeds = tbs
-
-    if (
-        keys.Config.key_log_path.value not in configs
-        and _ENV_ACTS_LOGPATH in os.environ
-    ):
-        print("Using environment log path: %s" % (os.environ[_ENV_ACTS_LOGPATH]))
-        configs[keys.Config.key_log_path.value] = os.environ[_ENV_ACTS_LOGPATH]
-    if (
-        keys.Config.key_test_paths.value not in configs
-        and _ENV_ACTS_TESTPATHS in os.environ
-    ):
-        print("Using environment test paths: %s" % (os.environ[_ENV_ACTS_TESTPATHS]))
-        configs[keys.Config.key_test_paths.value] = os.environ[
-            _ENV_ACTS_TESTPATHS
-        ].split(_PATH_SEPARATOR)
-    if (
-        keys.Config.key_test_failure_tracebacks not in configs
-        and _ENV_TEST_FAILURE_TRACEBACKS in os.environ
-    ):
-        configs[keys.Config.key_test_failure_tracebacks.value] = os.environ[
-            _ENV_TEST_FAILURE_TRACEBACKS
-        ]
-
-    # TODO: See if there is a better way to do this: b/29836695
-    config_path, _ = os.path.split(utils.abs_path(test_config_path))
-    configs[keys.Config.key_config_path.value] = config_path
-    _validate_test_config(configs)
-    _validate_testbed_configs(testbeds)
-    # Unpack testbeds into separate json objects.
-    configs.pop(keys.Config.key_testbed.value)
-    test_run_configs = []
-
-    for _, testbed in testbeds.items():
-        test_run_config = mobly_config_parser.TestRunConfig()
-        test_run_config.testbed_name = testbed[keys.Config.key_testbed_name.value]
-        test_run_config.controller_configs = testbed
-        test_run_config.controller_configs[
-            keys.Config.key_test_paths.value
-        ] = configs.get(keys.Config.key_test_paths.value, None)
-        test_run_config.log_path = configs.get(keys.Config.key_log_path.value, None)
-        if test_run_config.log_path is not None:
-            test_run_config.log_path = utils.abs_path(test_run_config.log_path)
-
-        user_param_pairs = []
-        for item in itertools.chain(configs.items(), testbed.items()):
-            if item[0] not in keys.Config.reserved_keys.value:
-                user_param_pairs.append(item)
-        test_run_config.user_params = dict(user_param_pairs)
-
-        test_run_configs.append(test_run_config)
-    return test_run_configs
-
-
-def parse_test_file(fpath):
-    """Parses a test file that contains test specifiers.
-
-    Args:
-        fpath: A string that is the path to the test file to parse.
-
-    Returns:
-        A list of strings, each is a test specifier.
-    """
-    with open(fpath, "r") as f:
-        tf = []
-        for line in f:
-            line = line.strip()
-            if not line:
-                continue
-            if len(tf) and (tf[-1].endswith(":") or tf[-1].endswith(",")):
-                tf[-1] += line
-            else:
-                tf.append(line)
-        return tf
diff --git a/src/antlion/controllers/android_lib/tel/__init__.py b/src/antlion/controllers/android_lib/tel/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/controllers/android_lib/tel/__init__.py
+++ /dev/null
diff --git a/src/antlion/controllers/android_lib/tel/tel_utils.py b/src/antlion/controllers/android_lib/tel/tel_utils.py
deleted file mode 100644
index c18741c..0000000
--- a/src/antlion/controllers/android_lib/tel/tel_utils.py
+++ /dev/null
@@ -1,690 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Generic telephony utility functions. Cloned from test_utils.tel."""
-
-import re
-import struct
-import time
-from queue import Empty
-
-from antlion.logger import epoch_to_log_line_timestamp
-from antlion.controllers.adb_lib.error import AdbCommandError
-
-INCALL_UI_DISPLAY_FOREGROUND = "foreground"
-INCALL_UI_DISPLAY_BACKGROUND = "background"
-INCALL_UI_DISPLAY_DEFAULT = "default"
-
-# Max time to wait after caller make a call and before
-# callee start ringing
-MAX_WAIT_TIME_ACCEPT_CALL_TO_OFFHOOK_EVENT = 30
-
-# Max time to wait after toggle airplane mode and before
-# get expected event
-MAX_WAIT_TIME_AIRPLANEMODE_EVENT = 90
-
-# Wait time between state check retry
-WAIT_TIME_BETWEEN_STATE_CHECK = 5
-
-# Constant for Data Roaming State
-DATA_ROAMING_ENABLE = 1
-DATA_ROAMING_DISABLE = 0
-
-# Constant for Telephony Manager Call State
-TELEPHONY_STATE_RINGING = "RINGING"
-TELEPHONY_STATE_IDLE = "IDLE"
-TELEPHONY_STATE_OFFHOOK = "OFFHOOK"
-TELEPHONY_STATE_UNKNOWN = "UNKNOWN"
-
-# Constant for Service State
-SERVICE_STATE_EMERGENCY_ONLY = "EMERGENCY_ONLY"
-SERVICE_STATE_IN_SERVICE = "IN_SERVICE"
-SERVICE_STATE_OUT_OF_SERVICE = "OUT_OF_SERVICE"
-SERVICE_STATE_POWER_OFF = "POWER_OFF"
-SERVICE_STATE_UNKNOWN = "UNKNOWN"
-
-# Constant for Network Mode
-NETWORK_MODE_GSM_ONLY = "NETWORK_MODE_GSM_ONLY"
-NETWORK_MODE_WCDMA_ONLY = "NETWORK_MODE_WCDMA_ONLY"
-NETWORK_MODE_LTE_ONLY = "NETWORK_MODE_LTE_ONLY"
-
-# Constant for Events
-EVENT_CALL_STATE_CHANGED = "CallStateChanged"
-EVENT_SERVICE_STATE_CHANGED = "ServiceStateChanged"
-
-
-class CallStateContainer:
-    INCOMING_NUMBER = "incomingNumber"
-    SUBSCRIPTION_ID = "subscriptionId"
-    CALL_STATE = "callState"
-
-
-class ServiceStateContainer:
-    VOICE_REG_STATE = "voiceRegState"
-    VOICE_NETWORK_TYPE = "voiceNetworkType"
-    DATA_REG_STATE = "dataRegState"
-    DATA_NETWORK_TYPE = "dataNetworkType"
-    OPERATOR_NAME = "operatorName"
-    OPERATOR_ID = "operatorId"
-    IS_MANUAL_NW_SELECTION = "isManualNwSelection"
-    ROAMING = "roaming"
-    IS_EMERGENCY_ONLY = "isEmergencyOnly"
-    NETWORK_ID = "networkId"
-    SYSTEM_ID = "systemId"
-    SUBSCRIPTION_ID = "subscriptionId"
-    SERVICE_STATE = "serviceState"
-
-
-def dumpsys_last_call_info(ad):
-    """Get call information by dumpsys telecom."""
-    num = dumpsys_last_call_number(ad)
-    output = ad.adb.shell("dumpsys telecom")
-    result = re.search(r"Call TC@%s: {(.*?)}" % num, output, re.DOTALL)
-    call_info = {"TC": num}
-    if result:
-        result = result.group(1)
-        for attr in (
-            "startTime",
-            "endTime",
-            "direction",
-            "isInterrupted",
-            "callTechnologies",
-            "callTerminationsReason",
-            "isVideoCall",
-            "callProperties",
-        ):
-            match = re.search(r"%s: (.*)" % attr, result)
-            if match:
-                if attr in ("startTime", "endTime"):
-                    call_info[attr] = epoch_to_log_line_timestamp(int(match.group(1)))
-                else:
-                    call_info[attr] = match.group(1)
-    ad.log.debug("call_info = %s", call_info)
-    return call_info
-
-
-def dumpsys_last_call_number(ad):
-    output = ad.adb.shell("dumpsys telecom")
-    call_nums = re.findall("Call TC@(\d+):", output)
-    if not call_nums:
-        return 0
-    else:
-        return int(call_nums[-1])
-
-
-def get_device_epoch_time(ad):
-    return int(1000 * float(ad.adb.shell("date +%s.%N")))
-
-
-def get_outgoing_voice_sub_id(ad):
-    """Get outgoing voice subscription id"""
-    if hasattr(ad, "outgoing_voice_sub_id"):
-        return ad.outgoing_voice_sub_id
-    else:
-        return ad.droid.subscriptionGetDefaultVoiceSubId()
-
-
-def get_rx_tx_power_levels(log, ad):
-    """Obtains Rx and Tx power levels from the MDS application.
-
-    The method requires the MDS app to be installed in the DUT.
-
-    Args:
-        log: logger object
-        ad: an android device
-
-    Return:
-        A tuple where the first element is an array array with the RSRP value
-        in Rx chain, and the second element is the transmitted power in dBm.
-        Values for invalid Rx / Tx chains are set to None.
-    """
-    cmd = (
-        'am instrument -w -e request "80 00 e8 03 00 08 00 00 00" -e '
-        'response wait "com.google.mdstest/com.google.mdstest.instrument.'
-        'ModemCommandInstrumentation"'
-    )
-    try:
-        output = ad.adb.shell(cmd)
-    except AdbCommandError as e:
-        log.error(e)
-        output = None
-
-    if not output or "result=SUCCESS" not in output:
-        raise RuntimeError(
-            "Could not obtain Tx/Rx power levels from MDS. Is " "the MDS app installed?"
-        )
-
-    response = re.search(r"(?<=response=).+", output)
-
-    if not response:
-        raise RuntimeError("Invalid response from the MDS app:\n" + output)
-
-    # Obtain a list of bytes in hex format from the response string
-    response_hex = response.group(0).split(" ")
-
-    def get_bool(pos):
-        """Obtain a boolean variable from the byte array."""
-        return response_hex[pos] == "01"
-
-    def get_int32(pos):
-        """Obtain an int from the byte array. Bytes are printed in
-        little endian format."""
-        return struct.unpack(
-            "<i", bytearray.fromhex("".join(response_hex[pos : pos + 4]))
-        )[0]
-
-    rx_power = []
-    RX_CHAINS = 4
-
-    for i in range(RX_CHAINS):
-        # Calculate starting position for the Rx chain data structure
-        start = 12 + i * 22
-
-        # The first byte in the data structure indicates if the rx chain is
-        # valid.
-        if get_bool(start):
-            rx_power.append(get_int32(start + 2) / 10)
-        else:
-            rx_power.append(None)
-
-    # Calculate the position for the tx chain data structure
-    tx_pos = 12 + RX_CHAINS * 22
-
-    tx_valid = get_bool(tx_pos)
-    if tx_valid:
-        tx_power = get_int32(tx_pos + 2) / -10
-    else:
-        tx_power = None
-
-    return rx_power, tx_power
-
-
-def get_telephony_signal_strength(ad):
-    # {'evdoEcio': -1, 'asuLevel': 28, 'lteSignalStrength': 14, 'gsmLevel': 0,
-    # 'cdmaAsuLevel': 99, 'evdoDbm': -120, 'gsmDbm': -1, 'cdmaEcio': -160,
-    # 'level': 2, 'lteLevel': 2, 'cdmaDbm': -120, 'dbm': -112, 'cdmaLevel': 0,
-    # 'lteAsuLevel': 28, 'gsmAsuLevel': 99, 'gsmBitErrorRate': 0,
-    # 'lteDbm': -112, 'gsmSignalStrength': 99}
-    try:
-        signal_strength = ad.droid.telephonyGetSignalStrength()
-        if not signal_strength:
-            signal_strength = {}
-    except Exception as e:
-        ad.log.error(e)
-        signal_strength = {}
-    return signal_strength
-
-
-def initiate_call(
-    log,
-    ad,
-    callee_number,
-    emergency=False,
-    incall_ui_display=INCALL_UI_DISPLAY_FOREGROUND,
-    video=False,
-):
-    """Make phone call from caller to callee.
-
-    Args:
-        log: log object.
-        ad: Caller android device object.
-        callee_number: Callee phone number.
-        emergency : specify the call is emergency.
-            Optional. Default value is False.
-        incall_ui_display: show the dialer UI foreground or background
-        video: whether to initiate as video call
-
-    Returns:
-        result: if phone call is placed successfully.
-    """
-    ad.ed.clear_events(EVENT_CALL_STATE_CHANGED)
-    sub_id = get_outgoing_voice_sub_id(ad)
-    begin_time = get_device_epoch_time(ad)
-    ad.droid.telephonyStartTrackingCallStateForSubscription(sub_id)
-    try:
-        # Make a Call
-        ad.log.info("Make a phone call to %s", callee_number)
-        if emergency:
-            ad.droid.telecomCallEmergencyNumber(callee_number)
-        else:
-            ad.droid.telecomCallNumber(callee_number, video)
-
-        # Verify OFFHOOK state
-        if not wait_for_call_offhook_for_subscription(
-            log, ad, sub_id, event_tracking_started=True
-        ):
-            ad.log.info("sub_id %s not in call offhook state", sub_id)
-            last_call_drop_reason(ad, begin_time=begin_time)
-            return False
-        else:
-            return True
-    finally:
-        if hasattr(ad, "sdm_log") and getattr(ad, "sdm_log"):
-            ad.adb.shell("i2cset -fy 3 64 6 1 b", ignore_status=True)
-            ad.adb.shell("i2cset -fy 3 65 6 1 b", ignore_status=True)
-        ad.droid.telephonyStopTrackingCallStateChangeForSubscription(sub_id)
-        if incall_ui_display == INCALL_UI_DISPLAY_FOREGROUND:
-            ad.droid.telecomShowInCallScreen()
-        elif incall_ui_display == INCALL_UI_DISPLAY_BACKGROUND:
-            ad.droid.showHomeScreen()
-
-
-def is_event_match(event, field, value):
-    """Return if <field> in "event" match <value> or not.
-
-    Args:
-        event: event to test. This event need to have <field>.
-        field: field to match.
-        value: value to match.
-
-    Returns:
-        True if <field> in "event" match <value>.
-        False otherwise.
-    """
-    return is_event_match_for_list(event, field, [value])
-
-
-def is_event_match_for_list(event, field, value_list):
-    """Return if <field> in "event" match any one of the value
-        in "value_list" or not.
-
-    Args:
-        event: event to test. This event need to have <field>.
-        field: field to match.
-        value_list: a list of value to match.
-
-    Returns:
-        True if <field> in "event" match one of the value in "value_list".
-        False otherwise.
-    """
-    try:
-        value_in_event = event["data"][field]
-    except KeyError:
-        return False
-    for value in value_list:
-        if value_in_event == value:
-            return True
-    return False
-
-
-def is_phone_in_call(log, ad):
-    """Return True if phone in call.
-
-    Args:
-        log: log object.
-        ad:  android device.
-    """
-    try:
-        return ad.droid.telecomIsInCall()
-    except:
-        return "mCallState=2" in ad.adb.shell(
-            "dumpsys telephony.registry | grep mCallState"
-        )
-
-
-def last_call_drop_reason(ad, begin_time=None):
-    reasons = ad.search_logcat(
-        "qcril_qmi_voice_map_qmi_to_ril_last_call_failure_cause", begin_time
-    )
-    reason_string = ""
-    if reasons:
-        log_msg = "Logcat call drop reasons:"
-        for reason in reasons:
-            log_msg = "%s\n\t%s" % (log_msg, reason["log_message"])
-            if "ril reason str" in reason["log_message"]:
-                reason_string = reason["log_message"].split(":")[-1].strip()
-        ad.log.info(log_msg)
-    reasons = ad.search_logcat("ACTION_FORBIDDEN_NO_SERVICE_AUTHORIZATION", begin_time)
-    if reasons:
-        ad.log.warning("ACTION_FORBIDDEN_NO_SERVICE_AUTHORIZATION is seen")
-    ad.log.info("last call dumpsys: %s", sorted(dumpsys_last_call_info(ad).items()))
-    return reason_string
-
-
-def toggle_airplane_mode(log, ad, new_state=None, strict_checking=True):
-    """Toggle the state of airplane mode.
-
-    Args:
-        log: log handler.
-        ad: android_device object.
-        new_state: Airplane mode state to set to.
-            If None, opposite of the current state.
-        strict_checking: Whether to turn on strict checking that checks all features.
-
-    Returns:
-        result: True if operation succeed. False if error happens.
-    """
-    if ad.skip_sl4a:
-        return toggle_airplane_mode_by_adb(log, ad, new_state)
-    else:
-        return toggle_airplane_mode_msim(
-            log, ad, new_state, strict_checking=strict_checking
-        )
-
-
-def toggle_airplane_mode_by_adb(log, ad, new_state=None):
-    """Toggle the state of airplane mode.
-
-    Args:
-        log: log handler.
-        ad: android_device object.
-        new_state: Airplane mode state to set to.
-            If None, opposite of the current state.
-
-    Returns:
-        result: True if operation succeed. False if error happens.
-    """
-    cur_state = bool(int(ad.adb.shell("settings get global airplane_mode_on")))
-    if new_state == cur_state:
-        ad.log.info("Airplane mode already in %s", new_state)
-        return True
-    elif new_state is None:
-        new_state = not cur_state
-    ad.log.info("Change airplane mode from %s to %s", cur_state, new_state)
-    try:
-        ad.adb.shell("settings put global airplane_mode_on %s" % int(new_state))
-        ad.adb.shell("am broadcast -a android.intent.action.AIRPLANE_MODE")
-    except Exception as e:
-        ad.log.error(e)
-        return False
-    changed_state = bool(int(ad.adb.shell("settings get global airplane_mode_on")))
-    return changed_state == new_state
-
-
-def toggle_airplane_mode_msim(log, ad, new_state=None, strict_checking=True):
-    """Toggle the state of airplane mode.
-
-    Args:
-        log: log handler.
-        ad: android_device object.
-        new_state: Airplane mode state to set to.
-            If None, opposite of the current state.
-        strict_checking: Whether to turn on strict checking that checks all features.
-
-    Returns:
-        result: True if operation succeed. False if error happens.
-    """
-
-    cur_state = ad.droid.connectivityCheckAirplaneMode()
-    if cur_state == new_state:
-        ad.log.info("Airplane mode already in %s", new_state)
-        return True
-    elif new_state is None:
-        new_state = not cur_state
-        ad.log.info("Toggle APM mode, from current tate %s to %s", cur_state, new_state)
-    sub_id_list = []
-    active_sub_info = ad.droid.subscriptionGetAllSubInfoList()
-    if active_sub_info:
-        for info in active_sub_info:
-            sub_id_list.append(info["subscriptionId"])
-
-    ad.ed.clear_all_events()
-    time.sleep(0.1)
-    service_state_list = []
-    if new_state:
-        service_state_list.append(SERVICE_STATE_POWER_OFF)
-        ad.log.info("Turn on airplane mode")
-
-    else:
-        # If either one of these 3 events show up, it should be OK.
-        # Normal SIM, phone in service
-        service_state_list.append(SERVICE_STATE_IN_SERVICE)
-        # NO SIM, or Dead SIM, or no Roaming coverage.
-        service_state_list.append(SERVICE_STATE_OUT_OF_SERVICE)
-        service_state_list.append(SERVICE_STATE_EMERGENCY_ONLY)
-        ad.log.info("Turn off airplane mode")
-
-    for sub_id in sub_id_list:
-        ad.droid.telephonyStartTrackingServiceStateChangeForSubscription(sub_id)
-
-    timeout_time = time.time() + MAX_WAIT_TIME_AIRPLANEMODE_EVENT
-    ad.droid.connectivityToggleAirplaneMode(new_state)
-
-    try:
-        try:
-            event = ad.ed.wait_for_event(
-                EVENT_SERVICE_STATE_CHANGED,
-                is_event_match_for_list,
-                timeout=MAX_WAIT_TIME_AIRPLANEMODE_EVENT,
-                field=ServiceStateContainer.SERVICE_STATE,
-                value_list=service_state_list,
-            )
-            ad.log.info("Got event %s", event)
-        except Empty:
-            ad.log.warning(
-                "Did not get expected service state change to %s", service_state_list
-            )
-        finally:
-            for sub_id in sub_id_list:
-                ad.droid.telephonyStopTrackingServiceStateChangeForSubscription(sub_id)
-    except Exception as e:
-        ad.log.error(e)
-
-    # APM on (new_state=True) will turn off bluetooth but may not turn it on
-    try:
-        if new_state and not _wait_for_bluetooth_in_state(
-            log, ad, False, timeout_time - time.time()
-        ):
-            ad.log.error("Failed waiting for bluetooth during airplane mode toggle")
-            if strict_checking:
-                return False
-    except Exception as e:
-        ad.log.error("Failed to check bluetooth state due to %s", e)
-        if strict_checking:
-            raise
-
-    # APM on (new_state=True) will turn off wifi but may not turn it on
-    if new_state and not _wait_for_wifi_in_state(
-        log, ad, False, timeout_time - time.time()
-    ):
-        ad.log.error("Failed waiting for wifi during airplane mode toggle on")
-        if strict_checking:
-            return False
-
-    if ad.droid.connectivityCheckAirplaneMode() != new_state:
-        ad.log.error("Set airplane mode to %s failed", new_state)
-        return False
-    return True
-
-
-def toggle_cell_data_roaming(ad, state):
-    """Enable cell data roaming for default data subscription.
-
-    Wait for the data roaming status to be DATA_STATE_CONNECTED
-        or DATA_STATE_DISCONNECTED.
-
-    Args:
-        ad: Android Device Object.
-        state: True or False for enable or disable cell data roaming.
-
-    Returns:
-        True if success.
-        False if failed.
-    """
-    state_int = {True: DATA_ROAMING_ENABLE, False: DATA_ROAMING_DISABLE}[state]
-    action_str = {True: "Enable", False: "Disable"}[state]
-    if ad.droid.connectivityCheckDataRoamingMode() == state:
-        ad.log.info("Data roaming is already in state %s", state)
-        return True
-    if not ad.droid.connectivitySetDataRoaming(state_int):
-        ad.error.info("Fail to config data roaming into state %s", state)
-        return False
-    if ad.droid.connectivityCheckDataRoamingMode() == state:
-        ad.log.info("Data roaming is configured into state %s", state)
-        return True
-    else:
-        ad.log.error("Data roaming is not configured into state %s", state)
-        return False
-
-
-def wait_for_call_offhook_event(
-    log,
-    ad,
-    sub_id,
-    event_tracking_started=False,
-    timeout=MAX_WAIT_TIME_ACCEPT_CALL_TO_OFFHOOK_EVENT,
-):
-    """Wait for an incoming call on specified subscription.
-
-    Args:
-        log: log object.
-        ad: android device object.
-        event_tracking_started: True if event tracking already state outside
-        timeout: time to wait for event
-
-    Returns:
-        True: if call offhook event is received.
-        False: if call offhook event is not received.
-    """
-    if not event_tracking_started:
-        ad.ed.clear_events(EVENT_CALL_STATE_CHANGED)
-        ad.droid.telephonyStartTrackingCallStateForSubscription(sub_id)
-    try:
-        ad.ed.wait_for_event(
-            EVENT_CALL_STATE_CHANGED,
-            is_event_match,
-            timeout=timeout,
-            field=CallStateContainer.CALL_STATE,
-            value=TELEPHONY_STATE_OFFHOOK,
-        )
-        ad.log.info("Got event %s", TELEPHONY_STATE_OFFHOOK)
-    except Empty:
-        ad.log.info("No event for call state change to OFFHOOK")
-        return False
-    finally:
-        if not event_tracking_started:
-            ad.droid.telephonyStopTrackingCallStateChangeForSubscription(sub_id)
-    return True
-
-
-def wait_for_call_offhook_for_subscription(
-    log,
-    ad,
-    sub_id,
-    event_tracking_started=False,
-    timeout=MAX_WAIT_TIME_ACCEPT_CALL_TO_OFFHOOK_EVENT,
-    interval=WAIT_TIME_BETWEEN_STATE_CHECK,
-):
-    """Wait for an incoming call on specified subscription.
-
-    Args:
-        log: log object.
-        ad: android device object.
-        sub_id: subscription ID
-        timeout: time to wait for ring
-        interval: checking interval
-
-    Returns:
-        True: if incoming call is received and answered successfully.
-        False: for errors
-    """
-    if not event_tracking_started:
-        ad.ed.clear_events(EVENT_CALL_STATE_CHANGED)
-        ad.droid.telephonyStartTrackingCallStateForSubscription(sub_id)
-    offhook_event_received = False
-    end_time = time.time() + timeout
-    try:
-        while time.time() < end_time:
-            if not offhook_event_received:
-                if wait_for_call_offhook_event(log, ad, sub_id, True, interval):
-                    offhook_event_received = True
-            telephony_state = ad.droid.telephonyGetCallStateForSubscription(sub_id)
-            telecom_state = ad.droid.telecomGetCallState()
-            if telephony_state == TELEPHONY_STATE_OFFHOOK and (
-                telecom_state == TELEPHONY_STATE_OFFHOOK
-            ):
-                ad.log.info("telephony and telecom are in OFFHOOK state")
-                return True
-            else:
-                ad.log.info(
-                    "telephony in %s, telecom in %s, expecting OFFHOOK state",
-                    telephony_state,
-                    telecom_state,
-                )
-            if offhook_event_received:
-                time.sleep(interval)
-    finally:
-        if not event_tracking_started:
-            ad.droid.telephonyStopTrackingCallStateChangeForSubscription(sub_id)
-
-
-def _wait_for_bluetooth_in_state(log, ad, state, max_wait):
-    # FIXME: These event names should be defined in a common location
-    _BLUETOOTH_STATE_ON_EVENT = "BluetoothStateChangedOn"
-    _BLUETOOTH_STATE_OFF_EVENT = "BluetoothStateChangedOff"
-    ad.ed.clear_events(_BLUETOOTH_STATE_ON_EVENT)
-    ad.ed.clear_events(_BLUETOOTH_STATE_OFF_EVENT)
-
-    ad.droid.bluetoothStartListeningForAdapterStateChange()
-    try:
-        bt_state = ad.droid.bluetoothCheckState()
-        if bt_state == state:
-            return True
-        if max_wait <= 0:
-            ad.log.error(
-                "Time out: bluetooth state still %s, expecting %s", bt_state, state
-            )
-            return False
-
-        event = {False: _BLUETOOTH_STATE_OFF_EVENT, True: _BLUETOOTH_STATE_ON_EVENT}[
-            state
-        ]
-        event = ad.ed.pop_event(event, max_wait)
-        ad.log.info("Got event %s", event["name"])
-        return True
-    except Empty:
-        ad.log.error(
-            "Time out: bluetooth state still in %s, expecting %s", bt_state, state
-        )
-        return False
-    finally:
-        ad.droid.bluetoothStopListeningForAdapterStateChange()
-
-
-def wait_for_droid_in_call(log, ad, max_time):
-    """Wait for android to be in call state.
-
-    Args:
-        log: log object.
-        ad:  android device.
-        max_time: maximal wait time.
-
-    Returns:
-        If phone become in call state within max_time, return True.
-        Return False if timeout.
-    """
-    return _wait_for_droid_in_state(log, ad, max_time, is_phone_in_call)
-
-
-def _wait_for_droid_in_state(log, ad, max_time, state_check_func, *args, **kwargs):
-    while max_time >= 0:
-        if state_check_func(log, ad, *args, **kwargs):
-            return True
-
-        time.sleep(WAIT_TIME_BETWEEN_STATE_CHECK)
-        max_time -= WAIT_TIME_BETWEEN_STATE_CHECK
-
-    return False
-
-
-# TODO: replace this with an event-based function
-def _wait_for_wifi_in_state(log, ad, state, max_wait):
-    return _wait_for_droid_in_state(
-        log,
-        ad,
-        max_wait,
-        lambda log, ad, state: (True if ad.droid.wifiCheckState() == state else False),
-        state,
-    )
diff --git a/src/antlion/controllers/ap_lib/hostapd_security.py b/src/antlion/controllers/ap_lib/hostapd_security.py
deleted file mode 100644
index 69d5c2f..0000000
--- a/src/antlion/controllers/ap_lib/hostapd_security.py
+++ /dev/null
@@ -1,154 +0,0 @@
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import collections
-import string
-
-from typing import Dict, Optional, Union
-
-from antlion.controllers.ap_lib import hostapd_constants
-
-
-class Security(object):
-    """The Security class for hostapd representing some of the security
-    settings that are allowed in hostapd.  If needed more can be added.
-    """
-
-    def __init__(
-        self,
-        security_mode: Optional[str] = None,
-        password: Optional[str] = None,
-        wpa_cipher: str = hostapd_constants.WPA_DEFAULT_CIPHER,
-        wpa2_cipher: str = hostapd_constants.WPA2_DEFAULT_CIPER,
-        wpa_group_rekey: int = hostapd_constants.WPA_GROUP_KEY_ROTATION_TIME,
-        wpa_strict_rekey: bool = hostapd_constants.WPA_STRICT_REKEY_DEFAULT,
-        wep_default_key: int = hostapd_constants.WEP_DEFAULT_KEY,
-        radius_server_ip: Optional[str] = None,
-        radius_server_port: Optional[int] = None,
-        radius_server_secret: Optional[str] = None,
-    ) -> None:
-        """Gather all of the security settings for WPA-PSK.  This could be
-           expanded later.
-
-        Args:
-            security_mode: Type of security modes.
-                        Options: wep, wpa, wpa2, wpa/wpa2, wpa3, wpa2/wpa3,
-                        wpa/wpa2/wpa3
-            password: The PSK or passphrase for the security mode.
-            wpa_cipher: The cipher to be used for wpa.
-                        Options: TKIP, CCMP, TKIP CCMP
-                        Default: TKIP
-            wpa2_cipher: The cipher to be used for wpa2.
-                         Options: TKIP, CCMP, TKIP CCMP
-                         Default: CCMP
-            wpa_group_rekey: How often to refresh the GTK regardless of network
-                             changes.
-                             Options: An integrer in seconds, None
-                             Default: 600 seconds
-            wpa_strict_rekey: Whether to do a group key update when client
-                              leaves the network or not.
-                              Options: True, False
-                              Default: True
-            wep_default_key: The wep key number to use when transmitting.
-            radius_server_ip: Radius server IP for Enterprise auth.
-            radius_server_port: Radius server port for Enterprise auth.
-            radius_server_secret: Radius server secret for Enterprise auth.
-        """
-        self.security_mode_string = security_mode
-        self.wpa_cipher = wpa_cipher
-        self.wpa2_cipher = wpa2_cipher
-        self.wpa_group_rekey = wpa_group_rekey
-        self.wpa_strict_rekey = wpa_strict_rekey
-        self.wep_default_key = wep_default_key
-        self.radius_server_ip = radius_server_ip
-        self.radius_server_port = radius_server_port
-        self.radius_server_secret = radius_server_secret
-        self.security_mode = hostapd_constants.SECURITY_STRING_TO_SECURITY_MODE_INT.get(
-            security_mode, None
-        )
-        if password:
-            if self.security_mode == hostapd_constants.WEP:
-                if len(password) in hostapd_constants.WEP_STR_LENGTH:
-                    self.password = '"%s"' % password
-                elif len(password) in hostapd_constants.WEP_HEX_LENGTH and all(
-                    c in string.hexdigits for c in password
-                ):
-                    self.password = password
-                else:
-                    raise ValueError(
-                        "WEP key must be a hex string of %s characters"
-                        % hostapd_constants.WEP_HEX_LENGTH
-                    )
-            else:
-                if (
-                    len(password) < hostapd_constants.MIN_WPA_PSK_LENGTH
-                    or len(password) > hostapd_constants.MAX_WPA_PSK_LENGTH
-                ):
-                    raise ValueError(
-                        "Password must be a minumum of %s characters and a maximum of %s"
-                        % (
-                            hostapd_constants.MIN_WPA_PSK_LENGTH,
-                            hostapd_constants.MAX_WPA_PSK_LENGTH,
-                        )
-                    )
-                else:
-                    self.password = password
-
-    def generate_dict(self) -> Dict[str, Union[str, int]]:
-        """Returns: an ordered dictionary of settings"""
-        settings = collections.OrderedDict()
-        if self.security_mode is not None:
-            if self.security_mode == hostapd_constants.WEP:
-                settings["wep_default_key"] = self.wep_default_key
-                settings["wep_key" + str(self.wep_default_key)] = self.password
-            elif self.security_mode == hostapd_constants.ENT:
-                settings["auth_server_addr"] = self.radius_server_ip
-                settings["auth_server_port"] = self.radius_server_port
-                settings["auth_server_shared_secret"] = self.radius_server_secret
-                settings["wpa_key_mgmt"] = hostapd_constants.ENT_KEY_MGMT
-                settings["ieee8021x"] = hostapd_constants.IEEE8021X
-                settings["wpa"] = hostapd_constants.WPA2
-            else:
-                settings["wpa"] = self.security_mode
-                if len(self.password) == hostapd_constants.MAX_WPA_PSK_LENGTH:
-                    settings["wpa_psk"] = self.password
-                else:
-                    settings["wpa_passphrase"] = self.password
-                # For wpa, wpa/wpa2, and wpa/wpa2/wpa3, add wpa_pairwise
-                if (
-                    self.security_mode == hostapd_constants.WPA1
-                    or self.security_mode == hostapd_constants.MIXED
-                ):
-                    settings["wpa_pairwise"] = self.wpa_cipher
-                # For wpa/wpa2, wpa2, wpa3, and wpa2/wpa3, and wpa/wpa2, wpa3, add rsn_pairwise
-                if (
-                    self.security_mode == hostapd_constants.WPA2
-                    or self.security_mode == hostapd_constants.MIXED
-                ):
-                    settings["rsn_pairwise"] = self.wpa2_cipher
-                # Add wpa_key_mgmt based on security mode string
-                if (
-                    self.security_mode_string
-                    in hostapd_constants.SECURITY_STRING_TO_WPA_KEY_MGMT
-                ):
-                    settings[
-                        "wpa_key_mgmt"
-                    ] = hostapd_constants.SECURITY_STRING_TO_WPA_KEY_MGMT[
-                        self.security_mode_string
-                    ]
-                if self.wpa_group_rekey:
-                    settings["wpa_group_rekey"] = self.wpa_group_rekey
-                if self.wpa_strict_rekey:
-                    settings["wpa_strict_rekey"] = hostapd_constants.WPA_STRICT_REKEY
-        return settings
diff --git a/src/antlion/controllers/ap_lib/hostapd_utils.py b/src/antlion/controllers/ap_lib/hostapd_utils.py
deleted file mode 100644
index 82331bf..0000000
--- a/src/antlion/controllers/ap_lib/hostapd_utils.py
+++ /dev/null
@@ -1,99 +0,0 @@
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion import utils
-from antlion.controllers.ap_lib import hostapd_config
-from antlion.controllers.ap_lib import hostapd_constants
-
-
-def generate_random_password(security_mode=None, length=None, hex=None):
-    """Generates a random password. Defaults to an 8 character ASCII password.
-
-    Args:
-        security_mode: optional string, security type. Used to determine if
-            length should be WEP compatible (useful for generated tests to simply
-            pass in security mode)
-        length: optional int, length of password to generate. Defaults to 8,
-            unless security_mode is WEP, then 13
-        hex: optional int, if True, generates a hex string, else ascii
-    """
-    if hex:
-        generator_func = utils.rand_hex_str
-    else:
-        generator_func = utils.rand_ascii_str
-
-    if length:
-        return generator_func(length)
-    if security_mode and security_mode.lower() == hostapd_constants.WEP_STRING:
-        return generator_func(hostapd_constants.WEP_DEFAULT_STR_LENGTH)
-    else:
-        return generator_func(hostapd_constants.MIN_WPA_PSK_LENGTH)
-
-
-def verify_interface(interface, valid_interfaces):
-    """Raises error if interface is missing or invalid
-    Args:
-        interface: string of interface name
-        valid_interfaces: list of valid interface names
-    """
-    if not interface:
-        raise ValueError("Required wlan interface is missing.")
-    if interface not in valid_interfaces:
-        raise ValueError("Invalid interface name was passed: %s" % interface)
-
-
-def verify_security_mode(security_profile, valid_security_modes):
-    """Raises error if security mode is not in list of valid security modes.
-
-    Args:
-        security_profile: a hostapd_security.Security object.
-        valid_security_modes: a list of valid security modes for a profile. Must
-            include None if open security is valid.
-    """
-    if security_profile is None:
-        if None not in valid_security_modes:
-            raise ValueError("Open security is not allowed for this profile.")
-    elif security_profile.security_mode not in valid_security_modes:
-        raise ValueError(
-            "Invalid Security Mode: %s. "
-            "Valid Security Modes for this profile: %s."
-            % (security_profile.security_mode, valid_security_modes)
-        )
-
-
-def verify_cipher(security_profile, valid_ciphers):
-    """Raise error if cipher is not in list of valid ciphers.
-
-    Args:
-        security_profile: a hostapd_security.Security object.
-        valid_ciphers: a list of valid ciphers for a profile.
-    """
-    if security_profile is None:
-        raise ValueError("Security mode is open.")
-    elif security_profile.security_mode == hostapd_constants.WPA1:
-        if security_profile.wpa_cipher not in valid_ciphers:
-            raise ValueError(
-                "Invalid WPA Cipher: %s. "
-                "Valid WPA Ciphers for this profile: %s"
-                % (security_profile.wpa_cipher, valid_ciphers)
-            )
-    elif security_profile.security_mode == hostapd_constants.WPA2:
-        if security_profile.wpa2_cipher not in valid_ciphers:
-            raise ValueError(
-                "Invalid WPA2 Cipher: %s. "
-                "Valid WPA2 Ciphers for this profile: %s"
-                % (security_profile.wpa2_cipher, valid_ciphers)
-            )
-    else:
-        raise ValueError("Invalid Security Mode: %s" % security_profile.security_mode)
diff --git a/src/antlion/controllers/attenuator.py b/src/antlion/controllers/attenuator.py
deleted file mode 100644
index 440e90a..0000000
--- a/src/antlion/controllers/attenuator.py
+++ /dev/null
@@ -1,417 +0,0 @@
-#!/usr/bin/env python3.4
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import importlib
-import logging
-
-from antlion.keys import Config
-from antlion.libs.proc import job
-
-MOBLY_CONTROLLER_CONFIG_NAME = "Attenuator"
-ACTS_CONTROLLER_REFERENCE_NAME = "attenuators"
-_ATTENUATOR_OPEN_RETRIES = 3
-
-
-def create(configs):
-    objs = []
-    for c in configs:
-        attn_model = c["Model"]
-        # Default to telnet.
-        protocol = c.get("Protocol", "telnet")
-        module_name = "antlion.controllers.attenuator_lib.%s.%s" % (
-            attn_model,
-            protocol,
-        )
-        module = importlib.import_module(module_name)
-        inst_cnt = c["InstrumentCount"]
-        attn_inst = module.AttenuatorInstrument(inst_cnt)
-        attn_inst.model = attn_model
-
-        ip_address = c[Config.key_address.value]
-        port = c[Config.key_port.value]
-
-        for attempt_number in range(1, _ATTENUATOR_OPEN_RETRIES + 1):
-            try:
-                attn_inst.open(ip_address, port)
-            except Exception as e:
-                logging.error(
-                    "Attempt %s to open connection to attenuator "
-                    "failed: %s" % (attempt_number, e)
-                )
-                if attempt_number == _ATTENUATOR_OPEN_RETRIES:
-                    ping_output = job.run(
-                        "ping %s -c 1 -w 1" % ip_address, ignore_status=True
-                    )
-                    if ping_output.exit_status == 1:
-                        logging.error("Unable to ping attenuator at %s" % ip_address)
-                    else:
-                        logging.error("Able to ping attenuator at %s" % ip_address)
-                        job.run(
-                            'echo "q" | telnet %s %s' % (ip_address, port),
-                            ignore_status=True,
-                        )
-                    raise
-        for i in range(inst_cnt):
-            attn = Attenuator(attn_inst, idx=i)
-            if "Paths" in c:
-                try:
-                    setattr(attn, "path", c["Paths"][i])
-                except IndexError:
-                    logging.error("No path specified for attenuator %d.", i)
-                    raise
-            objs.append(attn)
-    return objs
-
-
-def get_info(attenuators):
-    """Get information on a list of Attenuator objects.
-
-    Args:
-        attenuators: A list of Attenuator objects.
-
-    Returns:
-        A list of dict, each representing info for Attenuator objects.
-    """
-    device_info = []
-    for attenuator in attenuators:
-        info = {
-            "Address": attenuator.instrument.address,
-            "Attenuator_Port": attenuator.idx,
-        }
-        device_info.append(info)
-    return device_info
-
-
-def destroy(objs):
-    for attn in objs:
-        attn.instrument.close()
-
-
-def get_attenuators_for_device(device_attenuator_configs, attenuators, attenuator_key):
-    """Gets the list of attenuators associated to a specified device and builds
-    a list of the attenuator objects associated to the ip address in the
-    device's section of the ACTS config and the Attenuator's IP address.  In the
-    example below the access point object has an attenuator dictionary with
-    IP address associated to an attenuator object.  The address is the only
-    mandatory field and the 'attenuator_ports_wifi_2g' and
-    'attenuator_ports_wifi_5g' are the attenuator_key specified above.  These
-    can be anything and is sent in as a parameter to this function.  The numbers
-    in the list are ports that are in the attenuator object.  Below is an
-    standard Access_Point object and the link to a standard Attenuator object.
-    Notice the link is the IP address, which is why the IP address is mandatory.
-
-    "AccessPoint": [
-        {
-          "ssh_config": {
-            "user": "root",
-            "host": "192.168.42.210"
-          },
-          "Attenuator": [
-            {
-              "Address": "192.168.42.200",
-              "attenuator_ports_wifi_2g": [
-                0,
-                1,
-                3
-              ],
-              "attenuator_ports_wifi_5g": [
-                0,
-                1
-              ]
-            }
-          ]
-        }
-      ],
-      "Attenuator": [
-        {
-          "Model": "minicircuits",
-          "InstrumentCount": 4,
-          "Address": "192.168.42.200",
-          "Port": 23
-        }
-      ]
-    Args:
-        device_attenuator_configs: A list of attenuators config information in
-            the acts config that are associated a particular device.
-        attenuators: A list of all of the available attenuators objects
-            in the testbed.
-        attenuator_key: A string that is the key to search in the device's
-            configuration.
-
-    Returns:
-        A list of attenuator objects for the specified device and the key in
-        that device's config.
-    """
-    attenuator_list = []
-    for device_attenuator_config in device_attenuator_configs:
-        for attenuator_port in device_attenuator_config[attenuator_key]:
-            for attenuator in attenuators:
-                if (
-                    attenuator.instrument.address == device_attenuator_config["Address"]
-                    and attenuator.idx is attenuator_port
-                ):
-                    attenuator_list.append(attenuator)
-    return attenuator_list
-
-
-"""Classes for accessing, managing, and manipulating attenuators.
-
-Users will instantiate a specific child class, but almost all operation should
-be performed on the methods and data members defined here in the base classes
-or the wrapper classes.
-"""
-
-
-class AttenuatorError(Exception):
-    """Base class for all errors generated by Attenuator-related modules."""
-
-
-class InvalidDataError(AttenuatorError):
-    """ "Raised when an unexpected result is seen on the transport layer.
-
-    When this exception is seen, closing an re-opening the link to the
-    attenuator instrument is probably necessary. Something has gone wrong in
-    the transport.
-    """
-
-
-class InvalidOperationError(AttenuatorError):
-    """Raised when the attenuator's state does not allow the given operation.
-
-    Certain methods may only be accessed when the instance upon which they are
-    invoked is in a certain state. This indicates that the object is not in the
-    correct state for a method to be called.
-    """
-
-
-class AttenuatorInstrument(object):
-    """Defines the primitive behavior of all attenuator instruments.
-
-    The AttenuatorInstrument class is designed to provide a simple low-level
-    interface for accessing any step attenuator instrument comprised of one or
-    more attenuators and a controller. All AttenuatorInstruments should override
-    all the methods below and call AttenuatorInstrument.__init__ in their
-    constructors. Outside of setup/teardown, devices should be accessed via
-    this generic "interface".
-    """
-
-    model = None
-    INVALID_MAX_ATTEN = 999.9
-
-    def __init__(self, num_atten=0):
-        """This is the Constructor for Attenuator Instrument.
-
-        Args:
-            num_atten: The number of attenuators contained within the
-                instrument. In some instances setting this number to zero will
-                allow the driver to auto-determine the number of attenuators;
-                however, this behavior is not guaranteed.
-
-        Raises:
-            NotImplementedError if initialization is called from this class.
-        """
-
-        if type(self) is AttenuatorInstrument:
-            raise NotImplementedError("Base class should not be instantiated directly!")
-
-        self.num_atten = num_atten
-        self.max_atten = AttenuatorInstrument.INVALID_MAX_ATTEN
-        self.properties = None
-
-    def set_atten(self, idx, value, strict=True, retry=False):
-        """Sets the attenuation given its index in the instrument.
-
-        Args:
-            idx: A zero based index used to identify a particular attenuator in
-                an instrument.
-            value: a floating point value for nominal attenuation to be set.
-            strict: if True, function raises an error when given out of
-                bounds attenuation values, if false, the function sets out of
-                bounds values to 0 or max_atten.
-            retry: if True, command will be retried if possible
-        """
-        raise NotImplementedError("Base class should not be called directly!")
-
-    def get_atten(self, idx, retry=False):
-        """Returns the current attenuation of the attenuator at index idx.
-
-        Args:
-            idx: A zero based index used to identify a particular attenuator in
-                an instrument.
-            retry: if True, command will be retried if possible
-
-        Returns:
-            The current attenuation value as a floating point value
-        """
-        raise NotImplementedError("Base class should not be called directly!")
-
-
-class Attenuator(object):
-    """An object representing a single attenuator in a remote instrument.
-
-    A user wishing to abstract the mapping of attenuators to physical
-    instruments should use this class, which provides an object that abstracts
-    the physical implementation and allows the user to think only of attenuators
-    regardless of their location.
-    """
-
-    def __init__(self, instrument, idx=0, offset=0):
-        """This is the constructor for Attenuator
-
-        Args:
-            instrument: Reference to an AttenuatorInstrument on which the
-                Attenuator resides
-            idx: This zero-based index is the identifier for a particular
-                attenuator in an instrument.
-            offset: A power offset value for the attenuator to be used when
-                performing future operations. This could be used for either
-                calibration or to allow group operations with offsets between
-                various attenuators.
-
-        Raises:
-            TypeError if an invalid AttenuatorInstrument is passed in.
-            IndexError if the index is out of range.
-        """
-        if not isinstance(instrument, AttenuatorInstrument):
-            raise TypeError("Must provide an Attenuator Instrument Ref")
-        self.model = instrument.model
-        self.instrument = instrument
-        self.idx = idx
-        self.offset = offset
-
-        if self.idx >= instrument.num_atten:
-            raise IndexError("Attenuator index out of range for attenuator instrument")
-
-    def set_atten(self, value, strict=True, retry=False):
-        """Sets the attenuation.
-
-        Args:
-            value: A floating point value for nominal attenuation to be set.
-            strict: if True, function raises an error when given out of
-                bounds attenuation values, if false, the function sets out of
-                bounds values to 0 or max_atten.
-            retry: if True, command will be retried if possible
-
-        Raises:
-            ValueError if value + offset is greater than the maximum value.
-        """
-        if value + self.offset > self.instrument.max_atten and strict:
-            raise ValueError("Attenuator Value+Offset greater than Max Attenuation!")
-
-        self.instrument.set_atten(
-            self.idx, value + self.offset, strict=strict, retry=retry
-        )
-
-    def get_atten(self, retry=False):
-        """Returns the attenuation as a float, normalized by the offset."""
-        return self.instrument.get_atten(self.idx, retry) - self.offset
-
-    def get_max_atten(self):
-        """Returns the max attenuation as a float, normalized by the offset."""
-        if self.instrument.max_atten == AttenuatorInstrument.INVALID_MAX_ATTEN:
-            raise ValueError("Invalid Max Attenuator Value")
-
-        return self.instrument.max_atten - self.offset
-
-
-class AttenuatorGroup(object):
-    """An abstraction for groups of attenuators that will share behavior.
-
-    Attenuator groups are intended to further facilitate abstraction of testing
-    functions from the physical objects underlying them. By adding attenuators
-    to a group, it is possible to operate on functional groups that can be
-    thought of in a common manner in the test. This class is intended to provide
-    convenience to the user and avoid re-implementation of helper functions and
-    small loops scattered throughout user code.
-    """
-
-    def __init__(self, name=""):
-        """This constructor for AttenuatorGroup
-
-        Args:
-            name: An optional parameter intended to further facilitate the
-                passing of easily tracked groups of attenuators throughout code.
-                It is left to the user to use the name in a way that meets their
-                needs.
-        """
-        self.name = name
-        self.attens = []
-        self._value = 0
-
-    def add_from_instrument(self, instrument, indices):
-        """Adds an AttenuatorInstrument to the group.
-
-        This function will create Attenuator objects for all of the indices
-        passed in and add them to the group.
-
-        Args:
-            instrument: the AttenuatorInstrument to pull attenuators from.
-                indices: The index or indices to add to the group. Either a
-                range, a list, or a single integer.
-
-        Raises
-        ------
-        TypeError
-            Requires a valid AttenuatorInstrument to be passed in.
-        """
-        if not instrument or not isinstance(instrument, AttenuatorInstrument):
-            raise TypeError("Must provide an Attenuator Instrument Ref")
-
-        if type(indices) is range or type(indices) is list:
-            for i in indices:
-                self.attens.append(Attenuator(instrument, i))
-        elif type(indices) is int:
-            self.attens.append(Attenuator(instrument, indices))
-
-    def add(self, attenuator):
-        """Adds an already constructed Attenuator object to this group.
-
-        Args:
-            attenuator: An Attenuator object.
-
-        Raises:
-            TypeError if the attenuator parameter is not an Attenuator.
-        """
-        if not isinstance(attenuator, Attenuator):
-            raise TypeError("Must provide an Attenuator")
-
-        self.attens.append(attenuator)
-
-    def synchronize(self):
-        """Sets all grouped attenuators to the group's attenuation value."""
-        self.set_atten(self._value)
-
-    def is_synchronized(self):
-        """Returns true if all attenuators have the synchronized value."""
-        for att in self.attens:
-            if att.get_atten() != self._value:
-                return False
-        return True
-
-    def set_atten(self, value):
-        """Sets the attenuation value of all attenuators in the group.
-
-        Args:
-            value: A floating point value for nominal attenuation to be set.
-        """
-        value = float(value)
-        for att in self.attens:
-            att.set_atten(value)
-        self._value = value
-
-    def get_atten(self):
-        """Returns the current attenuation setting of AttenuatorGroup."""
-        return float(self._value)
diff --git a/src/antlion/controllers/attenuator_lib/aeroflex/telnet.py b/src/antlion/controllers/attenuator_lib/aeroflex/telnet.py
deleted file mode 100644
index 4c34f4b..0000000
--- a/src/antlion/controllers/attenuator_lib/aeroflex/telnet.py
+++ /dev/null
@@ -1,130 +0,0 @@
-#!/usr/bin/env python3
-
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-Class for Telnet control of Aeroflex 832X and 833X Series Attenuator Modules
-
-This class provides a wrapper to the Aeroflex attenuator modules for purposes
-of simplifying and abstracting control down to the basic necessities. It is
-not the intention of the module to expose all functionality, but to allow
-interchangeable HW to be used.
-
-See http://www.aeroflex.com/ams/weinschel/PDFILES/IM-608-Models-8320-&-8321-preliminary.pdf
-"""
-
-from antlion.controllers import attenuator
-from antlion.controllers.attenuator_lib import _tnhelper
-
-
-class AttenuatorInstrument(attenuator.AttenuatorInstrument):
-    def __init__(self, num_atten=0):
-        super(AttenuatorInstrument, self).__init__(num_atten)
-
-        self._tnhelper = _tnhelper._TNHelper(
-            tx_cmd_separator="\r\n", rx_cmd_separator="\r\n", prompt=">"
-        )
-        self.properties = None
-        self.address = None
-
-    def open(self, host, port=23):
-        """Opens a telnet connection to the desired AttenuatorInstrument and
-        queries basic information.
-
-        Args:
-            host: A valid hostname (IP address or DNS-resolvable name) to an
-            MC-DAT attenuator instrument.
-            port: An optional port number (defaults to telnet default 23)
-        """
-        self._tnhelper.open(host, port)
-
-        # work around a bug in IO, but this is a good thing to do anyway
-        self._tnhelper.cmd("*CLS", False)
-        self.address = host
-
-        if self.num_atten == 0:
-            self.num_atten = int(self._tnhelper.cmd("RFCONFIG? CHAN"))
-
-        configstr = self._tnhelper.cmd("RFCONFIG? ATTN 1")
-
-        self.properties = dict(
-            zip(
-                ["model", "max_atten", "min_step", "unknown", "unknown2", "cfg_str"],
-                configstr.split(", ", 5),
-            )
-        )
-
-        self.max_atten = float(self.properties["max_atten"])
-
-    def is_open(self):
-        """Returns True if the AttenuatorInstrument has an open connection."""
-        return bool(self._tnhelper.is_open())
-
-    def close(self):
-        """Closes the telnet connection.
-
-        This should be called as part of any teardown procedure prior to the
-        attenuator instrument leaving scope.
-        """
-        self._tnhelper.close()
-
-    def set_atten(self, idx, value, **_):
-        """This function sets the attenuation of an attenuator given its index
-        in the instrument.
-
-        Args:
-            idx: A zero-based index that identifies a particular attenuator in
-                an instrument. For instruments that only have one channel, this
-                is ignored by the device.
-            value: A floating point value for nominal attenuation to be set.
-
-        Raises:
-            InvalidOperationError if the telnet connection is not open.
-            IndexError if the index is not valid for this instrument.
-            ValueError if the requested set value is greater than the maximum
-                attenuation value.
-        """
-        if not self.is_open():
-            raise attenuator.InvalidOperationError("Connection not open!")
-
-        if idx >= self.num_atten:
-            raise IndexError("Attenuator index out of range!", self.num_atten, idx)
-
-        if value > self.max_atten:
-            raise ValueError("Attenuator value out of range!", self.max_atten, value)
-
-        self._tnhelper.cmd("ATTN " + str(idx + 1) + " " + str(value), False)
-
-    def get_atten(self, idx, **_):
-        """Returns the current attenuation of the attenuator at the given index.
-
-        Args:
-            idx: The index of the attenuator.
-
-        Raises:
-            InvalidOperationError if the telnet connection is not open.
-
-        Returns:
-            the current attenuation value as a float
-        """
-        if not self.is_open():
-            raise attenuator.InvalidOperationError("Connection not open!")
-
-        #       Potentially redundant safety check removed for the moment
-        #       if idx >= self.num_atten:
-        #           raise IndexError("Attenuator index out of range!", self.num_atten, idx)
-
-        atten_val = self._tnhelper.cmd("ATTN? " + str(idx + 1))
-
-        return float(atten_val)
diff --git a/src/antlion/controllers/attenuator_lib/minicircuits/http.py b/src/antlion/controllers/attenuator_lib/minicircuits/http.py
deleted file mode 100644
index 61c1e29..0000000
--- a/src/antlion/controllers/attenuator_lib/minicircuits/http.py
+++ /dev/null
@@ -1,166 +0,0 @@
-#!/usr/bin/env python3
-
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-Class for HTTP control of Mini-Circuits RCDAT series attenuators
-
-This class provides a wrapper to the MC-RCDAT attenuator modules for purposes
-of simplifying and abstracting control down to the basic necessities. It is
-not the intention of the module to expose all functionality, but to allow
-interchangeable HW to be used.
-
-See http://www.minicircuits.com/softwaredownload/Prog_Manual-6-Programmable_Attenuator.pdf
-"""
-
-import urllib
-from antlion.controllers import attenuator
-
-
-class AttenuatorInstrument(attenuator.AttenuatorInstrument):
-    """A specific HTTP-controlled implementation of AttenuatorInstrument for
-    Mini-Circuits RC-DAT attenuators.
-
-    With the exception of HTTP-specific commands, all functionality is defined
-    by the AttenuatorInstrument class.
-    """
-
-    def __init__(self, num_atten=1):
-        super(AttenuatorInstrument, self).__init__(num_atten)
-        self._ip_address = None
-        self._port = None
-        self._timeout = None
-        self.address = None
-
-    def open(self, host, port=80, timeout=2):
-        """Initializes the AttenuatorInstrument and queries basic information.
-
-        Args:
-            host: A valid hostname (IP address or DNS-resolvable name) to an
-            MC-DAT attenuator instrument.
-            port: An optional port number (defaults to http default 80)
-            timeout: An optional timeout for http requests
-        """
-        self._ip_address = host
-        self._port = port
-        self._timeout = timeout
-        self.address = host
-
-        att_req = urllib.request.urlopen(
-            "http://{}:{}/MN?".format(self._ip_address, self._port)
-        )
-        config_str = att_req.read().decode("utf-8").strip()
-        if not config_str.startswith("MN="):
-            raise attenuator.InvalidDataError(
-                "Attenuator returned invalid data. Attenuator returned: {}".format(
-                    config_str
-                )
-            )
-
-        config_str = config_str[len("MN=") :]
-        self.properties = dict(
-            zip(["model", "max_freq", "max_atten"], config_str.split("-", 2))
-        )
-        self.max_atten = float(self.properties["max_atten"])
-
-    def is_open(self):
-        """Returns True if the AttenuatorInstrument has an open connection.
-
-        Since this controller is based on HTTP requests, there is no connection
-        required and the attenuator is always ready to accept requests.
-        """
-        return True
-
-    def close(self):
-        """Closes the connection to the attenuator.
-
-        Since this controller is based on HTTP requests, there is no connection
-        teardowns required.
-        """
-
-    def set_atten(self, idx, value, strict=True, retry=False, **_):
-        """This function sets the attenuation of an attenuator given its index
-        in the instrument.
-
-        Args:
-            idx: A zero-based index that identifies a particular attenuator in
-                an instrument. For instruments that only have one channel, this
-                is ignored by the device.
-            value: A floating point value for nominal attenuation to be set.
-            strict: if True, function raises an error when given out of
-                bounds attenuation values, if false, the function sets out of
-                bounds values to 0 or max_atten.
-            retry: if True, command will be retried if possible
-
-        Raises:
-            InvalidDataError if the attenuator does not respond with the
-            expected output.
-        """
-        if not (0 <= idx < self.num_atten):
-            raise IndexError("Attenuator index out of range!", self.num_atten, idx)
-
-        if value > self.max_atten and strict:
-            raise ValueError("Attenuator value out of range!", self.max_atten, value)
-        # The actual device uses one-based index for channel numbers.
-        adjusted_value = min(max(0, value), self.max_atten)
-        att_req = urllib.request.urlopen(
-            "http://{}:{}/CHAN:{}:SETATT:{}".format(
-                self._ip_address, self._port, idx + 1, adjusted_value
-            ),
-            timeout=self._timeout,
-        )
-        att_resp = att_req.read().decode("utf-8").strip()
-        if att_resp != "1":
-            if retry:
-                self.set_atten(idx, value, strict, retry=False)
-            else:
-                raise attenuator.InvalidDataError(
-                    "Attenuator returned invalid data. Attenuator returned: {}".format(
-                        att_resp
-                    )
-                )
-
-    def get_atten(self, idx, retry=False, **_):
-        """Returns the current attenuation of the attenuator at the given index.
-
-        Args:
-            idx: The index of the attenuator.
-            retry: if True, command will be retried if possible
-
-        Raises:
-            InvalidDataError if the attenuator does not respond with the
-            expected outpu
-
-        Returns:
-            the current attenuation value as a float
-        """
-        if not (0 <= idx < self.num_atten):
-            raise IndexError("Attenuator index out of range!", self.num_atten, idx)
-        att_req = urllib.request.urlopen(
-            "http://{}:{}/CHAN:{}:ATT?".format(self._ip_address, self.port, idx + 1),
-            timeout=self._timeout,
-        )
-        att_resp = att_req.read().decode("utf-8").strip()
-        try:
-            atten_val = float(att_resp)
-        except:
-            if retry:
-                self.get_atten(idx, retry=False)
-            else:
-                raise attenuator.InvalidDataError(
-                    "Attenuator returned invalid data. Attenuator returned: {}".format(
-                        att_resp
-                    )
-                )
-        return atten_val
diff --git a/src/antlion/controllers/attenuator_lib/minicircuits/telnet.py b/src/antlion/controllers/attenuator_lib/minicircuits/telnet.py
deleted file mode 100644
index ad9f0ce..0000000
--- a/src/antlion/controllers/attenuator_lib/minicircuits/telnet.py
+++ /dev/null
@@ -1,146 +0,0 @@
-#!/usr/bin/env python3
-
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-Class for Telnet control of Mini-Circuits RCDAT series attenuators
-
-This class provides a wrapper to the MC-RCDAT attenuator modules for purposes
-of simplifying and abstracting control down to the basic necessities. It is
-not the intention of the module to expose all functionality, but to allow
-interchangeable HW to be used.
-
-See http://www.minicircuits.com/softwaredownload/Prog_Manual-6-Programmable_Attenuator.pdf
-"""
-
-from antlion.controllers import attenuator
-from antlion.controllers.attenuator_lib import _tnhelper
-
-
-class AttenuatorInstrument(attenuator.AttenuatorInstrument):
-    """A specific telnet-controlled implementation of AttenuatorInstrument for
-    Mini-Circuits RC-DAT attenuators.
-
-    With the exception of telnet-specific commands, all functionality is defined
-    by the AttenuatorInstrument class. Because telnet is a stateful protocol,
-    the functionality of AttenuatorInstrument is contingent upon a telnet
-    connection being established.
-    """
-
-    def __init__(self, num_atten=0):
-        super(AttenuatorInstrument, self).__init__(num_atten)
-        self._tnhelper = _tnhelper._TNHelper(
-            tx_cmd_separator="\r\n", rx_cmd_separator="\r\n", prompt=""
-        )
-        self.address = None
-
-    def __del__(self):
-        if self.is_open():
-            self.close()
-
-    def open(self, host, port=23):
-        """Opens a telnet connection to the desired AttenuatorInstrument and
-        queries basic information.
-
-        Args:
-            host: A valid hostname (IP address or DNS-resolvable name) to an
-            MC-DAT attenuator instrument.
-            port: An optional port number (defaults to telnet default 23)
-        """
-        self._tnhelper.open(host, port)
-        self.address = host
-
-        if self.num_atten == 0:
-            self.num_atten = 1
-
-        config_str = self._tnhelper.cmd("MN?")
-
-        if config_str.startswith("MN="):
-            config_str = config_str[len("MN=") :]
-
-        self.properties = dict(
-            zip(["model", "max_freq", "max_atten"], config_str.split("-", 2))
-        )
-        self.max_atten = float(self.properties["max_atten"])
-
-    def is_open(self):
-        """Returns True if the AttenuatorInstrument has an open connection."""
-        return bool(self._tnhelper.is_open())
-
-    def close(self):
-        """Closes the telnet connection.
-
-        This should be called as part of any teardown procedure prior to the
-        attenuator instrument leaving scope.
-        """
-        self._tnhelper.close()
-
-    def set_atten(self, idx, value, strict=True, retry=False):
-        """This function sets the attenuation of an attenuator given its index
-        in the instrument.
-
-        Args:
-            idx: A zero-based index that identifies a particular attenuator in
-                an instrument. For instruments that only have one channel, this
-                is ignored by the device.
-            value: A floating point value for nominal attenuation to be set.
-            strict: if True, function raises an error when given out of
-                bounds attenuation values, if false, the function sets out of
-                bounds values to 0 or max_atten.
-            retry: if True, command will be retried if possible
-
-        Raises:
-            InvalidOperationError if the telnet connection is not open.
-            IndexError if the index is not valid for this instrument.
-            ValueError if the requested set value is greater than the maximum
-                attenuation value.
-        """
-
-        if not self.is_open():
-            raise attenuator.InvalidOperationError("Connection not open!")
-
-        if idx >= self.num_atten:
-            raise IndexError("Attenuator index out of range!", self.num_atten, idx)
-
-        if value > self.max_atten and strict:
-            raise ValueError("Attenuator value out of range!", self.max_atten, value)
-        # The actual device uses one-based index for channel numbers.
-        adjusted_value = min(max(0, value), self.max_atten)
-        self._tnhelper.cmd("CHAN:%s:SETATT:%s" % (idx + 1, adjusted_value), retry=retry)
-
-    def get_atten(self, idx, retry=False):
-        """Returns the current attenuation of the attenuator at the given index.
-
-        Args:
-            idx: The index of the attenuator.
-            retry: if True, command will be retried if possible
-
-        Raises:
-            InvalidOperationError if the telnet connection is not open.
-
-        Returns:
-            the current attenuation value as a float
-        """
-        if not self.is_open():
-            raise attenuator.InvalidOperationError("Connection not open!")
-
-        if idx >= self.num_atten or idx < 0:
-            raise IndexError("Attenuator index out of range!", self.num_atten, idx)
-
-        if self.num_atten == 1:
-            atten_val_str = self._tnhelper.cmd(":ATT?", retry=retry)
-        else:
-            atten_val_str = self._tnhelper.cmd("CHAN:%s:ATT?" % (idx + 1), retry=retry)
-        atten_val = float(atten_val_str)
-        return atten_val
diff --git a/src/antlion/controllers/fuchsia_device.py b/src/antlion/controllers/fuchsia_device.py
deleted file mode 100644
index 414afb4..0000000
--- a/src/antlion/controllers/fuchsia_device.py
+++ /dev/null
@@ -1,974 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from typing import Optional, List
-import json
-import logging
-import os
-import re
-import subprocess
-import textwrap
-import time
-
-from antlion import context
-from antlion import logger as acts_logger
-from antlion import signals
-from antlion import utils
-from antlion.capabilities.ssh import DEFAULT_SSH_PORT, SSHConfig, SSHError
-from antlion.controllers import pdu
-from antlion.controllers.fuchsia_lib.ffx import FFX
-from antlion.controllers.fuchsia_lib.lib_controllers.netstack_controller import (
-    NetstackController,
-)
-from antlion.controllers.fuchsia_lib.lib_controllers.wlan_controller import (
-    WlanController,
-)
-from antlion.controllers.fuchsia_lib.lib_controllers.wlan_policy_controller import (
-    WlanPolicyController,
-)
-from antlion.controllers.fuchsia_lib.package_server import PackageServer
-from antlion.controllers.fuchsia_lib.sl4f import SL4F
-from antlion.controllers.fuchsia_lib.ssh import (
-    DEFAULT_SSH_PRIVATE_KEY,
-    DEFAULT_SSH_USER,
-    FuchsiaSSHProvider,
-)
-from antlion.controllers.fuchsia_lib.utils_lib import flash
-from antlion.utils import get_fuchsia_mdns_ipv6_address, get_interface_ip_addresses
-
-MOBLY_CONTROLLER_CONFIG_NAME = "FuchsiaDevice"
-ACTS_CONTROLLER_REFERENCE_NAME = "fuchsia_devices"
-
-FUCHSIA_DEVICE_EMPTY_CONFIG_MSG = "Configuration is empty, abort!"
-FUCHSIA_DEVICE_NOT_LIST_CONFIG_MSG = "Configuration should be a list, abort!"
-FUCHSIA_DEVICE_INVALID_CONFIG = (
-    "Fuchsia device config must be either a str "
-    "or dict. abort! Invalid element %i in %r"
-)
-FUCHSIA_DEVICE_NO_IP_MSG = "No IP address specified, abort!"
-FUCHSIA_COULD_NOT_GET_DESIRED_STATE = "Could not %s %s."
-FUCHSIA_INVALID_CONTROL_STATE = "Invalid control state (%s). abort!"
-
-FUCHSIA_TIME_IN_NANOSECONDS = 1000000000
-
-SL4F_APK_NAME = "com.googlecode.android_scripting"
-DAEMON_INIT_TIMEOUT_SEC = 1
-
-DAEMON_ACTIVATED_STATES = ["running", "start"]
-DAEMON_DEACTIVATED_STATES = ["stop", "stopped"]
-
-FUCHSIA_RECONNECT_AFTER_REBOOT_TIME = 5
-
-CHANNEL_OPEN_TIMEOUT = 5
-
-FUCHSIA_REBOOT_TYPE_SOFT = "soft"
-FUCHSIA_REBOOT_TYPE_SOFT_AND_FLASH = "flash"
-FUCHSIA_REBOOT_TYPE_HARD = "hard"
-
-FUCHSIA_DEFAULT_CONNECT_TIMEOUT = 90
-FUCHSIA_DEFAULT_COMMAND_TIMEOUT = 60
-
-FUCHSIA_DEFAULT_CLEAN_UP_COMMAND_TIMEOUT = 15
-
-FUCHSIA_COUNTRY_CODE_TIMEOUT = 15
-FUCHSIA_DEFAULT_COUNTRY_CODE_US = "US"
-
-MDNS_LOOKUP_RETRY_MAX = 3
-
-VALID_ASSOCIATION_MECHANISMS = {None, "policy", "drivers"}
-IP_ADDRESS_TIMEOUT = 15
-
-
-class FuchsiaDeviceError(signals.ControllerError):
-    pass
-
-
-class FuchsiaConfigError(signals.ControllerError):
-    """Incorrect FuchsiaDevice configuration."""
-
-
-def create(configs):
-    if not configs:
-        raise FuchsiaDeviceError(FUCHSIA_DEVICE_EMPTY_CONFIG_MSG)
-    elif not isinstance(configs, list):
-        raise FuchsiaDeviceError(FUCHSIA_DEVICE_NOT_LIST_CONFIG_MSG)
-    for index, config in enumerate(configs):
-        if isinstance(config, str):
-            configs[index] = {"ip": config}
-        elif not isinstance(config, dict):
-            raise FuchsiaDeviceError(FUCHSIA_DEVICE_INVALID_CONFIG % (index, configs))
-    return get_instances(configs)
-
-
-def destroy(fds):
-    for fd in fds:
-        fd.clean_up()
-        del fd
-
-
-def get_info(fds):
-    """Get information on a list of FuchsiaDevice objects.
-
-    Args:
-        fds: A list of FuchsiaDevice objects.
-
-    Returns:
-        A list of dict, each representing info for FuchsiaDevice objects.
-    """
-    device_info = []
-    for fd in fds:
-        info = {"ip": fd.ip}
-        device_info.append(info)
-    return device_info
-
-
-def get_instances(fds_conf_data):
-    """Create FuchsiaDevice instances from a list of Fuchsia ips.
-
-    Args:
-        fds_conf_data: A list of dicts that contain Fuchsia device info.
-
-    Returns:
-        A list of FuchsiaDevice objects.
-    """
-
-    return [FuchsiaDevice(fd_conf_data) for fd_conf_data in fds_conf_data]
-
-
-class FuchsiaDevice:
-    """Class representing a Fuchsia device.
-
-    Each object of this class represents one Fuchsia device in ACTS.
-
-    Attributes:
-        ip: The full address or Fuchsia abstract name to contact the Fuchsia
-            device at
-        log: A logger object.
-        ssh_port: The SSH TCP port number of the Fuchsia device.
-        sl4f_port: The SL4F HTTP port number of the Fuchsia device.
-        ssh_config: The ssh_config for connecting to the Fuchsia device.
-    """
-
-    def __init__(self, fd_conf_data) -> None:
-        self.conf_data = fd_conf_data
-        if "ip" not in fd_conf_data:
-            raise FuchsiaDeviceError(FUCHSIA_DEVICE_NO_IP_MSG)
-        self.ip: str = fd_conf_data["ip"]
-        self.orig_ip: str = fd_conf_data["ip"]
-        self.sl4f_port: int = fd_conf_data.get("sl4f_port", 80)
-        self.ssh_username: str = fd_conf_data.get("ssh_username", DEFAULT_SSH_USER)
-        self.ssh_port: int = fd_conf_data.get("ssh_port", DEFAULT_SSH_PORT)
-        self.ssh_binary_path: str = fd_conf_data.get("ssh_binary_path", "ssh")
-
-        def expand(path: str) -> str:
-            return os.path.expandvars(os.path.expanduser(path))
-
-        def path_from_config(name: str, default: Optional[str] = None) -> Optional[str]:
-            path = fd_conf_data.get(name, default)
-            if not path:
-                return path
-            return expand(path)
-
-        def assert_exists(name: str, path: str) -> None:
-            if not path:
-                raise FuchsiaDeviceError(
-                    f'Please specify "${name}" in your configuration file'
-                )
-            if not os.path.exists(path):
-                raise FuchsiaDeviceError(
-                    f'Please specify a correct "${name}" in your configuration '
-                    f'file: "{path}" does not exist'
-                )
-
-        self.specific_image: Optional[str] = path_from_config("specific_image")
-        if self.specific_image:
-            assert_exists("specific_image", self.specific_image)
-
-        # Path to a tar.gz archive with pm and amber-files, as necessary for
-        # starting a package server.
-        self.packages_archive_path: Optional[str] = path_from_config(
-            "packages_archive_path", None
-        )
-        if self.packages_archive_path:
-            assert_exists("packages_archive_path", self.packages_archive_path)
-
-        def required_path_from_config(name: str, default: Optional[str] = None) -> str:
-            path = path_from_config(name, default)
-            assert_exists(name, path)
-            return path
-
-        self.ssh_priv_key: str = required_path_from_config(
-            "ssh_priv_key", DEFAULT_SSH_PRIVATE_KEY
-        )
-        self.ffx_binary_path: str = required_path_from_config(
-            "ffx_binary_path", "${FUCHSIA_DIR}/.jiri_root/bin/ffx"
-        )
-
-        self.authorized_file: Optional[str] = fd_conf_data.get(
-            "authorized_file_loc", None
-        )
-        self.serial_number: Optional[str] = fd_conf_data.get("serial_number", None)
-        self.device_type: Optional[str] = fd_conf_data.get("device_type", None)
-        self.product_type: Optional[str] = fd_conf_data.get("product_type", None)
-        self.board_type: Optional[str] = fd_conf_data.get("board_type", None)
-        self.build_number: Optional[str] = fd_conf_data.get("build_number", None)
-        self.build_type: Optional[str] = fd_conf_data.get("build_type", None)
-
-        self.mdns_name: Optional[str] = fd_conf_data.get("mdns_name", None)
-
-        self.hard_reboot_on_fail: bool = fd_conf_data.get("hard_reboot_on_fail", False)
-        self.take_bug_report_on_fail: bool = fd_conf_data.get(
-            "take_bug_report_on_fail", False
-        )
-        self.device_pdu_config = fd_conf_data.get("PduDevice", None)
-        self.config_country_code: str = fd_conf_data.get(
-            "country_code", FUCHSIA_DEFAULT_COUNTRY_CODE_US
-        ).upper()
-
-        output_path = context.get_current_context().get_base_output_path()
-        self.ssh_config = os.path.join(output_path, "ssh_config_{}".format(self.ip))
-        self._generate_ssh_config(self.ssh_config)
-
-        # WLAN interface info is populated inside configure_wlan
-        self.wlan_client_interfaces = {}
-        self.wlan_ap_interfaces = {}
-        self.wlan_client_test_interface_name = fd_conf_data.get(
-            "wlan_client_test_interface", None
-        )
-        self.wlan_ap_test_interface_name = fd_conf_data.get(
-            "wlan_ap_test_interface", None
-        )
-        self.wlan_features: List[str] = fd_conf_data.get("wlan_features", [])
-
-        # Whether to use 'policy' or 'drivers' for WLAN connect/disconnect calls
-        # If set to None, wlan is not configured.
-        self.association_mechanism = None
-        # Defaults to policy layer, unless otherwise specified in the config
-        self.default_association_mechanism = fd_conf_data.get(
-            "association_mechanism", "policy"
-        )
-
-        # Whether to clear and preserve existing saved networks and client
-        # connections state, to be restored at device teardown.
-        self.default_preserve_saved_networks = fd_conf_data.get(
-            "preserve_saved_networks", True
-        )
-
-        if not utils.is_valid_ipv4_address(self.ip) and not utils.is_valid_ipv6_address(
-            self.ip
-        ):
-            mdns_ip = None
-            for retry_counter in range(MDNS_LOOKUP_RETRY_MAX):
-                mdns_ip = get_fuchsia_mdns_ipv6_address(self.ip)
-                if mdns_ip:
-                    break
-                else:
-                    time.sleep(1)
-            if mdns_ip and utils.is_valid_ipv6_address(mdns_ip):
-                # self.ip was actually an mdns name. Use it for self.mdns_name
-                # unless one was explicitly provided.
-                self.mdns_name = self.mdns_name or self.ip
-                self.ip = mdns_ip
-            else:
-                raise ValueError("Invalid IP: %s" % self.ip)
-
-        self.log = acts_logger.create_tagged_trace_logger(
-            "FuchsiaDevice | %s" % self.orig_ip
-        )
-
-        self.ping_rtt_match = re.compile(
-            r"RTT Min/Max/Avg " r"= \[ (.*?) / (.*?) / (.*?) \] ms"
-        )
-        self.serial = re.sub("[.:%]", "_", self.ip)
-        log_path_base = getattr(logging, "log_path", "/tmp/logs")
-        self.log_path = os.path.join(log_path_base, "FuchsiaDevice%s" % self.serial)
-        self.fuchsia_log_file_path = os.path.join(
-            self.log_path, "fuchsialog_%s_debug.txt" % self.serial
-        )
-        self.log_process = None
-        self.package_server = None
-
-        self.init_controllers()
-
-    @property
-    def sl4f(self):
-        """Get the sl4f module configured for this device.
-
-        The sl4f module uses lazy-initialization; it will initialize an sl4f
-        server on the host device when it is required.
-        """
-        if not hasattr(self, "_sl4f"):
-            self._sl4f = SL4F(self.ssh, self.sl4f_port)
-            self.log.info("Started SL4F server")
-        return self._sl4f
-
-    @sl4f.deleter
-    def sl4f(self):
-        if not hasattr(self, "_sl4f"):
-            return
-        self.log.debug("Cleaning up SL4F")
-        del self._sl4f
-
-    @property
-    def ssh(self):
-        """Get the SSH provider module configured for this device."""
-        if not hasattr(self, "_ssh"):
-            if not self.ssh_port:
-                raise FuchsiaConfigError(
-                    'Must provide "ssh_port: <int>" in the device config'
-                )
-            if not self.ssh_priv_key:
-                raise FuchsiaConfigError(
-                    'Must provide "ssh_priv_key: <file path>" in the device config'
-                )
-            self._ssh = FuchsiaSSHProvider(
-                SSHConfig(
-                    self.ssh_username,
-                    self.ip,
-                    self.ssh_priv_key,
-                    port=self.ssh_port,
-                    ssh_binary=self.ssh_binary_path,
-                )
-            )
-        return self._ssh
-
-    @ssh.deleter
-    def ssh(self):
-        if not hasattr(self, "_ssh"):
-            return
-        self.log.debug("Cleaning up SSH")
-        del self._ssh
-
-    @property
-    def ffx(self):
-        """Get the ffx module configured for this device.
-
-        The ffx module uses lazy-initialization; it will initialize an ffx
-        connection to the device when it is required.
-
-        If ffx needs to be reinitialized, delete the "ffx" property and attempt
-        access again. Note re-initialization will interrupt any running ffx
-        calls.
-        """
-        if not hasattr(self, "_ffx"):
-            if not self.mdns_name:
-                raise FuchsiaConfigError(
-                    'Must provide "mdns_name: <device mDNS name>" in the device config'
-                )
-            self._ffx = FFX(
-                self.ffx_binary_path, self.mdns_name, self.ip, self.ssh_priv_key
-            )
-        return self._ffx
-
-    @ffx.deleter
-    def ffx(self):
-        if not hasattr(self, "_ffx"):
-            return
-        self.log.debug("Cleaning up ffx")
-        self._ffx.clean_up()
-        del self._ffx
-
-    def _generate_ssh_config(self, file_path: str):
-        """Generate and write an SSH config for Fuchsia to disk.
-
-        Args:
-            file_path: Path to write the generated SSH config
-        """
-        content = textwrap.dedent(
-            f"""\
-            Host *
-                CheckHostIP no
-                StrictHostKeyChecking no
-                ForwardAgent no
-                ForwardX11 no
-                GSSAPIDelegateCredentials no
-                UserKnownHostsFile /dev/null
-                User fuchsia
-                IdentitiesOnly yes
-                IdentityFile {self.ssh_priv_key}
-                ControlPersist yes
-                ControlMaster auto
-                ControlPath /tmp/fuchsia--%r@%h:%p
-                ServerAliveInterval 1
-                ServerAliveCountMax 1
-                LogLevel ERROR
-            """
-        )
-
-        with open(file_path, "w") as file:
-            file.write(content)
-
-    def init_controllers(self):
-        # Contains Netstack functions
-        self.netstack_controller = NetstackController(self)
-
-        # Contains WLAN core functions
-        self.wlan_controller = WlanController(self)
-
-        # Contains WLAN policy functions like save_network, remove_network, etc
-        self.wlan_policy_controller = WlanPolicyController(self.sl4f, self.ssh)
-
-    def start_package_server(self):
-        if not self.packages_archive_path:
-            self.log.warn(
-                "packages_archive_path is not specified. "
-                "Assuming a package server is already running and configured on "
-                "the DUT. If this is not the case, either run your own package "
-                "server, or configure these fields appropriately. "
-                "This is usually required for the Fuchsia iPerf3 client or "
-                "other testing utilities not on device cache."
-            )
-            return
-        if self.package_server:
-            self.log.warn(
-                "Skipping to start the package server since is already running"
-            )
-            return
-
-        self.package_server = PackageServer(self.packages_archive_path)
-        self.package_server.start()
-        self.package_server.configure_device(self.ssh)
-
-    def run_commands_from_config(self, cmd_dicts):
-        """Runs commands on the Fuchsia device from the config file. Useful for
-        device and/or Fuchsia specific configuration.
-
-        Args:
-            cmd_dicts: list of dictionaries containing the following
-                'cmd': string, command to run on device
-                'timeout': int, seconds to wait for command to run (optional)
-                'skip_status_code_check': bool, disregard errors if true
-
-        Raises:
-            FuchsiaDeviceError: if any of the commands return a non-zero status
-                code and skip_status_code_check is false or undefined.
-        """
-        for cmd_dict in cmd_dicts:
-            try:
-                cmd = cmd_dict["cmd"]
-            except KeyError:
-                raise FuchsiaDeviceError(
-                    'To run a command via config, you must provide key "cmd" '
-                    "containing the command string."
-                )
-
-            timeout = cmd_dict.get("timeout", FUCHSIA_DEFAULT_COMMAND_TIMEOUT)
-            # Catch both boolean and string values from JSON
-            skip_status_code_check = (
-                "true" == str(cmd_dict.get("skip_status_code_check", False)).lower()
-            )
-
-            if skip_status_code_check:
-                self.log.info(f'Running command "{cmd}" and ignoring result.')
-            else:
-                self.log.info(f'Running command "{cmd}".')
-
-            try:
-                result = self.ssh.run(cmd, timeout_sec=timeout)
-                self.log.debug(result)
-            except SSHError as e:
-                if not skip_status_code_check:
-                    raise FuchsiaDeviceError(
-                        "Failed device specific commands for initial configuration"
-                    ) from e
-
-    def configure_wlan(
-        self, association_mechanism: str = None, preserve_saved_networks: bool = None
-    ) -> None:
-        """
-        Readies device for WLAN functionality. If applicable, connects to the
-        policy layer and clears/saves preexisting saved networks.
-
-        Args:
-            association_mechanism: either 'policy' or 'drivers'. If None, uses
-                the default value from init (can be set by ACTS config)
-            preserve_saved_networks: whether to clear existing saved
-                networks, and preserve them for restoration later. If None, uses
-                the default value from init (can be set by ACTS config)
-
-        Raises:
-            FuchsiaDeviceError, if configuration fails
-        """
-
-        # Set the country code US by default, or country code provided
-        # in ACTS config
-        self.configure_regulatory_domain(self.config_country_code)
-
-        # If args aren't provided, use the defaults, which can be set in the
-        # config.
-        if association_mechanism is None:
-            association_mechanism = self.default_association_mechanism
-        if preserve_saved_networks is None:
-            preserve_saved_networks = self.default_preserve_saved_networks
-
-        if association_mechanism not in VALID_ASSOCIATION_MECHANISMS:
-            raise FuchsiaDeviceError(
-                "Invalid FuchsiaDevice association_mechanism: %s"
-                % association_mechanism
-            )
-
-        # Allows for wlan to be set up differently in different tests
-        if self.association_mechanism:
-            self.log.info("Deconfiguring WLAN")
-            self.deconfigure_wlan()
-
-        self.association_mechanism = association_mechanism
-
-        self.log.info(
-            "Configuring WLAN w/ association mechanism: " f"{association_mechanism}"
-        )
-        if association_mechanism == "drivers":
-            self.log.warn(
-                "You may encounter unusual device behavior when using the "
-                "drivers directly for WLAN. This should be reserved for "
-                "debugging specific issues. Normal test runs should use the "
-                "policy layer."
-            )
-            if preserve_saved_networks:
-                self.log.warn(
-                    "Unable to preserve saved networks when using drivers "
-                    "association mechanism (requires policy layer control)."
-                )
-        else:
-            # This requires SL4F calls, so it can only happen with actual
-            # devices, not with unit tests.
-            self.wlan_policy_controller.configure_wlan(preserve_saved_networks)
-
-        # Retrieve WLAN client and AP interfaces
-        self.wlan_controller.update_wlan_interfaces()
-
-    def deconfigure_wlan(self):
-        """
-        Stops WLAN functionality (if it has been started). Used to allow
-        different tests to use WLAN differently (e.g. some tests require using
-        wlan policy, while the abstract wlan_device can be setup to use policy
-        or drivers)
-
-        Raises:
-            FuchsiaDeviveError, if deconfigure fails.
-        """
-        if not self.association_mechanism:
-            self.log.debug("WLAN not configured before deconfigure was called.")
-            return
-        # If using policy, stop client connections. Otherwise, just clear
-        # variables.
-        if self.association_mechanism != "drivers":
-            self.wlan_policy_controller._deconfigure_wlan()
-        self.association_mechanism = None
-
-    def reboot(
-        self,
-        use_ssh: bool = False,
-        unreachable_timeout: int = FUCHSIA_DEFAULT_CONNECT_TIMEOUT,
-        reboot_type: str = FUCHSIA_REBOOT_TYPE_SOFT,
-        testbed_pdus: List[pdu.PduDevice] = [],
-    ) -> None:
-        """Reboot a FuchsiaDevice.
-
-        Soft reboots the device, verifies it becomes unreachable, then verifies
-        it comes back online. Re-initializes services so the tests can continue.
-
-        Args:
-            use_ssh: if True, use fuchsia shell command via ssh to reboot
-                instead of SL4F.
-            unreachable_timeout: time to wait for device to become unreachable.
-            reboot_type: 'soft', 'hard' or 'flash'.
-            testbed_pdus: all testbed PDUs.
-
-        Raises:
-            ConnectionError, if device fails to become unreachable or fails to
-                come back up.
-        """
-        if reboot_type == FUCHSIA_REBOOT_TYPE_SOFT:
-            if use_ssh:
-                self.log.info("Soft rebooting via SSH")
-                try:
-                    self.ssh.run(
-                        "dm reboot", timeout_sec=FUCHSIA_RECONNECT_AFTER_REBOOT_TIME
-                    )
-                except SSHError as e:
-                    if "closed by remote host" not in e.result.stderr:
-                        raise e
-            else:
-                self.log.info("Soft rebooting via SL4F")
-                self.sl4f.hardware_power_statecontrol_lib.suspendReboot(timeout=3)
-            self.ssh.wait_until_unreachable(timeout_sec=unreachable_timeout)
-
-        elif reboot_type == FUCHSIA_REBOOT_TYPE_HARD:
-            self.log.info("Hard rebooting via PDU")
-            if not testbed_pdus:
-                raise AttributeError(
-                    "Testbed PDUs must be supplied " "to hard reboot a fuchsia_device."
-                )
-            device_pdu, device_pdu_port = pdu.get_pdu_port_for_device(
-                self.device_pdu_config, testbed_pdus
-            )
-            self.log.info("Killing power to FuchsiaDevice")
-            device_pdu.off(str(device_pdu_port))
-            self.ssh.wait_until_unreachable(timeout_sec=unreachable_timeout)
-            self.log.info("Restoring power to FuchsiaDevice")
-            device_pdu.on(str(device_pdu_port))
-
-        elif reboot_type == FUCHSIA_REBOOT_TYPE_SOFT_AND_FLASH:
-            flash(self, use_ssh, FUCHSIA_RECONNECT_AFTER_REBOOT_TIME)
-
-        else:
-            raise ValueError("Invalid reboot type: %s" % reboot_type)
-
-        # Cleanup services
-        self.stop_services()
-
-        # TODO (b/246852449): Move configure_wlan to other controllers.
-        # If wlan was configured before reboot, it must be configured again
-        # after rebooting, as it was before reboot. No preserving should occur.
-        if self.association_mechanism:
-            pre_reboot_association_mechanism = self.association_mechanism
-            # Prevent configure_wlan from thinking it needs to deconfigure first
-            self.association_mechanism = None
-            self.configure_wlan(
-                association_mechanism=pre_reboot_association_mechanism,
-                preserve_saved_networks=False,
-            )
-
-        self.log.info("Device has rebooted")
-
-    def version(self) -> str:
-        """Return the version of Fuchsia running on the device."""
-        return self.sl4f.device_lib.get_version()["result"]
-
-    def device_name(self) -> str:
-        """Return the name of the device."""
-        return self.sl4f.device_lib.get_device_name()["result"]
-
-    def product_name(self) -> str:
-        """Return the product name of the device."""
-        return self.sl4f.device_lib.get_product_name()["result"]
-
-    def ping(
-        self,
-        dest_ip,
-        count=3,
-        interval=1000,
-        timeout=1000,
-        size=25,
-        additional_ping_params=None,
-    ):
-        """Pings from a Fuchsia device to an IPv4 address or hostname
-
-        Args:
-            dest_ip: (str) The ip or hostname to ping.
-            count: (int) How many icmp packets to send.
-            interval: (int) How long to wait between pings (ms)
-            timeout: (int) How long to wait before having the icmp packet
-                timeout (ms).
-            size: (int) Size of the icmp packet.
-            additional_ping_params: (str) command option flags to
-                append to the command string
-
-        Returns:
-            A dictionary for the results of the ping.  The dictionary contains
-            the following items:
-                status: Whether the ping was successful.
-                rtt_min: The minimum round trip time of the ping.
-                rtt_max: The minimum round trip time of the ping.
-                rtt_avg: The avg round trip time of the ping.
-                stdout: The standard out of the ping command.
-                stderr: The standard error of the ping command.
-        """
-        rtt_min = None
-        rtt_max = None
-        rtt_avg = None
-        self.log.debug("Pinging %s..." % dest_ip)
-        if not additional_ping_params:
-            additional_ping_params = ""
-
-        try:
-            ping_result = self.ssh.run(
-                f"ping -c {count} -i {interval} -t {timeout} -s {size} "
-                f"{additional_ping_params} {dest_ip}"
-            )
-        except SSHError as e:
-            ping_result = e.result
-
-        if ping_result.stderr:
-            status = False
-        else:
-            status = True
-            rtt_line = ping_result.stdout.split("\n")[:-1]
-            rtt_line = rtt_line[-1]
-            rtt_stats = re.search(self.ping_rtt_match, rtt_line)
-            rtt_min = rtt_stats.group(1)
-            rtt_max = rtt_stats.group(2)
-            rtt_avg = rtt_stats.group(3)
-        return {
-            "status": status,
-            "rtt_min": rtt_min,
-            "rtt_max": rtt_max,
-            "rtt_avg": rtt_avg,
-            "stdout": ping_result.stdout,
-            "stderr": ping_result.stderr,
-        }
-
-    def can_ping(
-        self,
-        dest_ip,
-        count=1,
-        interval=1000,
-        timeout=1000,
-        size=25,
-        additional_ping_params=None,
-    ) -> bool:
-        """Returns whether fuchsia device can ping a given dest address"""
-        ping_result = self.ping(
-            dest_ip,
-            count=count,
-            interval=interval,
-            timeout=timeout,
-            size=size,
-            additional_ping_params=additional_ping_params,
-        )
-        return ping_result["status"]
-
-    def clean_up(self):
-        """Cleans up the FuchsiaDevice object, releases any resources it
-        claimed, and restores saved networks if applicable. For reboots, use
-        clean_up_services only.
-
-        Note: Any exceptions thrown in this method must be caught and handled,
-        ensuring that clean_up_services is run. Otherwise, the syslog listening
-        thread will never join and will leave tests hanging.
-        """
-        # If and only if wlan is configured, and using the policy layer
-        if self.association_mechanism == "policy":
-            try:
-                self.wlan_policy_controller.clean_up()
-            except Exception as err:
-                self.log.warning("Unable to clean up WLAN Policy layer: %s" % err)
-
-        self.stop_services()
-
-        if self.package_server:
-            self.package_server.clean_up()
-
-    def get_interface_ip_addresses(self, interface):
-        return get_interface_ip_addresses(self, interface)
-
-    def wait_for_ipv4_addr(self, interface: str) -> None:
-        """Checks if device has an ipv4 private address. Sleeps 1 second between
-        retries.
-
-        Args:
-            interface: name of interface from which to get ipv4 address.
-
-        Raises:
-            ConnectionError, if device does not have an ipv4 address after all
-            timeout.
-        """
-        self.log.info(
-            f"Checking for valid ipv4 addr. Retry {IP_ADDRESS_TIMEOUT} seconds."
-        )
-        timeout = time.time() + IP_ADDRESS_TIMEOUT
-        while time.time() < timeout:
-            ip_addrs = self.get_interface_ip_addresses(interface)
-
-            if len(ip_addrs["ipv4_private"]) > 0:
-                self.log.info(
-                    "Device has an ipv4 address: " f"{ip_addrs['ipv4_private'][0]}"
-                )
-                break
-            else:
-                self.log.debug(
-                    "Device does not yet have an ipv4 address...retrying in 1 "
-                    "second."
-                )
-                time.sleep(1)
-        else:
-            raise ConnectionError("Device failed to get an ipv4 address.")
-
-    def wait_for_ipv6_addr(self, interface: str) -> None:
-        """Checks if device has an ipv6 private local address. Sleeps 1 second
-        between retries.
-
-        Args:
-            interface: name of interface from which to get ipv6 address.
-
-        Raises:
-            ConnectionError, if device does not have an ipv6 address after all
-            timeout.
-        """
-        self.log.info(
-            f"Checking for valid ipv6 addr. Retry {IP_ADDRESS_TIMEOUT} seconds."
-        )
-        timeout = time.time() + IP_ADDRESS_TIMEOUT
-        while time.time() < timeout:
-            ip_addrs = self.get_interface_ip_addresses(interface)
-            if len(ip_addrs["ipv6_private_local"]) > 0:
-                self.log.info(
-                    "Device has an ipv6 private local address: "
-                    f"{ip_addrs['ipv6_private_local'][0]}"
-                )
-                break
-            else:
-                self.log.debug(
-                    "Device does not yet have an ipv6 address...retrying in 1 "
-                    "second."
-                )
-                time.sleep(1)
-        else:
-            raise ConnectionError("Device failed to get an ipv6 address.")
-
-    def check_connect_response(self, connect_response):
-        if connect_response.get("error") is None:
-            # Checks the response from SL4F and if there is no error, check
-            # the result.
-            connection_result = connect_response.get("result")
-            if not connection_result:
-                # Ideally the error would be present but just outputting a log
-                # message until available.
-                self.log.debug("Connect call failed, aborting!")
-                return False
-            else:
-                # Returns True if connection was successful.
-                return True
-        else:
-            # the response indicates an error - log and raise failure
-            self.log.debug(
-                "Aborting! - Connect call failed with error: %s"
-                % connect_response.get("error")
-            )
-            return False
-
-    def check_disconnect_response(self, disconnect_response):
-        if disconnect_response.get("error") is None:
-            # Returns True if disconnect was successful.
-            return True
-        else:
-            # the response indicates an error - log and raise failure
-            self.log.debug(
-                "Disconnect call failed with error: %s"
-                % disconnect_response.get("error")
-            )
-            return False
-
-    # TODO(fxb/64657): Determine more stable solution to country code config on
-    # device bring up.
-    def configure_regulatory_domain(self, desired_country_code):
-        """Allows the user to set the device country code via ACTS config
-
-        Usage:
-            In FuchsiaDevice config, add "country_code": "<CC>"
-        """
-        # Country code can be None, from antlion config.
-        if desired_country_code:
-            desired_country_code = desired_country_code.upper()
-            response = self.sl4f.regulatory_region_lib.setRegion(desired_country_code)
-            if response.get("error"):
-                raise FuchsiaDeviceError(
-                    "Failed to set regulatory domain. Err: %s" % response["error"]
-                )
-
-            phy_list_response = self.sl4f.wlan_lib.wlanPhyIdList()
-            if phy_list_response.get("error"):
-                raise FuchsiaDeviceError(
-                    f'Failed to get phy list. Err: {response["error"]}'
-                )
-            phy_list = phy_list_response.get("result")
-            if not phy_list:
-                raise FuchsiaDeviceError("No phy available in phy list")
-            phy_id = phy_list[0]
-
-            end_time = time.time() + FUCHSIA_COUNTRY_CODE_TIMEOUT
-            while time.time() < end_time:
-                ascii_cc = self.sl4f.wlan_lib.wlanGetCountry(phy_id).get("result")
-                # Convert ascii_cc to string, then compare
-                if ascii_cc and (
-                    "".join(chr(c) for c in ascii_cc).upper() == desired_country_code
-                ):
-                    self.log.debug(
-                        "Country code successfully set to %s." % desired_country_code
-                    )
-                    return
-                self.log.debug("Country code not yet updated. Retrying.")
-                time.sleep(1)
-            raise FuchsiaDeviceError(
-                "Country code never updated to %s" % desired_country_code
-            )
-
-    def stop_services(self):
-        """Stops ffx daemon, deletes SSH property, and deletes SL4F property."""
-        self.log.info("Stopping host device services.")
-        del self.sl4f
-        del self.ssh
-        del self.ffx
-
-    def load_config(self, config):
-        pass
-
-    def take_bug_report(self, test_name=None, begin_time=None):
-        """Takes a bug report on the device and stores it in a file.
-
-        Args:
-            test_name: DEPRECATED. Do not specify this argument; it is only used
-                for logging. Name of the test case that triggered this bug
-                report.
-            begin_time: DEPRECATED. Do not specify this argument; it allows
-                overwriting of bug reports when this function is called several
-                times in one test. Epoch time when the test started. If not
-                specified, the current time will be used.
-        """
-        if test_name:
-            self.log.info(f"Taking snapshot of {self.mdns_name} for {test_name}")
-        else:
-            self.log.info(f"Taking snapshot of {self.mdns_name}")
-
-        epoch = begin_time if begin_time else utils.get_current_epoch_time()
-        time_stamp = acts_logger.normalize_log_line_timestamp(
-            acts_logger.epoch_to_log_line_timestamp(epoch)
-        )
-        out_dir = context.get_current_context().get_full_output_path()
-        out_path = os.path.join(out_dir, f"{self.mdns_name}_{time_stamp}.zip")
-
-        try:
-            subprocess.run(
-                [f"ssh -F {self.ssh_config} {self.ip} snapshot > {out_path}"],
-                shell=True,
-            )
-            self.log.info(f"Snapshot saved to {out_path}")
-        except Exception as err:
-            self.log.error(f"Failed to take snapshot: {err}")
-
-    def take_bt_snoop_log(self, custom_name=None):
-        """Takes a the bt-snoop log from the device and stores it in a file
-        in a pcap format.
-        """
-        bt_snoop_path = context.get_current_context().get_full_output_path()
-        time_stamp = acts_logger.normalize_log_line_timestamp(
-            acts_logger.epoch_to_log_line_timestamp(time.time())
-        )
-        out_name = "FuchsiaDevice%s_%s" % (
-            self.serial,
-            time_stamp.replace(" ", "_").replace(":", "-"),
-        )
-        out_name = "%s.pcap" % out_name
-        if custom_name:
-            out_name = "%s_%s.pcap" % (self.serial, custom_name)
-        else:
-            out_name = "%s.pcap" % out_name
-        full_out_path = os.path.join(bt_snoop_path, out_name)
-        bt_snoop_data = self.ssh.run("bt-snoop-cli -d -f pcap").raw_stdout
-        bt_snoop_file = open(full_out_path, "wb")
-        bt_snoop_file.write(bt_snoop_data)
-        bt_snoop_file.close()
diff --git a/src/antlion/controllers/fuchsia_lib/device_lib.py b/src/antlion/controllers/fuchsia_lib/device_lib.py
deleted file mode 100644
index f7ad6b6..0000000
--- a/src/antlion/controllers/fuchsia_lib/device_lib.py
+++ /dev/null
@@ -1,37 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2023 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion.controllers.fuchsia_lib.base_lib import BaseLib
-
-
-class DeviceLib(BaseLib):
-    def __init__(self, addr: str) -> None:
-        super().__init__(addr, "device")
-
-    def get_device_name(self) -> str:
-        """Get the device name."""
-
-        return self.send_command("device_facade.GetDeviceName", {})
-
-    def get_product_name(self) -> str:
-        """Get the product name."""
-
-        return self.send_command("device_facade.GetProduct", {})
-
-    def get_version(self):
-        """Get the device version."""
-
-        return self.send_command("device_facade.GetVersion", {})
diff --git a/src/antlion/controllers/fuchsia_lib/ffx.py b/src/antlion/controllers/fuchsia_lib/ffx.py
deleted file mode 100644
index 3db6c8d..0000000
--- a/src/antlion/controllers/fuchsia_lib/ffx.py
+++ /dev/null
@@ -1,336 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import json
-import os
-import tempfile
-import subprocess
-import time
-
-from pathlib import Path, PurePath
-from shutil import rmtree
-from typing import Any, MutableMapping, Optional
-
-from antlion import context
-from antlion import logger
-from antlion import signals
-from antlion import utils
-
-FFX_DEFAULT_COMMAND_TIMEOUT: int = 60
-
-
-class FFXError(signals.TestError):
-    """Non-zero error code returned from a ffx command."""
-
-    def __init__(self, command: str, process: subprocess.CalledProcessError) -> None:
-        self.command = command
-        self.stdout: str = process.stdout.decode("utf-8", errors="replace")
-        self.stderr: str = process.stderr.decode("utf-8", errors="replace")
-        self.exit_status = process.returncode
-
-    def __str__(self) -> str:
-        return f'ffx subcommand "{self.command}" returned {self.exit_status}, stdout: "{self.stdout}", stderr: "{self.stderr}"'
-
-
-class FFXTimeout(signals.TestError):
-    """Timed out running a ffx command."""
-
-
-class FFX:
-    """Device-specific controller for the ffx tool.
-
-    Attributes:
-        log: Logger for the device-specific instance of ffx.
-        binary_path: Path to the ffx binary.
-        mdns_name: mDNS nodename of the default Fuchsia target.
-        ip: IP address of the default Fuchsia target.
-        ssh_private_key_path: Path to Fuchsia DUT SSH private key.
-    """
-
-    def __init__(
-        self,
-        binary_path: str,
-        mdns_name: str,
-        ip: str = None,
-        ssh_private_key_path: str = None,
-    ):
-        """
-        Args:
-            binary_path: Path to ffx binary.
-            target: Fuchsia mDNS nodename of default target.
-            ssh_private_key_path: Path to SSH private key for talking to the
-                Fuchsia DUT.
-        """
-        self.log = logger.create_tagged_trace_logger(f"ffx | {mdns_name}")
-        self.binary_path = binary_path
-        self.mdns_name = mdns_name
-        self.ip = ip
-        self.ssh_private_key_path = ssh_private_key_path
-
-        self._env_config_path: Optional[str] = None
-        self._sock_dir: Optional[str] = None
-        self._ssh_auth_sock_path: Optional[str] = None
-        self._overnet_socket_path: Optional[str] = None
-        self._has_been_reachable = False
-        self._has_logged_version = False
-
-    def clean_up(self) -> None:
-        if self._env_config_path:
-            self.run("daemon stop", skip_reachability_check=True)
-        if self._ssh_auth_sock_path:
-            Path(self._ssh_auth_sock_path).unlink(missing_ok=True)
-        if self._overnet_socket_path:
-            Path(self._overnet_socket_path).unlink(missing_ok=True)
-        if self._sock_dir:
-            rmtree(self._sock_dir)
-
-        self._env_config_path = None
-        self._sock_dir = None
-        self._ssh_auth_sock_path = None
-        self._overnet_socket_path = None
-        self._has_been_reachable = False
-        self._has_logged_version = False
-
-    def run(
-        self,
-        command: str,
-        timeout_sec: int = FFX_DEFAULT_COMMAND_TIMEOUT,
-        skip_status_code_check: bool = False,
-        skip_reachability_check: bool = False,
-    ) -> subprocess.CompletedProcess:
-        """Runs an ffx command.
-
-        Verifies reachability before running, if it hasn't already.
-
-        Args:
-            command: Command to run with ffx.
-            timeout_sec: Seconds to wait for a command to complete.
-            skip_status_code_check: Whether to check for the status code.
-            verify_reachable: Whether to verify reachability before running.
-
-        Raises:
-            FFXTimeout: when the command times out.
-            FFXError: when the command returns non-zero and skip_status_code_check is False.
-
-        Returns:
-            The results of the command. Note subprocess.CompletedProcess returns
-            stdout and stderr as a byte-array, not a string. Treat these members
-            as such or convert to a string using bytes.decode('utf-8').
-        """
-        if not self._env_config_path:
-            self._create_isolated_environment()
-        if not self._has_been_reachable and not skip_reachability_check:
-            self.log.info(f'Verifying reachability before running "{command}"')
-            self.verify_reachable()
-
-        self.log.debug(f'Running "{command}".')
-        full_command = f"{self.binary_path} -e {self._env_config_path} {command}"
-
-        try:
-            result = subprocess.run(
-                full_command.split(),
-                capture_output=True,
-                timeout=timeout_sec,
-                check=not skip_status_code_check,
-            )
-        except subprocess.CalledProcessError as e:
-            raise FFXError(command, e) from e
-        except subprocess.TimeoutExpired as e:
-            raise FFXTimeout(f'Timed out running "{full_command}"') from e
-
-        return result
-
-    def _create_isolated_environment(self) -> None:
-        """Create a new isolated environment for ffx.
-
-        This is needed to avoid overlapping ffx daemons while testing in
-        parallel, causing the ffx invocations to “upgrade” one daemon to
-        another, which appears as a flap/restart to another test.
-        """
-        # Store ffx files in a unique directory. Timestamp is used to prevent
-        # files from being overwritten in the case when a test intentionally
-        # reboots or resets the device such that a new isolated ffx environment
-        # is created.
-        root_dir = context.get_current_context().get_full_output_path()
-        epoch = utils.get_current_epoch_time()
-        time_stamp = logger.normalize_log_line_timestamp(
-            logger.epoch_to_log_line_timestamp(epoch)
-        )
-        target_dir = os.path.join(root_dir, f"{self.mdns_name}_{time_stamp}")
-        os.makedirs(target_dir, exist_ok=True)
-
-        # Sockets need to be created in a different directory to be guaranteed
-        # to stay under the maximum socket path length of 104 characters.
-        # See https://unix.stackexchange.com/q/367008
-        self._sock_dir = tempfile.mkdtemp()
-        # On MacOS, the socket paths need to be just paths (not pre-created
-        # Python tempfiles, which are not socket files).
-        self._ssh_auth_sock_path = str(PurePath(self._sock_dir, "ssh_auth_sock"))
-        self._overnet_socket_path = str(PurePath(self._sock_dir, "overnet_socket"))
-
-        config: MutableMapping[str, Any] = {
-            "target": {
-                "default": self.mdns_name,
-            },
-            # Use user-specific and device-specific locations for sockets.
-            # Avoids user permission errors in a multi-user test environment.
-            # Avoids daemon upgrades when running tests in parallel in a CI
-            # environment.
-            "ssh": {
-                "auth-sock": self._ssh_auth_sock_path,
-            },
-            "overnet": {
-                "socket": self._overnet_socket_path,
-            },
-            # Configure the ffx daemon to log to a place where we can read it.
-            # Note, ffx client will still output to stdout, not this log
-            # directory.
-            "log": {
-                "enabled": True,
-                "dir": [target_dir],
-            },
-            # Disable analytics to decrease noise on the network.
-            "ffx": {
-                "analytics": {
-                    "disabled": True,
-                },
-            },
-            # Prevent log collection from all devices the ffx daemon sees; only
-            # collect logs from the target device.
-            #
-            # TODO(https://fxbug.dev/118764): Consider re-enabling after
-            # resolution of the issue causing a reboot of the target device.
-            "proactive_log": {
-                "enabled": False,
-            },
-        }
-
-        if self.ip:
-            config["discovery"] = {
-                "mdns": {
-                    "enabled": False,
-                },
-            }
-
-        # ffx looks for the private key in several default locations. For
-        # testbeds which have the private key in another location, set it now.
-        if self.ssh_private_key_path:
-            config["ssh"]["priv"] = self.ssh_private_key_path
-
-        config_path = os.path.join(target_dir, "ffx_config.json")
-        with open(config_path, "w", encoding="utf-8") as f:
-            json.dump(config, f, ensure_ascii=False, indent=4)
-
-        env = {
-            "user": config_path,
-            "build": None,
-            "global": None,
-        }
-        self._env_config_path = os.path.join(target_dir, "ffx_env.json")
-        with open(self._env_config_path, "w", encoding="utf-8") as f:
-            json.dump(env, f, ensure_ascii=False, indent=4)
-
-        # The ffx daemon will started automatically when needed. There is no
-        # need to start it manually here.
-
-    def verify_reachable(self, timeout_sec: int = FFX_DEFAULT_COMMAND_TIMEOUT) -> None:
-        """Verify the target is reachable via RCS and various services.
-
-        Blocks until the device allows for an RCS connection. If the device
-        isn't reachable within a short time, logs a warning before waiting
-        longer.
-
-        Verifies the RCS connection by fetching information from the device,
-        which exercises several debug and informational FIDL services.
-
-        When called for the first time, the versions will be checked for
-        compatibility.
-
-        Args:
-            timeout_sec: Seconds to wait for reachability check
-
-        Raises:
-            FFXError: when an unknown error occurs
-            FFXTimeout: when the target is unreachable
-        """
-        cmd = "target wait"
-        if self.ip:
-            # `target add` does what `target wait` does but adds an entry
-            # to ensure connections can happen without mDNS.
-            # TODO(https://fxbug.dev/105530): Update manual target parsing in
-            # ffx.
-            cmd = f"target add {self.ip}"
-
-        timeout = time.perf_counter() + timeout_sec
-        while True:
-            try:
-                self.run(cmd, timeout_sec=5, skip_reachability_check=True)
-                break
-            except FFXError as e:
-                if "took too long connecting to ascendd socket" in e.stderr:
-                    err = e
-                else:
-                    raise e
-            except FFXTimeout as e:
-                err = e
-
-            if time.perf_counter() > timeout:
-                raise FFXTimeout(
-                    f"Waited over {timeout_sec}s for ffx to become reachable"
-                ) from err
-
-        # Use a shorter timeout than default because device information
-        # gathering can hang for a long time if the device is not actually
-        # connectable.
-        try:
-            result = self.run(
-                "target show --json", timeout_sec=15, skip_reachability_check=True
-            )
-        except Exception as e:
-            self.log.error(
-                f'Failed to reach target device. Try running "{self.binary_path}'
-                + ' doctor" to diagnose issues.'
-            )
-            raise e
-
-        self._has_been_reachable = True
-
-        if not self._has_logged_version:
-            self._has_logged_version = True
-            self.compare_version(result)
-
-    def compare_version(self, target_show_result: subprocess.CompletedProcess) -> None:
-        """Compares the version of Fuchsia with the version of ffx.
-
-        Args:
-            target_show_result: Result of the target show command with JSON
-                output mode enabled
-        """
-        result_json = json.loads(target_show_result.stdout)
-        build_info = next(filter(lambda s: s.get("label") == "build", result_json))
-        version_info = next(
-            filter(lambda s: s.get("label") == "version", build_info["child"])
-        )
-        device_version = version_info.get("value")
-        ffx_version = self.run("version").stdout.decode("utf-8")
-
-        self.log.info(f"Device version: {device_version}, ffx version: {ffx_version}")
-        if device_version != ffx_version:
-            self.log.warning(
-                "ffx versions that differ from device versions may"
-                + " have compatibility issues. It is recommended to"
-                + " use versions within 6 weeks of each other."
-            )
diff --git a/src/antlion/controllers/fuchsia_lib/hardware_power_statecontrol_lib.py b/src/antlion/controllers/fuchsia_lib/hardware_power_statecontrol_lib.py
deleted file mode 100644
index 30af9a8..0000000
--- a/src/antlion/controllers/fuchsia_lib/hardware_power_statecontrol_lib.py
+++ /dev/null
@@ -1,90 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import logging
-import http
-
-import antlion.controllers.fuchsia_lib.base_lib as base_lib
-
-HW_PWR_STATE_CONTROL_TIMEOUT = 5
-
-
-class FuchsiaHardwarePowerStatecontrolLib(base_lib.BaseLib):
-    def __init__(self, addr: str) -> None:
-        super().__init__(addr, "hardware_power_statecontrol")
-
-    def send_command(self, test_cmd, test_args, response_timeout=30):
-        """Wrap send_command to allow disconnects after sending the request."""
-        try:
-            response = super().send_command(test_cmd, test_args, response_timeout)
-        except (
-            TimeoutError,
-            http.client.RemoteDisconnected,
-            base_lib.DeviceOffline,
-        ) as e:
-            logging.warn(f"Error while sending power command: {e}")
-            return
-        return response
-
-    def suspendReboot(self, timeout=HW_PWR_STATE_CONTROL_TIMEOUT):
-        """Call Suspend Reboot.
-
-        Returns:
-            None if success.
-        """
-        test_cmd = "hardware_power_statecontrol_facade.SuspendReboot"
-        test_args = {}
-        return self.send_command(test_cmd, test_args, response_timeout=timeout)
-
-    def suspendRebootBootloader(self, timeout=HW_PWR_STATE_CONTROL_TIMEOUT):
-        """Call Suspend Reboot Bootloader
-
-        Returns:
-            None if success.
-        """
-        test_cmd = "hardware_power_statecontrol_facade.SuspendRebootBootloader"
-        test_args = {}
-        return self.send_command(test_cmd, test_args, response_timeout=timeout)
-
-    def suspendPoweroff(self, timeout=HW_PWR_STATE_CONTROL_TIMEOUT):
-        """Call Suspend Poweroff
-
-        Returns:
-            None if success.
-        """
-        test_cmd = "hardware_power_statecontrol_facade.SuspendPoweroff"
-        test_args = {}
-        return self.send_command(test_cmd, test_args, response_timeout=timeout)
-
-    def suspendMexec(self, timeout=HW_PWR_STATE_CONTROL_TIMEOUT):
-        """Call Suspend Mexec
-
-        Returns:
-            None if success.
-        """
-        test_cmd = "hardware_power_statecontrol_facade.SuspendMexec"
-        test_args = {}
-        return self.send_command(test_cmd, test_args, response_timeout=timeout)
-
-    def suspendRam(self, timeout=HW_PWR_STATE_CONTROL_TIMEOUT):
-        """Call Suspend Ram
-
-        Returns:
-            None if success.
-        """
-        test_cmd = "hardware_power_statecontrol_facade.SuspendRam"
-        test_args = {}
-        return self.send_command(test_cmd, test_args, response_timeout=timeout)
diff --git a/src/antlion/controllers/fuchsia_lib/lib_controllers/netstack_controller.py b/src/antlion/controllers/fuchsia_lib/lib_controllers/netstack_controller.py
deleted file mode 100644
index 0ff858c..0000000
--- a/src/antlion/controllers/fuchsia_lib/lib_controllers/netstack_controller.py
+++ /dev/null
@@ -1,46 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion import logger
-from antlion import signals
-
-
-class NetstackControllerError(signals.ControllerError):
-    pass
-
-
-class NetstackController:
-    """Contains methods related to netstack, to be used in FuchsiaDevice object"""
-
-    def __init__(self, fuchsia_device):
-        self.device = fuchsia_device
-        self.log = logger.create_tagged_trace_logger(
-            "NetstackController for FuchsiaDevice | %s" % self.device.ip
-        )
-
-    def list_interfaces(self):
-        """Retrieve netstack interfaces from netstack facade
-
-        Returns:
-            List of dicts, one for each interface, containing interface
-            information
-        """
-        response = self.device.sl4f.netstack_lib.netstackListInterfaces()
-        if response.get("error"):
-            raise NetstackControllerError(
-                "Failed to get network interfaces list: %s" % response["error"]
-            )
-        return response["result"]
diff --git a/src/antlion/controllers/fuchsia_lib/lib_controllers/wlan_controller.py b/src/antlion/controllers/fuchsia_lib/lib_controllers/wlan_controller.py
deleted file mode 100644
index 922b167..0000000
--- a/src/antlion/controllers/fuchsia_lib/lib_controllers/wlan_controller.py
+++ /dev/null
@@ -1,189 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import time
-
-from antlion import logger
-from antlion import signals
-from antlion import utils
-
-TIME_TO_SLEEP_BETWEEN_RETRIES = 1
-TIME_TO_WAIT_FOR_COUNTRY_CODE = 10
-
-
-class WlanControllerError(signals.ControllerError):
-    pass
-
-
-class WlanController:
-    """Contains methods related to wlan core, to be used in FuchsiaDevice object"""
-
-    def __init__(self, fuchsia_device):
-        self.device = fuchsia_device
-        self.log = logger.create_tagged_trace_logger(
-            "WlanController for FuchsiaDevice | %s" % self.device.ip
-        )
-
-    # TODO(70501): Wrap wlan_lib functions and setup from FuchsiaDevice here
-    # (similar to how WlanPolicyController does it) to prevent FuchsiaDevice
-    # from growing too large.
-    def _configure_wlan(self):
-        pass
-
-    def _deconfigure_wlan(self):
-        pass
-
-    def update_wlan_interfaces(self):
-        """Retrieves WLAN interfaces from device and sets the FuchsiaDevice
-        attributes.
-        """
-        wlan_interfaces = self.get_interfaces_by_role()
-        self.device.wlan_client_interfaces = wlan_interfaces["client"]
-        self.device.wlan_ap_interfaces = wlan_interfaces["ap"]
-
-        # Set test interfaces to value from config, else the first found
-        # interface, else None
-        self.device.wlan_client_test_interface_name = self.device.conf_data.get(
-            "wlan_client_test_interface",
-            next(iter(self.device.wlan_client_interfaces), None),
-        )
-
-        self.device.wlan_ap_test_interface_name = self.device.conf_data.get(
-            "wlan_ap_test_interface", next(iter(self.device.wlan_ap_interfaces), None)
-        )
-
-    def get_interfaces_by_role(self):
-        """Retrieves WLAN interface information, supplimented by netstack info.
-
-        Returns:
-            Dict with keys 'client' and 'ap', each of which contain WLAN
-            interfaces.
-        """
-
-        # Retrieve WLAN interface IDs
-        response = self.device.sl4f.wlan_lib.wlanGetIfaceIdList()
-        if response.get("error"):
-            raise WlanControllerError(
-                "Failed to get WLAN iface ids: %s" % response["error"]
-            )
-
-        wlan_iface_ids = response.get("result", [])
-        if len(wlan_iface_ids) < 1:
-            return {"client": {}, "ap": {}}
-
-        # Use IDs to get WLAN interface info and mac addresses
-        wlan_ifaces_by_mac = {}
-        for id in wlan_iface_ids:
-            response = self.device.sl4f.wlan_lib.wlanQueryInterface(id)
-            if response.get("error"):
-                raise WlanControllerError(
-                    "Failed to query wlan iface id %s: %s" % (id, response["error"])
-                )
-
-            mac = response["result"].get("sta_addr", None)
-            if mac is None:
-                # Fallback to older field name to maintain backwards
-                # compatibility with older versions of SL4F's
-                # QueryIfaceResponse. See https://fxrev.dev/562146.
-                mac = response["result"].get("mac_addr")
-
-            wlan_ifaces_by_mac[utils.mac_address_list_to_str(mac)] = response["result"]
-
-        # Use mac addresses to query the interfaces from the netstack view,
-        # which allows us to supplement the interface information with the name,
-        # netstack_id, etc.
-
-        # TODO(fxb/75909): This tedium is necessary to get the interface name
-        # because only netstack has that information. The bug linked here is
-        # to reconcile some of the information between the two perspectives, at
-        # which point we can eliminate step.
-        net_ifaces = self.device.netstack_controller.list_interfaces()
-        wlan_ifaces_by_role = {"client": {}, "ap": {}}
-        for iface in net_ifaces:
-            try:
-                # Some interfaces might not have a MAC
-                iface_mac = utils.mac_address_list_to_str(iface["mac"])
-            except Exception as e:
-                self.log.debug(f"Error {e} getting MAC for iface {iface}")
-                continue
-            if iface_mac in wlan_ifaces_by_mac:
-                wlan_ifaces_by_mac[iface_mac]["netstack_id"] = iface["id"]
-
-                # Add to return dict, mapped by role then name.
-                wlan_ifaces_by_role[wlan_ifaces_by_mac[iface_mac]["role"].lower()][
-                    iface["name"]
-                ] = wlan_ifaces_by_mac[iface_mac]
-
-        return wlan_ifaces_by_role
-
-    def set_country_code(self, country_code):
-        """Sets country code through the regulatory region service and waits
-        for the code to be applied to WLAN PHY.
-
-        Args:
-            country_code: string, the 2 character country code to set
-
-        Raises:
-            EnvironmentError - failure to get/set regulatory region
-            ConnectionError - failure to query PHYs
-        """
-        self.log.info("Setting DUT country code to %s" % country_code)
-        country_code_response = self.device.sl4f.regulatory_region_lib.setRegion(
-            country_code
-        )
-        if country_code_response.get("error"):
-            raise EnvironmentError(
-                "Failed to set country code (%s) on DUT. Error: %s"
-                % (country_code, country_code_response["error"])
-            )
-
-        self.log.info(
-            "Verifying DUT country code was correctly set to %s." % country_code
-        )
-        phy_ids_response = self.device.sl4f.wlan_lib.wlanPhyIdList()
-        if phy_ids_response.get("error"):
-            raise ConnectionError(
-                "Failed to get phy ids from DUT. Error: %s"
-                % (country_code, phy_ids_response["error"])
-            )
-
-        end_time = time.time() + TIME_TO_WAIT_FOR_COUNTRY_CODE
-        while time.time() < end_time:
-            for id in phy_ids_response["result"]:
-                get_country_response = self.device.sl4f.wlan_lib.wlanGetCountry(id)
-                if get_country_response.get("error"):
-                    raise ConnectionError(
-                        "Failed to query PHY ID (%s) for country. Error: %s"
-                        % (id, get_country_response["error"])
-                    )
-
-                set_code = "".join(
-                    [chr(ascii_char) for ascii_char in get_country_response["result"]]
-                )
-                if set_code != country_code:
-                    self.log.debug(
-                        "PHY (id: %s) has incorrect country code set. "
-                        "Expected: %s, Got: %s" % (id, country_code, set_code)
-                    )
-                    break
-            else:
-                self.log.info("All PHYs have expected country code (%s)" % country_code)
-                break
-            time.sleep(TIME_TO_SLEEP_BETWEEN_RETRIES)
-        else:
-            raise EnvironmentError(
-                "Failed to set DUT country code to %s." % country_code
-            )
diff --git a/src/antlion/controllers/fuchsia_lib/lib_controllers/wlan_policy_controller.py b/src/antlion/controllers/fuchsia_lib/lib_controllers/wlan_policy_controller.py
deleted file mode 100644
index 5ef126b..0000000
--- a/src/antlion/controllers/fuchsia_lib/lib_controllers/wlan_policy_controller.py
+++ /dev/null
@@ -1,616 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import time
-
-from antlion import logger
-from antlion import signals
-
-from antlion.controllers.fuchsia_lib.sl4f import SL4F
-from antlion.controllers.fuchsia_lib.ssh import SSHProvider
-
-SAVED_NETWORKS = "saved_networks"
-CLIENT_STATE = "client_connections_state"
-CONNECTIONS_ENABLED = "ConnectionsEnabled"
-CONNECTIONS_DISABLED = "ConnectionsDisabled"
-
-STATE_CONNECTED = "Connected"
-STATE_CONNECTING = "Connecting"
-STATE_DISCONNECTED = "Disconnected"
-STATE_CONNECTION_STOPPED = "ConnectionStopped"
-
-SESSION_MANAGER_TIMEOUT_SEC = 10
-FUCHSIA_DEFAULT_WLAN_CONFIGURE_TIMEOUT = 30
-DEFAULT_GET_UPDATE_TIMEOUT = 60
-
-
-class WlanPolicyControllerError(signals.ControllerError):
-    pass
-
-
-class WlanPolicyController:
-    """Contains methods related to the wlan policy layer, to be used in the
-    FuchsiaDevice object.
-
-    Attributes:
-        sl4f: sl4f module for communicating to the WLAN policy controller.
-        ssh: transport to fuchsia device to stop component processes.
-    """
-
-    def __init__(self, sl4f: SL4F, ssh: SSHProvider):
-        """
-        Args:
-            sl4f: sl4f module for communicating to the WLAN policy controller.
-            ssh: transport to fuchsia device to stop component processes.
-        """
-        self.preserved_networks_and_client_state = None
-        self.policy_configured = False
-        self.sl4f = sl4f
-        self.ssh = ssh
-        self.log = logger.create_tagged_trace_logger(
-            f"WlanPolicyController | {self.ssh.config.host_name}"
-        )
-
-    def configure_wlan(
-        self,
-        preserve_saved_networks: bool,
-        timeout_sec: int = FUCHSIA_DEFAULT_WLAN_CONFIGURE_TIMEOUT,
-    ) -> None:
-        """Sets up wlan policy layer.
-
-        Args:
-            preserve_saved_networks: whether to clear existing saved
-                networks and client state, to be restored at test close.
-            timeout_sec: time to wait for device to configure WLAN.
-        """
-
-        # We need to stop session manager to free control of
-        # fuchsia.wlan.policy.ClientController, which can only be used by a
-        # single caller at a time. SL4F needs the ClientController to trigger
-        # WLAN policy state changes. On eng builds the session_manager can be
-        # restarted after being stopped during reboot so we attempt killing the
-        # session manager process for 10 seconds.
-        # See https://cs.opensource.google/fuchsia/fuchsia/+/main:sdk/fidl/fuchsia.wlan.policy/client_provider.fidl
-        if "cast_agent.cm" in self.ssh.run("ps").stdout:
-            end_time_session_manager_sec = time.time() + SESSION_MANAGER_TIMEOUT_SEC
-            while time.time() < end_time_session_manager_sec:
-                self.ssh.stop_component("session_manager", is_cfv2_component=True)
-
-        # Acquire control of policy layer
-        end_time_config_sec = time.time() + timeout_sec
-        controller_errors = []
-        while time.time() < end_time_config_sec:
-            # Create a client controller
-            response = self.sl4f.wlan_policy_lib.wlanCreateClientController()
-            if response.get("error"):
-                controller_errors.append(response["error"])
-                self.log.debug(response["error"])
-                time.sleep(1)
-                continue
-            break
-        else:
-            self.log.warning(
-                "Failed to create and use a WLAN policy client controller. Errors: ["
-                + "; ".join(controller_errors)
-                + "]"
-            )
-            raise WlanPolicyControllerError(
-                "Failed to create and use a WLAN policy client controller."
-            )
-
-        self.log.info("ACTS tests now have control of the WLAN policy layer.")
-
-        if preserve_saved_networks and not self.preserved_networks_and_client_state:
-            self.preserved_networks_and_client_state = (
-                self.remove_and_preserve_networks_and_client_state()
-            )
-        if not self.start_client_connections():
-            raise WlanPolicyControllerError(
-                "Failed to start client connections during configuration."
-            )
-
-        self.policy_configured = True
-
-    def _deconfigure_wlan(self):
-        if not self.stop_client_connections():
-            raise WlanPolicyControllerError(
-                "Failed to stop client connections during deconfiguration."
-            )
-        self.policy_configured = False
-
-    def clean_up(self) -> None:
-        if self.preserved_networks_and_client_state:
-            # It is possible for policy to have been configured before, but
-            # deconfigured before test end. In this case, in must be setup
-            # before restoring networks
-            if not self.policy_configured:
-                self.configure_wlan()
-            self.restore_preserved_networks_and_client_state()
-
-    def start_client_connections(self):
-        """Allow device to connect to networks via policy layer (including
-        autoconnecting to saved networks).
-
-        Returns:
-            True, if successful. False otherwise."""
-        start_response = self.sl4f.wlan_policy_lib.wlanStartClientConnections()
-        if start_response.get("error"):
-            self.log.error(
-                "Failed to start client connections. Err: %s" % start_response["error"]
-            )
-            return False
-        return True
-
-    def stop_client_connections(self):
-        """Prevent device from connecting and autoconnecting to networks via the
-        policy layer.
-
-        Returns:
-            True, if successful. False otherwise."""
-        stop_response = self.sl4f.wlan_policy_lib.wlanStopClientConnections()
-        if stop_response.get("error"):
-            self.log.error(
-                "Failed to stop client connections. Err: %s" % stop_response["error"]
-            )
-            return False
-        return True
-
-    def save_and_connect(self, ssid, security, password=None, timeout=30):
-        """Saves and connects to the network. This is the policy version of
-        connect and check_connect_response because the policy layer
-        requires a saved network and the policy connect does not return
-        success or failure
-
-        Args:
-            ssid: string, the network name
-            security: string, security type of network (see sl4f.wlan_policy_lib)
-            password: string, the credential of the network if applicable
-            timeout: int, time in seconds to wait for connection
-
-        Returns:
-            True, if successful. False otherwise.
-        """
-        # Save network and check response
-        if not self.save_network(ssid, security, password=password):
-            return False
-        # Make connect call and check response
-        self.sl4f.wlan_policy_lib.wlanSetNewListener()
-        if not self.send_connect_command(ssid, security):
-            return False
-        return self.wait_for_connect(ssid, security, timeout=timeout)
-
-    def save_and_wait_for_autoconnect(self, ssid, security, password=None, timeout=30):
-        """Saves a network and waits, expecting an autoconnection to the newly
-        saved network. This differes from save_and_connect, as it doesn't
-        expressly trigger a connection first. There are cases in which an
-        autoconnect won't occur after a save (like if the device is connected
-        already), so this should be used with caution to test very specific
-        situations.
-
-        Args:
-            ssid: string, the network name
-            security: string, security type of network (see sl4f.wlan_policy_lib)
-            password: string, the credential of the network if applicable
-            timeout: int, time in seconds to wait for connection
-
-        Returns:
-            True, if successful. False otherwise.
-        """
-        if not self.save_network(ssid, security, password=password):
-            return False
-        return self.wait_for_connect(ssid, security, timeout=timeout)
-
-    def remove_and_wait_for_disconnect(
-        self, ssid, security_type, password=None, state=None, status=None, timeout=30
-    ):
-        """Removes a single network and waits for a disconnect. It is not
-        guaranteed the device will stay disconnected, as it may autoconnect
-        to a different saved network.
-
-        Args:
-            ssid: string, the network name
-            security: string, security type of network (see sl4f.wlan_policy_lib)
-            password: string, the credential of the network if applicable
-            state: string, The connection state we are expecting, ie "Disconnected" or
-                "Failed"
-            status: string, The disconnect status we expect, it "ConnectionStopped" or
-                "ConnectionFailed"
-            timeout: int, time in seconds to wait for connection
-
-        Returns:
-            True, if successful. False otherwise.
-        """
-        self.sl4f.wlan_policy_lib.wlanSetNewListener()
-        if not self.remove_network(ssid, security_type, password=password):
-            return False
-        return self.wait_for_disconnect(
-            ssid, security_type, state=state, status=status, timeout=timeout
-        )
-
-    def remove_all_networks_and_wait_for_no_connections(
-        self, timeout_sec: int = DEFAULT_GET_UPDATE_TIMEOUT
-    ) -> bool:
-        """Removes all networks and waits until device is not connected to any
-        networks. This should be used as the policy version of disconnect.
-
-        Args:
-            timeout_sec: The time to wait to see no connections.
-
-        Returns:
-            True, if successful. False otherwise.
-        """
-        self.sl4f.wlan_policy_lib.wlanSetNewListener()
-        if not self.remove_all_networks():
-            self.log.error(
-                "Failed to remove all networks. Cannot continue to "
-                "wait_for_no_connections."
-            )
-            return False
-        return self.wait_for_no_connections(timeout_sec=timeout_sec)
-
-    def save_network(self, ssid, security_type, password=None):
-        """Save a network via the policy layer.
-
-        Args:
-            ssid: string, the network name
-            security: string, security type of network (see sl4f.wlan_policy_lib)
-            password: string, the credential of the network if applicable
-
-        Returns:
-            True, if successful. False otherwise.
-        """
-        save_response = self.sl4f.wlan_policy_lib.wlanSaveNetwork(
-            ssid, security_type, target_pwd=password
-        )
-        if save_response.get("error"):
-            self.log.error(
-                "Failed to save network %s with error: %s"
-                % (ssid, save_response["error"])
-            )
-            return False
-        return True
-
-    def remove_network(self, ssid, security_type, password=None):
-        """Remove a saved network via the policy layer.
-
-        Args:
-            ssid: string, the network name
-            security: string, security type of network (see sl4f.wlan_policy_lib)
-            password: string, the credential of the network if applicable
-
-        Returns:
-            True, if successful. False otherwise.
-        """
-        remove_response = self.sl4f.wlan_policy_lib.wlanRemoveNetwork(
-            ssid, security_type, target_pwd=password
-        )
-        if remove_response.get("error"):
-            self.log.error(
-                "Failed to remove network %s with error: %s"
-                % (ssid, remove_response["error"])
-            )
-            return False
-        return True
-
-    def remove_all_networks(self):
-        """Removes all saved networks from device.
-
-        Returns:
-            True, if successful. False otherwise.
-        """
-        remove_all_response = self.sl4f.wlan_policy_lib.wlanRemoveAllNetworks()
-        if remove_all_response.get("error"):
-            self.log.error(
-                "Error occurred removing all networks: %s"
-                % remove_all_response["error"]
-            )
-            return False
-        return True
-
-    def get_saved_networks(self):
-        """Retrieves saved networks from device.
-
-        Returns:
-            list of saved networks
-
-        Raises:
-            WlanPolicyControllerError, if retrieval fails.
-        """
-        saved_networks_response = self.sl4f.wlan_policy_lib.wlanGetSavedNetworks()
-        if saved_networks_response.get("error"):
-            raise WlanPolicyControllerError(
-                "Failed to retrieve saved networks: %s"
-                % saved_networks_response["error"]
-            )
-        return saved_networks_response["result"]
-
-    def send_connect_command(self, ssid, security_type):
-        """Sends a connect command to a network that is already saved. This does
-        not wait to guarantee the connection is successful (for that, use
-        save_and_connect).
-
-        Args:
-            ssid: string, the network name
-            security: string, security type of network (see sl4f.wlan_policy_lib)
-            password: string, the credential of the network if applicable
-
-        Returns:
-            True, if command send successfully. False otherwise.
-        """
-        connect_response = self.sl4f.wlan_policy_lib.wlanConnect(ssid, security_type)
-        if connect_response.get("error"):
-            self.log.error(
-                "Error occurred when sending policy connect command: %s"
-                % connect_response["error"]
-            )
-            return False
-        return True
-
-    def wait_for_connect(self, ssid, security_type, timeout=30):
-        """Wait until the device has connected to the specified network.
-        Args:
-            ssid: string, the network name
-            security: string, security type of network (see sl4f.wlan_policy_lib)
-            timeout: int, seconds to wait for a update showing connection
-        Returns:
-            True if we see a connect to the network, False otherwise.
-        """
-        security_type = str(security_type)
-        # Wait until we've connected.
-        end_time = time.time() + timeout
-        while time.time() < end_time:
-            time_left = max(1, int(end_time - time.time()))
-
-            try:
-                update = self.sl4f.wlan_policy_lib.wlanGetUpdate(timeout=time_left)
-            except TimeoutError:
-                self.log.error(
-                    "Timed out waiting for response from device "
-                    'while waiting for network with SSID "%s" to '
-                    "connect. Device took too long to connect or "
-                    "the request timed out for another reason." % ssid
-                )
-                self.sl4f.wlan_policy_lib.wlanSetNewListener()
-                return False
-            if update.get("error"):
-                # This can occur for many reasons, so it is not necessarily a
-                # failure.
-                self.log.debug(
-                    "Error occurred getting status update: %s" % update["error"]
-                )
-                continue
-
-            for network in update["result"]["networks"]:
-                if (
-                    network["id"]["ssid"] == ssid
-                    or network["id"]["type_"].lower() == security_type.lower()
-                ):
-                    if "state" not in network:
-                        raise WlanPolicyControllerError(
-                            "WLAN status missing state field."
-                        )
-                    elif network["state"].lower() == STATE_CONNECTED.lower():
-                        return True
-            # Wait a bit before requesting another status update
-            time.sleep(1)
-        # Stopped getting updates because out timeout
-        self.log.error(
-            'Timed out waiting for network with SSID "%s" to ' "connect" % ssid
-        )
-        return False
-
-    def wait_for_disconnect(
-        self, ssid, security_type, state=None, status=None, timeout=30
-    ):
-        """Wait for a disconnect of the specified network on the given device. This
-        will check that the correct connection state and disconnect status are
-        given in update. If we do not see a disconnect after some time,
-        return false.
-
-        Args:
-            ssid: string, the network name
-            security: string, security type of network (see sl4f.wlan_policy_lib)
-            state: string, The connection state we are expecting, ie "Disconnected" or
-                "Failed"
-            status: string, The disconnect status we expect, it "ConnectionStopped" or
-                "ConnectionFailed"
-            timeout: int, seconds to wait before giving up
-
-        Returns: True if we saw a disconnect as specified, or False otherwise.
-        """
-        if not state:
-            state = STATE_DISCONNECTED
-        if not status:
-            status = STATE_CONNECTION_STOPPED
-
-        end_time = time.time() + timeout
-        while time.time() < end_time:
-            time_left = max(1, int(end_time - time.time()))
-            try:
-                update = self.sl4f.wlan_policy_lib.wlanGetUpdate(timeout=time_left)
-            except TimeoutError:
-                self.log.error(
-                    "Timed out waiting for response from device "
-                    'while waiting for network with SSID "%s" to '
-                    "disconnect. Device took too long to disconnect "
-                    "or the request timed out for another reason." % ssid
-                )
-                self.sl4f.wlan_policy_lib.wlanSetNewListener()
-                return False
-
-            if update.get("error"):
-                # This can occur for many reasons, so it is not necessarily a
-                # failure.
-                self.log.debug(
-                    "Error occurred getting status update: %s" % update["error"]
-                )
-                continue
-            # Update should include network, either connected to or recently disconnected.
-            if len(update["result"]["networks"]) == 0:
-                raise WlanPolicyControllerError("WLAN state update is missing network.")
-
-            for network in update["result"]["networks"]:
-                if (
-                    network["id"]["ssid"] == ssid
-                    or network["id"]["type_"].lower() == security_type.lower()
-                ):
-                    if "state" not in network or "status" not in network:
-                        raise WlanPolicyControllerError(
-                            "Client state summary's network is missing fields"
-                        )
-                    # If still connected, we will wait for another update and check again
-                    elif network["state"].lower() == STATE_CONNECTED.lower():
-                        continue
-                    elif network["state"].lower() == STATE_CONNECTING.lower():
-                        self.log.error(
-                            'Update is "Connecting", but device should already be '
-                            "connected; expected disconnect"
-                        )
-                        return False
-                    # Check that the network state and disconnect status are expected, ie
-                    # that it isn't ConnectionFailed when we expect ConnectionStopped
-                    elif (
-                        network["state"].lower() != state.lower()
-                        or network["status"].lower() != status.lower()
-                    ):
-                        self.log.error(
-                            "Connection failed: a network failure occurred that is unrelated"
-                            "to remove network or incorrect status update. \nExpected state: "
-                            "%s, Status: %s,\nActual update: %s"
-                            % (state, status, network)
-                        )
-                        return False
-                    else:
-                        return True
-            # Wait a bit before requesting another status update
-            time.sleep(1)
-        # Stopped getting updates because out timeout
-        self.log.error(
-            'Timed out waiting for network with SSID "%s" to ' "connect" % ssid
-        )
-        return False
-
-    def wait_for_no_connections(
-        self, timeout_sec: int = DEFAULT_GET_UPDATE_TIMEOUT
-    ) -> bool:
-        """Waits to see that there are no existing connections the device. This
-        is the simplest way to watch for disconnections when only a single
-        network is saved/present.
-
-        Args:
-            timeout_sec: The time to wait to see no connections.
-
-        Returns:
-            True, if successful. False, if still connected after timeout.
-        """
-        # If there are already no existing connections when this function is called,
-        # then an update won't be generated by the device, and we'll time out.
-        # Force an update by getting a new listener.
-        self.sl4f.wlan_policy_lib.wlanSetNewListener()
-        end_time = time.time() + timeout_sec
-        while time.time() < end_time:
-            time_left = max(1, int(end_time - time.time()))
-            try:
-                update = self.sl4f.wlan_policy_lib.wlanGetUpdate(timeout=time_left)
-            except TimeoutError:
-                self.log.info(
-                    "Timed out getting status update while waiting for all"
-                    " connections to end."
-                )
-                self.sl4f.wlan_policy_lib.wlanSetNewListener()
-                return False
-
-            if update["error"] != None:
-                self.log.info("Failed to get status update")
-                return False
-            # If any network is connected or being connected to, wait for them
-            # to disconnect.
-            if any(
-                network["state"].lower()
-                in {STATE_CONNECTED.lower(), STATE_CONNECTING.lower()}
-                for network in update["result"]["networks"]
-            ):
-                continue
-            else:
-                return True
-        return False
-
-    def remove_and_preserve_networks_and_client_state(self):
-        """Preserves networks already saved on devices before removing them to
-        setup up for a clean test environment. Records the state of client
-        connections before tests.
-
-        Raises:
-            WlanPolicyControllerError, if the network removal is unsuccessful
-        """
-        # Save preexisting saved networks
-        preserved_networks_and_state = {}
-        saved_networks_response = self.sl4f.wlan_policy_lib.wlanGetSavedNetworks()
-        if saved_networks_response.get("error"):
-            raise WlanPolicyControllerError(
-                "Failed to get preexisting saved networks: %s"
-                % saved_networks_response["error"]
-            )
-        if saved_networks_response.get("result") != None:
-            preserved_networks_and_state[SAVED_NETWORKS] = saved_networks_response[
-                "result"
-            ]
-
-        # Remove preexisting saved networks
-        if not self.remove_all_networks():
-            raise WlanPolicyControllerError(
-                "Failed to clear networks and disconnect at FuchsiaDevice creation."
-            )
-
-        self.sl4f.wlan_policy_lib.wlanSetNewListener()
-        update_response = self.sl4f.wlan_policy_lib.wlanGetUpdate()
-        update_result = update_response.get("result", {})
-        if update_result.get("state"):
-            preserved_networks_and_state[CLIENT_STATE] = update_result["state"]
-        else:
-            self.log.warn(
-                "Failed to get update; test will not start or "
-                "stop client connections at the end of the test."
-            )
-
-        self.log.info("Saved networks cleared and preserved.")
-        return preserved_networks_and_state
-
-    def restore_preserved_networks_and_client_state(self):
-        """Restore saved networks and client state onto device if they have
-        been preserved.
-        """
-        if not self.remove_all_networks():
-            self.log.warn("Failed to remove saved networks before restore.")
-        restore_success = True
-        for network in self.preserved_networks_and_client_state[SAVED_NETWORKS]:
-            if not self.save_network(
-                network["ssid"], network["security_type"], network["credential_value"]
-            ):
-                self.log.warn("Failed to restore network (%s)." % network["ssid"])
-                restore_success = False
-        starting_state = self.preserved_networks_and_client_state[CLIENT_STATE]
-        if starting_state == CONNECTIONS_ENABLED:
-            state_restored = self.start_client_connections()
-        else:
-            state_restored = self.stop_client_connections()
-        if not state_restored:
-            self.log.warn("Failed to restore client connections state.")
-            restore_success = False
-        if restore_success:
-            self.log.info("Preserved networks and client state restored.")
-            self.preserved_networks_and_client_state = None
-        return restore_success
diff --git a/src/antlion/controllers/fuchsia_lib/location/__init__.py b/src/antlion/controllers/fuchsia_lib/location/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/controllers/fuchsia_lib/location/__init__.py
+++ /dev/null
diff --git a/src/antlion/controllers/fuchsia_lib/location/regulatory_region_lib.py b/src/antlion/controllers/fuchsia_lib/location/regulatory_region_lib.py
deleted file mode 100644
index 54d9e44..0000000
--- a/src/antlion/controllers/fuchsia_lib/location/regulatory_region_lib.py
+++ /dev/null
@@ -1,38 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion.controllers.fuchsia_lib.base_lib import BaseLib
-
-
-class FuchsiaRegulatoryRegionLib(BaseLib):
-    def __init__(self, addr: str) -> None:
-        super().__init__(addr, "location_regulatory_region")
-
-    # TODO(fxb/46727): Provide an analagous call to check the region
-    # configured into the driver.
-    def setRegion(self, region_code):
-        """Set regulatory region.
-
-        Args:
-            region_code: 2-byte ASCII string.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "location_regulatory_region_facade.set_region"
-        test_args = {"region": region_code}
-
-        return self.send_command(test_cmd, test_args)
diff --git a/src/antlion/controllers/fuchsia_lib/logging_lib.py b/src/antlion/controllers/fuchsia_lib/logging_lib.py
deleted file mode 100644
index 83825c4..0000000
--- a/src/antlion/controllers/fuchsia_lib/logging_lib.py
+++ /dev/null
@@ -1,68 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import datetime
-
-from antlion.controllers.fuchsia_lib.base_lib import BaseLib
-
-
-class FuchsiaLoggingLib(BaseLib):
-    def __init__(self, addr: str) -> None:
-        super().__init__(addr, "logging")
-
-    def logE(self, message):
-        """Log a message of level Error directly to the syslog.
-
-        Args:
-            message: The message to log.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "logging_facade.LogErr"
-        test_args = {
-            "message": "[%s] %s" % (datetime.datetime.now(), message),
-        }
-
-        return self.send_command(test_cmd, test_args)
-
-    def logI(self, message):
-        """Log a message of level Info directly to the syslog.
-
-        Args:
-            message: The message to log.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "logging_facade.LogInfo"
-        test_args = {"message": "[%s] %s" % (datetime.datetime.now(), message)}
-
-        return self.send_command(test_cmd, test_args)
-
-    def logW(self, message):
-        """Log a message of level Warning directly to the syslog.
-
-        Args:
-            message: The message to log.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "logging_facade.LogWarn"
-        test_args = {"message": "[%s] %s" % (datetime.datetime.now(), message)}
-
-        return self.send_command(test_cmd, test_args)
diff --git a/src/antlion/controllers/fuchsia_lib/netstack/__init__.py b/src/antlion/controllers/fuchsia_lib/netstack/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/antlion/controllers/fuchsia_lib/netstack/__init__.py
+++ /dev/null
diff --git a/src/antlion/controllers/fuchsia_lib/netstack/netstack_lib.py b/src/antlion/controllers/fuchsia_lib/netstack/netstack_lib.py
deleted file mode 100644
index 481e9bd..0000000
--- a/src/antlion/controllers/fuchsia_lib/netstack/netstack_lib.py
+++ /dev/null
@@ -1,61 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion.controllers.fuchsia_lib.base_lib import BaseLib
-
-
-class FuchsiaNetstackLib(BaseLib):
-    def __init__(self, addr: str) -> None:
-        super().__init__(addr, "netstack")
-
-    def netstackListInterfaces(self):
-        """ListInterfaces command
-
-        Returns:
-            List of interface paths
-        """
-        test_cmd = "netstack_facade.ListInterfaces"
-        test_args = {}
-
-        return self.send_command(test_cmd, test_args)
-
-    def enableInterface(self, id):
-        """Enable Interface
-
-        Args:
-            id: The interface ID.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "netstack_facade.EnableInterface"
-        test_args = {"identifier": id}
-
-        return self.send_command(test_cmd, test_args)
-
-    def disableInterface(self, id):
-        """Disable Interface
-
-        Args:
-            id: The interface ID.
-
-        Returns:
-            Dictionary, None if success, error if error.
-        """
-        test_cmd = "netstack_facade.DisableInterface"
-        test_args = {"identifier": id}
-
-        return self.send_command(test_cmd, test_args)
diff --git a/src/antlion/controllers/fuchsia_lib/sl4f.py b/src/antlion/controllers/fuchsia_lib/sl4f.py
deleted file mode 100644
index e051d7c..0000000
--- a/src/antlion/controllers/fuchsia_lib/sl4f.py
+++ /dev/null
@@ -1,130 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import ipaddress
-import sys
-
-from antlion import logger
-from antlion.controllers.fuchsia_lib.device_lib import DeviceLib
-from antlion.controllers.fuchsia_lib.hardware_power_statecontrol_lib import (
-    FuchsiaHardwarePowerStatecontrolLib,
-)
-from antlion.controllers.fuchsia_lib.location.regulatory_region_lib import (
-    FuchsiaRegulatoryRegionLib,
-)
-from antlion.controllers.fuchsia_lib.logging_lib import FuchsiaLoggingLib
-from antlion.controllers.fuchsia_lib.netstack.netstack_lib import FuchsiaNetstackLib
-from antlion.controllers.fuchsia_lib.ssh import SSHProvider, SSHError
-from antlion.controllers.fuchsia_lib.wlan_ap_policy_lib import FuchsiaWlanApPolicyLib
-from antlion.controllers.fuchsia_lib.wlan_deprecated_configuration_lib import (
-    FuchsiaWlanDeprecatedConfigurationLib,
-)
-from antlion.controllers.fuchsia_lib.wlan_lib import FuchsiaWlanLib
-from antlion.controllers.fuchsia_lib.wlan_policy_lib import FuchsiaWlanPolicyLib
-from antlion.net import wait_for_port
-
-DEFAULT_SL4F_PORT = 80
-START_SL4F_V2_CMD = "start_sl4f"
-
-
-class SL4F:
-    """Module for Fuchsia devices to interact with the SL4F tool.
-
-    Attributes:
-        ssh: SSHProvider transport to start and stop SL4F.
-        address: http address for SL4F server including SL4F port.
-        log: Logger for the device-specific instance of SL4F.
-    """
-
-    def __init__(self, ssh: SSHProvider, port: int = DEFAULT_SL4F_PORT) -> None:
-        """
-        Args:
-            ssh: SSHProvider transport to start and stop SL4F.
-            port: Port for the SL4F server to listen on.
-        """
-        host = ssh.config.host_name
-
-        if sys.version_info < (3, 9):
-            # TODO(http://b/261746355): Remove this if statement once the
-            # minimum Python version is 3.9 or newer.
-            ip = ipaddress.ip_address(host.split("%")[0])
-            if ip.version == 4:
-                self.address = f"http://{ip}:{port}"
-            elif ip.version == 6:
-                ip = ssh.config.host_name
-                self.address = f"http://[{ip}]:{port}"
-        else:
-            ip = ipaddress.ip_address(host)
-            if ip.version == 4:
-                self.address = f"http://{ip}:{port}"
-            elif ip.version == 6:
-                self.address = f"http://[{ip}]:{port}"
-
-        self.log = logger.create_tagged_trace_logger(f"SL4F | {self.address}")
-
-        try:
-            ssh.stop_component("sl4f")
-            ssh.run(START_SL4F_V2_CMD).stdout
-        except SSHError:
-            # TODO(fxbug.dev/99331) Remove support to run SL4F in CFv1 mode
-            # once ACTS no longer use images that comes with only CFv1 SL4F.
-            self.log.warn(
-                "Running SL4F in CFv1 mode, "
-                "this is deprecated for images built after 5/9/2022, "
-                "see https://fxbug.dev/77056 for more info."
-            )
-            ssh.stop_component("sl4f")
-            ssh.start_v1_component("sl4f")
-
-        try:
-            wait_for_port(host, port)
-            self.log.info("SL4F server is reachable")
-        except TimeoutError as e:
-            raise TimeoutError("SL4F server is unreachable") from e
-
-        self._init_libraries()
-
-    def _init_libraries(self) -> None:
-        # Grab commands from DeviceLib
-        self.device_lib = DeviceLib(self.address)
-
-        # Grab commands from FuchsiaHardwarePowerStatecontrolLib
-        self.hardware_power_statecontrol_lib = FuchsiaHardwarePowerStatecontrolLib(
-            self.address
-        )
-
-        # Grab commands from FuchsiaRegulatoryRegionLib
-        self.regulatory_region_lib = FuchsiaRegulatoryRegionLib(self.address)
-
-        # Grab commands from FuchsiaLoggingLib
-        self.logging_lib = FuchsiaLoggingLib(self.address)
-
-        # Grab commands from FuchsiaNetstackLib
-        self.netstack_lib = FuchsiaNetstackLib(self.address)
-
-        # Grab commands from FuchsiaWlanApPolicyLib
-        self.wlan_ap_policy_lib = FuchsiaWlanApPolicyLib(self.address)
-
-        # Grabs command from FuchsiaWlanDeprecatedConfigurationLib
-        self.wlan_deprecated_configuration_lib = FuchsiaWlanDeprecatedConfigurationLib(
-            self.address
-        )
-
-        # Grab commands from FuchsiaWlanLib
-        self.wlan_lib = FuchsiaWlanLib(self.address)
-
-        # Grab commands from FuchsiaWlanPolicyLib
-        self.wlan_policy_lib = FuchsiaWlanPolicyLib(self.address)
diff --git a/src/antlion/controllers/fuchsia_lib/utils_lib.py b/src/antlion/controllers/fuchsia_lib/utils_lib.py
deleted file mode 100644
index 1e1336a..0000000
--- a/src/antlion/controllers/fuchsia_lib/utils_lib.py
+++ /dev/null
@@ -1,239 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import os
-import logging
-import tarfile
-import tempfile
-import time
-
-from antlion import utils
-from antlion.libs.proc import job
-from antlion.utils import get_fuchsia_mdns_ipv6_address
-
-MDNS_LOOKUP_RETRY_MAX = 3
-FASTBOOT_TIMEOUT = 30
-FLASH_TIMEOUT_SEC = 60 * 5  # 5 minutes
-AFTER_FLASH_BOOT_TIME = 30
-WAIT_FOR_EXISTING_FLASH_TO_FINISH_SEC = 360
-PROCESS_CHECK_WAIT_TIME_SEC = 30
-
-FUCHSIA_SDK_URL = "gs://fuchsia-sdk/development"
-FUCHSIA_RELEASE_TESTING_URL = "gs://fuchsia-release-testing/images"
-
-
-def flash(fuchsia_device, use_ssh=False, fuchsia_reconnect_after_reboot_time=5):
-    """A function to flash, not pave, a fuchsia_device
-
-    Args:
-        fuchsia_device: An ACTS fuchsia_device
-
-    Returns:
-        True if successful.
-    """
-    if not fuchsia_device.authorized_file:
-        raise ValueError(
-            "A ssh authorized_file must be present in the "
-            "ACTS config to flash fuchsia_devices."
-        )
-    # This is the product type from the fx set command.
-    # Do 'fx list-products' to see options in Fuchsia source tree.
-    if not fuchsia_device.product_type:
-        raise ValueError(
-            "A product type must be specified to flash " "fuchsia_devices."
-        )
-    # This is the board type from the fx set command.
-    # Do 'fx list-boards' to see options in Fuchsia source tree.
-    if not fuchsia_device.board_type:
-        raise ValueError("A board type must be specified to flash " "fuchsia_devices.")
-    if not fuchsia_device.build_number:
-        fuchsia_device.build_number = "LATEST"
-    if not fuchsia_device.mdns_name:
-        raise ValueError(
-            "Either fuchsia_device mdns_name must be specified or "
-            "ip must be the mDNS name to be able to flash."
-        )
-
-    file_to_download = None
-    image_archive_path = None
-    image_path = None
-
-    if not fuchsia_device.specific_image:
-        product_build = fuchsia_device.product_type
-        if fuchsia_device.build_type:
-            product_build = f"{product_build}_{fuchsia_device.build_type}"
-        if "LATEST" in fuchsia_device.build_number:
-            sdk_version = "sdk"
-            if "LATEST_F" in fuchsia_device.build_number:
-                f_branch = fuchsia_device.build_number.split("LATEST_F", 1)[1]
-                sdk_version = f"f{f_branch}_sdk"
-            file_to_download = (
-                f"{FUCHSIA_RELEASE_TESTING_URL}/"
-                f"{sdk_version}-{product_build}.{fuchsia_device.board_type}-release.tgz"
-            )
-        else:
-            # Must be a fully qualified build number (e.g. 5.20210721.4.1215)
-            file_to_download = (
-                f"{FUCHSIA_SDK_URL}/{fuchsia_device.build_number}/images/"
-                f"{product_build}.{fuchsia_device.board_type}-release.tgz"
-            )
-    elif "gs://" in fuchsia_device.specific_image:
-        file_to_download = fuchsia_device.specific_image
-    elif os.path.isdir(fuchsia_device.specific_image):
-        image_path = fuchsia_device.specific_image
-    elif tarfile.is_tarfile(fuchsia_device.specific_image):
-        image_archive_path = fuchsia_device.specific_image
-    else:
-        raise ValueError(f'Invalid specific_image "{fuchsia_device.specific_image}"')
-
-    if image_path:
-        reboot_to_bootloader(
-            fuchsia_device, use_ssh, fuchsia_reconnect_after_reboot_time
-        )
-        logging.info(
-            f'Flashing {fuchsia_device.mdns_name} with {image_path} using authorized keys "{fuchsia_device.authorized_file}".'
-        )
-        run_flash_script(fuchsia_device, image_path)
-    else:
-        suffix = fuchsia_device.board_type
-        with tempfile.TemporaryDirectory(suffix=suffix) as image_path:
-            if file_to_download:
-                logging.info(f"Downloading {file_to_download} to {image_path}")
-                job.run(f"gsutil cp {file_to_download} {image_path}")
-                image_archive_path = os.path.join(
-                    image_path, os.path.basename(file_to_download)
-                )
-
-            if image_archive_path:
-                # Use tar command instead of tarfile.extractall, as it takes too long.
-                job.run(f"tar xfvz {image_archive_path} -C {image_path}", timeout=120)
-
-            reboot_to_bootloader(
-                fuchsia_device, use_ssh, fuchsia_reconnect_after_reboot_time
-            )
-
-            logging.info(
-                f'Flashing {fuchsia_device.mdns_name} with {image_archive_path} using authorized keys "{fuchsia_device.authorized_file}".'
-            )
-            run_flash_script(fuchsia_device, image_path)
-    return True
-
-
-def reboot_to_bootloader(
-    fuchsia_device, use_ssh=False, fuchsia_reconnect_after_reboot_time=5
-):
-    import psutil
-    import usbinfo
-    from antlion.controllers.fuchsia_lib.ssh import SSHError
-
-    if use_ssh:
-        logging.info("Sending reboot command via SSH to " "get into bootloader.")
-        # Sending this command will put the device in fastboot
-        # but it does not guarantee the device will be in fastboot
-        # after this command.  There is no check so if there is an
-        # expectation of the device being in fastboot, then some
-        # other check needs to be done.
-        try:
-            fuchsia_device.ssh.run(
-                "dm rb", timeout_sec=fuchsia_reconnect_after_reboot_time
-            )
-        except SSHError as e:
-            if "closed by remote host" not in e.result.stderr:
-                raise e
-    else:
-        pass
-        ## Todo: Add elif for SL4F if implemented in SL4F
-
-    time_counter = 0
-    while time_counter < FASTBOOT_TIMEOUT:
-        logging.info(
-            "Checking to see if fuchsia_device(%s) SN: %s is in "
-            "fastboot. (Attempt #%s Timeout: %s)"
-            % (
-                fuchsia_device.mdns_name,
-                fuchsia_device.serial_number,
-                str(time_counter + 1),
-                FASTBOOT_TIMEOUT,
-            )
-        )
-        for usb_device in usbinfo.usbinfo():
-            if (
-                usb_device["iSerialNumber"] == fuchsia_device.serial_number
-                and usb_device["iProduct"] == "USB_download_gadget"
-            ):
-                logging.info(
-                    "fuchsia_device(%s) SN: %s is in fastboot."
-                    % (fuchsia_device.mdns_name, fuchsia_device.serial_number)
-                )
-                time_counter = FASTBOOT_TIMEOUT
-        time_counter = time_counter + 1
-        if time_counter == FASTBOOT_TIMEOUT:
-            for fail_usb_device in usbinfo.usbinfo():
-                logging.debug(fail_usb_device)
-            raise TimeoutError(
-                "fuchsia_device(%s) SN: %s "
-                "never went into fastboot"
-                % (fuchsia_device.mdns_name, fuchsia_device.serial_number)
-            )
-        time.sleep(1)
-
-    end_time = time.time() + WAIT_FOR_EXISTING_FLASH_TO_FINISH_SEC
-    # Attempt to wait for existing flashing process to finish
-    while time.time() < end_time:
-        flash_process_found = False
-        for proc in psutil.process_iter():
-            if "bash" in proc.name() and "flash.sh" in proc.cmdline():
-                logging.info("Waiting for existing flash.sh process to complete.")
-                time.sleep(PROCESS_CHECK_WAIT_TIME_SEC)
-                flash_process_found = True
-        if not flash_process_found:
-            break
-
-
-def run_flash_script(fuchsia_device, flash_dir):
-    try:
-        flash_output = job.run(
-            f"bash {flash_dir}/flash.sh --ssh-key={fuchsia_device.authorized_file} -s {fuchsia_device.serial_number}",
-            timeout=FLASH_TIMEOUT_SEC,
-        )
-        logging.debug(flash_output.stderr)
-    except job.TimeoutError as err:
-        raise TimeoutError(err)
-
-    logging.info(
-        "Waiting %s seconds for device"
-        " to come back up after flashing." % AFTER_FLASH_BOOT_TIME
-    )
-    time.sleep(AFTER_FLASH_BOOT_TIME)
-    logging.info("Updating device to new IP addresses.")
-    mdns_ip = None
-    for retry_counter in range(MDNS_LOOKUP_RETRY_MAX):
-        mdns_ip = get_fuchsia_mdns_ipv6_address(fuchsia_device.mdns_name)
-        if mdns_ip:
-            break
-        else:
-            time.sleep(1)
-    if mdns_ip and utils.is_valid_ipv6_address(mdns_ip):
-        logging.info(
-            "IP for fuchsia_device(%s) changed from %s to %s"
-            % (fuchsia_device.mdns_name, fuchsia_device.ip, mdns_ip)
-        )
-        fuchsia_device.ip = mdns_ip
-        fuchsia_device.address = "http://[{}]:{}".format(
-            fuchsia_device.ip, fuchsia_device.sl4f_port
-        )
-    else:
-        raise ValueError("Invalid IP: %s after flashing." % fuchsia_device.mdns_name)
diff --git a/src/antlion/controllers/fuchsia_lib/wlan_ap_policy_lib.py b/src/antlion/controllers/fuchsia_lib/wlan_ap_policy_lib.py
deleted file mode 100644
index 54486d9..0000000
--- a/src/antlion/controllers/fuchsia_lib/wlan_ap_policy_lib.py
+++ /dev/null
@@ -1,110 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion.controllers.fuchsia_lib.base_lib import BaseLib
-
-
-class FuchsiaWlanApPolicyLib(BaseLib):
-    def __init__(self, addr: str) -> None:
-        super().__init__(addr, "wlan_ap_policy")
-
-    def wlanStartAccessPoint(
-        self, target_ssid, security_type, target_pwd, connectivity_mode, operating_band
-    ):
-        """Start an Access Point.
-        Args:
-            target_ssid: the network to attempt a connection to
-            security_type: the security protocol of the network. Possible inputs:
-                "none", "wep", "wpa", "wpa2", "wpa3"
-            target_pwd: (optional) credential being saved with the network. No password
-                        is equivalent to empty string.
-            connectivity_mode: the connectivity mode to use. Possible inputs:
-                "local_only", "unrestricted"
-            operating_band: The operating band to use. Possible inputs:
-                "any", "only_2_4_ghz", "only_5_ghz"
-
-        Returns:
-            boolean indicating if the action was successful
-        """
-
-        test_cmd = "wlan_ap_policy.start_access_point"
-
-        test_args = {
-            "target_ssid": target_ssid,
-            "security_type": security_type.lower(),
-            "target_pwd": target_pwd,
-            "connectivity_mode": connectivity_mode,
-            "operating_band": operating_band,
-        }
-
-        return self.send_command(test_cmd, test_args)
-
-    def wlanStopAccessPoint(self, target_ssid, security_type, target_pwd=""):
-        """Stops an active Access Point.
-        Args:
-            target_ssid: the network to attempt a connection to
-            security_type: the security protocol of the network
-            target_pwd: (optional) credential being saved with the network. No password
-                        is equivalent to empty string.
-
-        Returns:
-            boolean indicating if the action was successful
-        """
-
-        test_cmd = "wlan_ap_policy.stop_access_point"
-
-        test_args = {
-            "target_ssid": target_ssid,
-            "security_type": security_type.lower(),
-            "target_pwd": target_pwd,
-        }
-
-        return self.send_command(test_cmd, test_args)
-
-    def wlanStopAllAccessPoint(self):
-        """Stops all Access Points
-
-        Returns:
-            boolean indicating if the actions were successful
-        """
-
-        test_cmd = "wlan_ap_policy.stop_all_access_points"
-
-        test_args = {}
-
-        return self.send_command(test_cmd, test_args)
-
-    def wlanSetNewListener(self):
-        """Sets the update listener stream of the facade to a new stream so that updates will be
-        reset. Intended to be used between tests so that the behaviour of updates in a test is
-        independent from previous tests.
-        """
-        test_cmd = "wlan_ap_policy.set_new_update_listener"
-
-        return self.send_command(test_cmd, {})
-
-    def wlanGetUpdate(self, timeout=30):
-        """Gets a list of AP state updates. This call will return with an update immediately the
-        first time the update listener is initialized by setting a new listener or by creating
-        a client controller before setting a new listener. Subsequent calls will hang until
-        there is an update.
-        Returns:
-            A list of AP state updated. If there is no error, the result is a list with a
-            structure that matches the FIDL AccessPointState struct given for updates.
-        """
-        test_cmd = "wlan_ap_policy.get_update"
-
-        return self.send_command(test_cmd, {}, response_timeout=timeout)
diff --git a/src/antlion/controllers/fuchsia_lib/wlan_lib.py b/src/antlion/controllers/fuchsia_lib/wlan_lib.py
deleted file mode 100644
index 9ed274a..0000000
--- a/src/antlion/controllers/fuchsia_lib/wlan_lib.py
+++ /dev/null
@@ -1,173 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-from antlion import logger
-from antlion.controllers.fuchsia_lib.base_lib import BaseLib
-
-COMMAND_SCAN = "wlan.scan"
-COMMAND_SCAN_FOR_BSS_INFO = "wlan.scan_for_bss_info"
-COMMAND_CONNECT = "wlan.connect"
-COMMAND_DISCONNECT = "wlan.disconnect"
-COMMAND_STATUS = "wlan.status"
-COMMAND_GET_IFACE_ID_LIST = "wlan.get_iface_id_list"
-COMMAND_GET_PHY_ID_LIST = "wlan.get_phy_id_list"
-COMMAND_DESTROY_IFACE = "wlan.destroy_iface"
-COMMAND_GET_COUNTRY = "wlan_phy.get_country"
-COMMAND_GET_DEV_PATH = "wlan_phy.get_dev_path"
-COMMAND_QUERY_IFACE = "wlan.query_iface"
-
-
-class FuchsiaWlanLib(BaseLib):
-    def __init__(self, addr: str) -> None:
-        super().__init__(addr, "wlan")
-
-    def wlanStartScan(self):
-        """Starts a wlan scan
-
-        Returns:
-            scan results
-        """
-        test_cmd = COMMAND_SCAN
-
-        return self.send_command(test_cmd, {})
-
-    def wlanScanForBSSInfo(self):
-        """Scans and returns BSS info
-
-        Returns:
-            A dict mapping each seen SSID to a list of BSS Description IE
-            blocks, one for each BSS observed in the network
-        """
-        test_cmd = COMMAND_SCAN_FOR_BSS_INFO
-
-        return self.send_command(test_cmd, {})
-
-    def wlanConnectToNetwork(self, target_ssid, target_bss_desc, target_pwd=None):
-        """Triggers a network connection
-        Args:
-            target_ssid: the network to attempt a connection to
-            target_pwd: (optional) password for the target network
-
-        Returns:
-            boolean indicating if the connection was successful
-        """
-        test_cmd = COMMAND_CONNECT
-        test_args = {
-            "target_ssid": target_ssid,
-            "target_pwd": target_pwd,
-            "target_bss_desc": target_bss_desc,
-        }
-
-        return self.send_command(test_cmd, test_args)
-
-    def wlanDisconnect(self):
-        """Disconnect any current wifi connections"""
-        test_cmd = COMMAND_DISCONNECT
-
-        return self.send_command(test_cmd, {})
-
-    def wlanDestroyIface(self, iface_id):
-        """Destroy WLAN interface by ID.
-        Args:
-            iface_id: the interface id.
-
-        Returns:
-            Dictionary, service id if success, error if error.
-        """
-        test_cmd = COMMAND_DESTROY_IFACE
-        test_args = {"identifier": iface_id}
-
-        return self.send_command(test_cmd, test_args)
-
-    def wlanGetIfaceIdList(self):
-        """Get a list if wlan interface IDs.
-
-        Returns:
-            Dictionary, service id if success, error if error.
-        """
-        test_cmd = COMMAND_GET_IFACE_ID_LIST
-
-        return self.send_command(test_cmd, {})
-
-    def wlanPhyIdList(self):
-        """Get a list if wlan phy IDs.
-
-        Returns:
-            List of IDs if success, error if error.
-        """
-        test_cmd = COMMAND_GET_PHY_ID_LIST
-
-        return self.send_command(test_cmd, {})
-
-    def wlanStatus(self, iface_id=None):
-        """Request connection status
-
-        Args:
-            iface_id: unsigned 16-bit int, the wlan interface id
-                (defaults to None)
-
-        Returns:
-            Client state summary containing WlanClientState and
-            status of various networks connections
-        """
-        test_cmd = COMMAND_STATUS
-        test_args = {}
-        if iface_id:
-            test_args = {"iface_id": iface_id}
-
-        return self.send_command(test_cmd, test_args)
-
-    def wlanGetCountry(self, phy_id):
-        """Reads the currently configured country for `phy_id`.
-
-        Args:
-            phy_id: unsigned 16-bit integer.
-
-        Returns:
-            Dictionary, String if success, error if error.
-        """
-        test_cmd = COMMAND_GET_COUNTRY
-        test_args = {"phy_id": phy_id}
-
-        return self.send_command(test_cmd, test_args)
-
-    def wlanGetDevPath(self, phy_id):
-        """Queries the device path for `phy_id`.
-
-        Args:
-            phy_id: unsigned 16-bit integer.
-
-        Returns:
-            Dictionary, String if success, error if error.
-        """
-        test_cmd = COMMAND_GET_DEV_PATH
-        test_args = {"phy_id": phy_id}
-
-        return self.send_command(test_cmd, test_args)
-
-    def wlanQueryInterface(self, iface_id):
-        """Retrieves interface info for given wlan iface id.
-
-        Args:
-            iface_id: unsigned 16-bit int, the wlan interface id.
-
-        Returns:
-            Dictionary, containing interface id, role, phy_id, phy_assigned_id
-            and mac addr.
-        """
-        test_cmd = COMMAND_QUERY_IFACE
-        test_args = {"iface_id": iface_id}
-
-        return self.send_command(test_cmd, test_args)
diff --git a/src/antlion/controllers/fuchsia_lib/wlan_policy_lib.py b/src/antlion/controllers/fuchsia_lib/wlan_policy_lib.py
deleted file mode 100644
index 94701d7..0000000
--- a/src/antlion/controllers/fuchsia_lib/wlan_policy_lib.py
+++ /dev/null
@@ -1,182 +0,0 @@
-# Lint as: python3
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-#   This class provides pipeline betweem python tests and WLAN policy facade.
-
-from antlion import logger
-from antlion.controllers.fuchsia_lib.base_lib import BaseLib
-
-COMMAND_START_CLIENT_CONNECTIONS = "wlan_policy.start_client_connections"
-COMMAND_STOP_CLIENT_CONNECTIONS = "wlan_policy.stop_client_connections"
-COMMAND_SCAN_FOR_NETWORKS = "wlan_policy.scan_for_networks"
-COMMAND_SAVE_NETWORK = "wlan_policy.save_network"
-COMMAND_REMOVE_NETWORK = "wlan_policy.remove_network"
-COMMAND_REMOVE_ALL_NETWORKS = "wlan_policy.remove_all_networks"
-COMMAND_GET_SAVED_NETWORKS = "wlan_policy.get_saved_networks"
-COMMAND_CONNECT = "wlan_policy.connect"
-COMMAND_CREATE_CLIENT_CONTROLLER = "wlan_policy.create_client_controller"
-COMMAND_SET_NEW_LISTENER = "wlan_policy.set_new_update_listener"
-COMMAND_REMOVE_ALL_NETWORKS = "wlan_policy.remove_all_networks"
-COMMAND_GET_UPDATE = "wlan_policy.get_update"
-
-
-class FuchsiaWlanPolicyLib(BaseLib):
-    def __init__(self, addr: str) -> None:
-        super().__init__(addr, "wlan_policy")
-
-    def wlanStartClientConnections(self):
-        """Enables device to initiate connections to networks"""
-
-        test_cmd = COMMAND_START_CLIENT_CONNECTIONS
-
-        return self.send_command(test_cmd, {})
-
-    def wlanStopClientConnections(self):
-        """Disables device for initiating connections to networks"""
-
-        test_cmd = COMMAND_STOP_CLIENT_CONNECTIONS
-
-        return self.send_command(test_cmd, {})
-
-    def wlanScanForNetworks(self):
-        """Scans for networks that can be connected to
-        Returns:
-            A list of network names and security types
-        """
-
-        test_cmd = COMMAND_SCAN_FOR_NETWORKS
-
-        return self.send_command(test_cmd, {})
-
-    def wlanSaveNetwork(self, target_ssid, security_type, target_pwd=None):
-        """Saveds a network to the device for future connections
-        Args:
-            target_ssid: the network to attempt a connection to
-            security_type: the security protocol of the network
-            target_pwd: (optional) credential being saved with the network. No password
-                        is equivalent to empty string.
-
-        Returns:
-            boolean indicating if the connection was successful
-        """
-        if not target_pwd:
-            target_pwd = ""
-        test_cmd = COMMAND_SAVE_NETWORK
-        test_args = {
-            "target_ssid": target_ssid,
-            "security_type": str(security_type).lower(),
-            "target_pwd": target_pwd,
-        }
-
-        return self.send_command(test_cmd, test_args)
-
-    def wlanRemoveNetwork(self, target_ssid, security_type, target_pwd=None):
-        """Removes or "forgets" a network from saved networks
-        Args:
-            target_ssid: the network to attempt a connection to
-            security_type: the security protocol of the network
-            target_pwd: (optional) credential of the network to remove. No password and
-                        empty string are equivalent.
-        """
-        if not target_pwd:
-            target_pwd = ""
-        test_cmd = COMMAND_REMOVE_NETWORK
-        test_args = {
-            "target_ssid": target_ssid,
-            "security_type": str(security_type).lower(),
-            "target_pwd": target_pwd,
-        }
-
-        return self.send_command(test_cmd, test_args)
-
-    def wlanRemoveAllNetworks(self):
-        """Removes or "forgets" all networks from saved networks
-        Returns:
-            A boolean indicating if the action was successful
-        """
-
-        test_cmd = COMMAND_REMOVE_ALL_NETWORKS
-
-        return self.send_command(test_cmd, {})
-
-    def wlanGetSavedNetworks(self):
-        """Gets networks saved on device. Any PSK of a saved network will be
-        lower case regardless of how it was saved.
-            Returns:
-                A list of saved network names and security protocols
-        """
-
-        test_cmd = COMMAND_GET_SAVED_NETWORKS
-
-        return self.send_command(test_cmd, {})
-
-    def wlanConnect(self, target_ssid, security_type):
-        """Triggers connection to a network
-            Args:
-                target_ssid: the network to attempt a connection to. Must have been previously
-                             saved in order for a successful connection to happen.
-                security_type: the security protocol of the network
-
-        Returns:
-                boolean indicating if the connection was successful
-        """
-
-        test_cmd = COMMAND_CONNECT
-        test_args = {
-            "target_ssid": target_ssid,
-            "security_type": str(security_type).lower(),
-        }
-
-        return self.send_command(test_cmd, test_args)
-
-    def wlanCreateClientController(self):
-        """Initializes the client controller of the facade that is used to make Client Controller
-        API calls
-        """
-        test_cmd = COMMAND_CREATE_CLIENT_CONTROLLER
-
-        return self.send_command(test_cmd, {})
-
-    def wlanSetNewListener(self):
-        """Sets the update listener stream of the facade to a new stream so that updates will be
-        reset. Intended to be used between tests so that the behaviour of updates in a test is
-        independent from previous tests.
-        """
-        test_cmd = COMMAND_SET_NEW_LISTENER
-
-        return self.send_command(test_cmd, {})
-
-    def wlanRemoveAllNetworks(self):
-        """Deletes all saved networks on the device. Relies directly on the get_saved_networks and
-        remove_network commands
-        """
-        test_cmd = COMMAND_REMOVE_ALL_NETWORKS
-
-        return self.send_command(test_cmd, {})
-
-    def wlanGetUpdate(self, timeout=30):
-        """Gets one client listener update. This call will return with an update immediately the
-        first time the update listener is initialized by setting a new listener or by creating
-        a client controller before setting a new listener. Subsequent calls will hang until
-        there is an update.
-        Returns:
-            An update of connection status. If there is no error, the result is a dict with a
-            structure that matches the FIDL ClientStateSummary struct given for updates.
-        """
-        test_cmd = COMMAND_GET_UPDATE
-
-        return self.send_command(test_cmd, {}, response_timeout=timeout)
diff --git a/src/antlion/controllers/iperf_server.py b/src/antlion/controllers/iperf_server.py
deleted file mode 100755
index 20dcfbf..0000000
--- a/src/antlion/controllers/iperf_server.py
+++ /dev/null
@@ -1,736 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import json
-import logging
-import math
-import os
-import shlex
-import subprocess
-import threading
-import time
-
-from antlion import context
-from antlion import logger as acts_logger
-from antlion import utils
-from antlion.controllers.android_device import AndroidDevice
-from antlion.controllers.utils_lib.ssh import connection
-from antlion.controllers.utils_lib.ssh import settings
-from antlion.event import event_bus
-from antlion.event.decorators import subscribe_static
-from antlion.event.event import TestClassBeginEvent
-from antlion.event.event import TestClassEndEvent
-from antlion.libs.proc import job
-
-MOBLY_CONTROLLER_CONFIG_NAME = "IPerfServer"
-ACTS_CONTROLLER_REFERENCE_NAME = "iperf_servers"
-KILOBITS = 1024
-MEGABITS = KILOBITS * 1024
-GIGABITS = MEGABITS * 1024
-BITS_IN_BYTE = 8
-
-
-def create(configs):
-    """Factory method for iperf servers.
-
-    The function creates iperf servers based on at least one config.
-    If configs only specify a port number, a regular local IPerfServer object
-    will be created. If configs contains ssh settings or and AndroidDevice,
-    remote iperf servers will be started on those devices
-
-    Args:
-        configs: config parameters for the iperf server
-    """
-    results = []
-    for c in configs:
-        if type(c) in (str, int) and str(c).isdigit():
-            results.append(IPerfServer(int(c)))
-        elif type(c) is dict and "AndroidDevice" in c and "port" in c:
-            results.append(IPerfServerOverAdb(c["AndroidDevice"], c["port"]))
-        elif type(c) is dict and "ssh_config" in c and "port" in c:
-            results.append(
-                IPerfServerOverSsh(
-                    settings.from_config(c["ssh_config"]),
-                    c["port"],
-                    test_interface=c.get("test_interface"),
-                    use_killall=c.get("use_killall"),
-                )
-            )
-        else:
-            raise ValueError(
-                "Config entry %s in %s is not a valid IPerfServer "
-                "config." % (repr(c), configs)
-            )
-    return results
-
-
-def get_info(iperf_servers):
-    """Placeholder for info about iperf servers
-
-    Returns:
-        None
-    """
-    return None
-
-
-def destroy(iperf_server_list):
-    for iperf_server in iperf_server_list:
-        try:
-            iperf_server.stop()
-        except Exception:
-            logging.exception("Unable to properly clean up %s." % iperf_server)
-
-
-class IPerfResult(object):
-    def __init__(self, result_path, reporting_speed_units="Mbytes"):
-        """Loads iperf result from file.
-
-        Loads iperf result from JSON formatted server log. File can be accessed
-        before or after server is stopped. Note that only the first JSON object
-        will be loaded and this funtion is not intended to be used with files
-        containing multiple iperf client runs.
-        """
-        # if result_path isn't a path, treat it as JSON
-        self.reporting_speed_units = reporting_speed_units
-        if not os.path.exists(result_path):
-            self.result = json.loads(result_path)
-        else:
-            try:
-                with open(result_path, "r") as f:
-                    iperf_output = f.readlines()
-                    if "}\n" in iperf_output:
-                        iperf_output = iperf_output[: iperf_output.index("}\n") + 1]
-                    iperf_string = "".join(iperf_output)
-                    iperf_string = iperf_string.replace("nan", "0")
-                    self.result = json.loads(iperf_string)
-            except ValueError:
-                with open(result_path, "r") as f:
-                    # Possibly a result from interrupted iperf run,
-                    # skip first line and try again.
-                    lines = f.readlines()[1:]
-                    self.result = json.loads("".join(lines))
-
-    def _has_data(self):
-        """Checks if the iperf result has valid throughput data.
-
-        Returns:
-            True if the result contains throughput data. False otherwise.
-        """
-        return ("end" in self.result) and (
-            "sum_received" in self.result["end"] or "sum" in self.result["end"]
-        )
-
-    def _get_reporting_speed(self, network_speed_in_bits_per_second):
-        """Sets the units for the network speed reporting based on how the
-        object was initiated.  Defaults to Megabytes per second.  Currently
-        supported, bits per second (bits), kilobits per second (kbits), megabits
-        per second (mbits), gigabits per second (gbits), bytes per second
-        (bytes), kilobits per second (kbytes), megabits per second (mbytes),
-        gigabytes per second (gbytes).
-
-        Args:
-            network_speed_in_bits_per_second: The network speed from iperf in
-                bits per second.
-
-        Returns:
-            The value of the throughput in the appropriate units.
-        """
-        speed_divisor = 1
-        if self.reporting_speed_units[1:].lower() == "bytes":
-            speed_divisor = speed_divisor * BITS_IN_BYTE
-        if self.reporting_speed_units[0:1].lower() == "k":
-            speed_divisor = speed_divisor * KILOBITS
-        if self.reporting_speed_units[0:1].lower() == "m":
-            speed_divisor = speed_divisor * MEGABITS
-        if self.reporting_speed_units[0:1].lower() == "g":
-            speed_divisor = speed_divisor * GIGABITS
-        return network_speed_in_bits_per_second / speed_divisor
-
-    def get_json(self):
-        """Returns the raw json output from iPerf."""
-        return self.result
-
-    @property
-    def error(self):
-        return self.result.get("error", None)
-
-    @property
-    def avg_rate(self):
-        """Average UDP rate in MB/s over the entire run.
-
-        This is the average UDP rate observed at the terminal the iperf result
-        is pulled from. According to iperf3 documentation this is calculated
-        based on bytes sent and thus is not a good representation of the
-        quality of the link. If the result is not from a success run, this
-        property is None.
-        """
-        if not self._has_data() or "sum" not in self.result["end"]:
-            return None
-        bps = self.result["end"]["sum"]["bits_per_second"]
-        return self._get_reporting_speed(bps)
-
-    @property
-    def avg_receive_rate(self):
-        """Average receiving rate in MB/s over the entire run.
-
-        This data may not exist if iperf was interrupted. If the result is not
-        from a success run, this property is None.
-        """
-        if not self._has_data() or "sum_received" not in self.result["end"]:
-            return None
-        bps = self.result["end"]["sum_received"]["bits_per_second"]
-        return self._get_reporting_speed(bps)
-
-    @property
-    def avg_send_rate(self):
-        """Average sending rate in MB/s over the entire run.
-
-        This data may not exist if iperf was interrupted. If the result is not
-        from a success run, this property is None.
-        """
-        if not self._has_data() or "sum_sent" not in self.result["end"]:
-            return None
-        bps = self.result["end"]["sum_sent"]["bits_per_second"]
-        return self._get_reporting_speed(bps)
-
-    @property
-    def instantaneous_rates(self):
-        """Instantaneous received rate in MB/s over entire run.
-
-        This data may not exist if iperf was interrupted. If the result is not
-        from a success run, this property is None.
-        """
-        if not self._has_data():
-            return None
-        intervals = [
-            self._get_reporting_speed(interval["sum"]["bits_per_second"])
-            for interval in self.result["intervals"]
-        ]
-        return intervals
-
-    @property
-    def std_deviation(self):
-        """Standard deviation of rates in MB/s over entire run.
-
-        This data may not exist if iperf was interrupted. If the result is not
-        from a success run, this property is None.
-        """
-        return self.get_std_deviation(0)
-
-    def get_std_deviation(self, iperf_ignored_interval):
-        """Standard deviation of rates in MB/s over entire run.
-
-        This data may not exist if iperf was interrupted. If the result is not
-        from a success run, this property is None. A configurable number of
-        beginning (and the single last) intervals are ignored in the
-        calculation as they are inaccurate (e.g. the last is from a very small
-        interval)
-
-        Args:
-            iperf_ignored_interval: number of iperf interval to ignored in
-            calculating standard deviation
-
-        Returns:
-            The standard deviation.
-        """
-        if not self._has_data():
-            return None
-        instantaneous_rates = self.instantaneous_rates[iperf_ignored_interval:-1]
-        avg_rate = math.fsum(instantaneous_rates) / len(instantaneous_rates)
-        sqd_deviations = [(rate - avg_rate) ** 2 for rate in instantaneous_rates]
-        std_dev = math.sqrt(math.fsum(sqd_deviations) / (len(sqd_deviations) - 1))
-        return std_dev
-
-
-class IPerfServerBase(object):
-    # Keeps track of the number of IPerfServer logs to prevent file name
-    # collisions.
-    __log_file_counter = 0
-
-    __log_file_lock = threading.Lock()
-
-    def __init__(self, port):
-        self._port = port
-        # TODO(markdr): We shouldn't be storing the log files in an array like
-        # this. Nobody should be reading this property either. Instead, the
-        # IPerfResult should be returned in stop() with all the necessary info.
-        # See aosp/1012824 for a WIP implementation.
-        self.log_files = []
-
-    @property
-    def port(self):
-        raise NotImplementedError("port must be specified.")
-
-    @property
-    def started(self):
-        raise NotImplementedError("started must be specified.")
-
-    def start(self, extra_args="", tag=""):
-        """Starts an iperf3 server.
-
-        Args:
-            extra_args: A string representing extra arguments to start iperf
-                server with.
-            tag: Appended to log file name to identify logs from different
-                iperf runs.
-        """
-        raise NotImplementedError("start() must be specified.")
-
-    def stop(self):
-        """Stops the iperf server.
-
-        Returns:
-            The name of the log file generated from the terminated session.
-        """
-        raise NotImplementedError("stop() must be specified.")
-
-    def _get_full_file_path(self, tag=None):
-        """Returns the full file path for the IPerfServer log file.
-
-        Note: If the directory for the file path does not exist, it will be
-        created.
-
-        Args:
-            tag: The tag passed in to the server run.
-        """
-        out_dir = self.log_path
-
-        with IPerfServerBase.__log_file_lock:
-            tags = [tag, IPerfServerBase.__log_file_counter]
-            out_file_name = "IPerfServer,%s.log" % (
-                ",".join([str(x) for x in tags if x != "" and x is not None])
-            )
-            IPerfServerBase.__log_file_counter += 1
-
-        file_path = os.path.join(out_dir, out_file_name)
-        self.log_files.append(file_path)
-        return file_path
-
-    @property
-    def log_path(self):
-        current_context = context.get_current_context()
-        full_out_dir = os.path.join(
-            current_context.get_full_output_path(), "IPerfServer%s" % self.port
-        )
-
-        # Ensure the directory exists.
-        os.makedirs(full_out_dir, exist_ok=True)
-
-        return full_out_dir
-
-
-def _get_port_from_ss_output(ss_output, pid):
-    pid = str(pid)
-    lines = ss_output.split("\n")
-    for line in lines:
-        if pid in line:
-            # Expected format:
-            # tcp LISTEN  0 5 *:<PORT>  *:* users:(("cmd",pid=<PID>,fd=3))
-            return line.split()[4].split(":")[-1]
-    else:
-        raise ProcessLookupError("Could not find started iperf3 process.")
-
-
-class IPerfServer(IPerfServerBase):
-    """Class that handles iperf server commands on localhost."""
-
-    def __init__(self, port=5201):
-        super().__init__(port)
-        self._hinted_port = port
-        self._current_log_file = None
-        self._iperf_process = None
-        self._last_opened_file = None
-
-    @property
-    def port(self):
-        return self._port
-
-    @property
-    def started(self):
-        return self._iperf_process is not None
-
-    def start(self, extra_args="", tag=""):
-        """Starts iperf server on local machine.
-
-        Args:
-            extra_args: A string representing extra arguments to start iperf
-                server with.
-            tag: Appended to log file name to identify logs from different
-                iperf runs.
-        """
-        if self._iperf_process is not None:
-            return
-
-        self._current_log_file = self._get_full_file_path(tag)
-
-        # Run an iperf3 server on the hinted port with JSON output.
-        command = ["iperf3", "-s", "-p", str(self._hinted_port), "-J"]
-
-        command.extend(shlex.split(extra_args))
-
-        if self._last_opened_file:
-            self._last_opened_file.close()
-        self._last_opened_file = open(self._current_log_file, "w")
-        self._iperf_process = subprocess.Popen(
-            command, stdout=self._last_opened_file, stderr=subprocess.DEVNULL
-        )
-        for attempts_left in reversed(range(3)):
-            try:
-                self._port = int(
-                    _get_port_from_ss_output(
-                        job.run("ss -l -p -n | grep iperf").stdout,
-                        self._iperf_process.pid,
-                    )
-                )
-                break
-            except ProcessLookupError:
-                if attempts_left == 0:
-                    raise
-                logging.debug("iperf3 process not started yet.")
-                time.sleep(0.01)
-
-    def stop(self):
-        """Stops the iperf server.
-
-        Returns:
-            The name of the log file generated from the terminated session.
-        """
-        if self._iperf_process is None:
-            return
-
-        if self._last_opened_file:
-            self._last_opened_file.close()
-            self._last_opened_file = None
-
-        self._iperf_process.terminate()
-        self._iperf_process = None
-
-        return self._current_log_file
-
-    def __del__(self):
-        self.stop()
-
-
-class IPerfServerOverSsh(IPerfServerBase):
-    """Class that handles iperf3 operations on remote machines."""
-
-    def __init__(self, ssh_settings, port, test_interface=None, use_killall=False):
-        super().__init__(port)
-        self.ssh_settings = ssh_settings
-        self.log = acts_logger.create_tagged_trace_logger(
-            f"IPerfServer | {self.ssh_settings.hostname}"
-        )
-        self._ssh_session = None
-        self.start_ssh()
-
-        self._iperf_pid = None
-        self._current_tag = None
-        self.hostname = self.ssh_settings.hostname
-        self._use_killall = str(use_killall).lower() == "true"
-        try:
-            # A test interface can only be found if an ip address is specified.
-            # A fully qualified hostname will return None for the
-            # test_interface.
-            self.test_interface = (
-                test_interface
-                if test_interface
-                else utils.get_interface_based_on_ip(self._ssh_session, self.hostname)
-            )
-        except Exception as e:
-            self.log.warning(e)
-            self.test_interface = None
-
-    @property
-    def port(self):
-        return self._port
-
-    @property
-    def started(self):
-        return self._iperf_pid is not None
-
-    def _get_remote_log_path(self):
-        return "/tmp/iperf_server_port%s.log" % self.port
-
-    def get_interface_ip_addresses(self, interface):
-        """Gets all of the ip addresses, ipv4 and ipv6, associated with a
-           particular interface name.
-
-        Args:
-            interface: The interface name on the device, ie eth0
-
-        Returns:
-            A list of dictionaries of the various IP addresses. See
-            utils.get_interface_ip_addresses.
-        """
-        if not self._ssh_session:
-            self.start_ssh()
-
-        return utils.get_interface_ip_addresses(self._ssh_session, interface)
-
-    def renew_test_interface_ip_address(self):
-        """Renews the test interface's IPv4 address.
-
-        Necessary for changing DHCP scopes during a test.
-        """
-        if not self._ssh_session:
-            self.start_ssh()
-        utils.renew_linux_ip_address(self._ssh_session, self.test_interface)
-
-    def get_addr(self, addr_type="ipv4_private", timeout_sec=None):
-        """Wait until a type of IP address on the test interface is available
-        then return it.
-        """
-        if not self._ssh_session:
-            self.start_ssh()
-        return utils.get_addr(
-            self._ssh_session, self.test_interface, addr_type, timeout_sec
-        )
-
-    def _cleanup_iperf_port(self):
-        """Checks and kills zombie iperf servers occupying intended port."""
-        iperf_check_cmd = (
-            "netstat -tulpn | grep LISTEN | grep iperf3" " | grep :{}"
-        ).format(self.port)
-        iperf_check = self._ssh_session.run(iperf_check_cmd, ignore_status=True)
-        iperf_check = iperf_check.stdout
-        if iperf_check:
-            logging.debug("Killing zombie server on port {}".format(self.port))
-            iperf_pid = iperf_check.split(" ")[-1].split("/")[0]
-            self._ssh_session.run("kill -9 {}".format(str(iperf_pid)))
-
-    def start(self, extra_args="", tag="", iperf_binary=None):
-        """Starts iperf server on specified machine and port.
-
-        Args:
-            extra_args: A string representing extra arguments to start iperf
-                server with.
-            tag: Appended to log file name to identify logs from different
-                iperf runs.
-            iperf_binary: Location of iperf3 binary. If none, it is assumed the
-                the binary is in the path.
-        """
-        if self.started:
-            return
-
-        if not self._ssh_session:
-            self.start_ssh()
-        self._cleanup_iperf_port()
-        if not iperf_binary:
-            logging.debug(
-                "No iperf3 binary specified.  " "Assuming iperf3 is in the path."
-            )
-            iperf_binary = "iperf3"
-        else:
-            logging.debug("Using iperf3 binary located at %s" % iperf_binary)
-        iperf_command = "{} -s -J -p {}".format(iperf_binary, self.port)
-
-        cmd = "{cmd} {extra_flags} > {log_file}".format(
-            cmd=iperf_command,
-            extra_flags=extra_args,
-            log_file=self._get_remote_log_path(),
-        )
-
-        job_result = self._ssh_session.run_async(cmd)
-        self._iperf_pid = job_result.stdout
-        self._current_tag = tag
-
-    def stop(self):
-        """Stops the iperf server.
-
-        Returns:
-            The name of the log file generated from the terminated session.
-        """
-        if not self.started:
-            return
-
-        if self._use_killall:
-            self._ssh_session.run("killall iperf3", ignore_status=True)
-        else:
-            self._ssh_session.run_async("kill -9 {}".format(str(self._iperf_pid)))
-
-        iperf_result = self._ssh_session.run(
-            "cat {}".format(self._get_remote_log_path())
-        )
-
-        log_file = self._get_full_file_path(self._current_tag)
-        with open(log_file, "w") as f:
-            f.write(iperf_result.stdout)
-
-        self._ssh_session.run_async("rm {}".format(self._get_remote_log_path()))
-        self._iperf_pid = None
-        return log_file
-
-    def start_ssh(self):
-        """Starts an ssh session to the iperf server."""
-        if not self._ssh_session:
-            self._ssh_session = connection.SshConnection(self.ssh_settings)
-
-    def close_ssh(self):
-        """Closes the ssh session to the iperf server, if one exists, preventing
-        connection reset errors when rebooting server device.
-        """
-        if self.started:
-            self.stop()
-        if self._ssh_session:
-            self._ssh_session.close()
-            self._ssh_session = None
-
-
-# TODO(markdr): Remove this after automagic controller creation has been
-# removed.
-class _AndroidDeviceBridge(object):
-    """A helper class for connecting serial numbers to AndroidDevices."""
-
-    _test_class = None
-
-    @staticmethod
-    @subscribe_static(TestClassBeginEvent)
-    def on_test_begin(event):
-        _AndroidDeviceBridge._test_class = event.test_class
-
-    @staticmethod
-    @subscribe_static(TestClassEndEvent)
-    def on_test_end(_):
-        _AndroidDeviceBridge._test_class = None
-
-    @staticmethod
-    def android_devices():
-        """A dict of serial -> AndroidDevice, where AndroidDevice is a device
-        found in the current TestClass's controllers.
-        """
-        if not _AndroidDeviceBridge._test_class:
-            return {}
-        return {
-            device.serial: device
-            for device in _AndroidDeviceBridge._test_class.android_devices
-        }
-
-
-event_bus.register_subscription(_AndroidDeviceBridge.on_test_begin.subscription)
-event_bus.register_subscription(_AndroidDeviceBridge.on_test_end.subscription)
-
-
-class IPerfServerOverAdb(IPerfServerBase):
-    """Class that handles iperf3 operations over ADB devices."""
-
-    def __init__(self, android_device_or_serial, port):
-        """Creates a new IPerfServerOverAdb object.
-
-        Args:
-            android_device_or_serial: Either an AndroidDevice object, or the
-                serial that corresponds to the AndroidDevice. Note that the
-                serial must be present in an AndroidDevice entry in the ACTS
-                config.
-            port: The port number to open the iperf server on.
-        """
-        super().__init__(port)
-        self._android_device_or_serial = android_device_or_serial
-
-        self._iperf_process = None
-        self._current_tag = ""
-
-    @property
-    def port(self):
-        return self._port
-
-    @property
-    def started(self):
-        return self._iperf_process is not None
-
-    @property
-    def _android_device(self):
-        if isinstance(self._android_device_or_serial, AndroidDevice):
-            return self._android_device_or_serial
-        else:
-            return _AndroidDeviceBridge.android_devices()[
-                self._android_device_or_serial
-            ]
-
-    def _get_device_log_path(self):
-        return "~/data/iperf_server_port%s.log" % self.port
-
-    def start(self, extra_args="", tag="", iperf_binary=None):
-        """Starts iperf server on an ADB device.
-
-        Args:
-            extra_args: A string representing extra arguments to start iperf
-                server with.
-            tag: Appended to log file name to identify logs from different
-                iperf runs.
-            iperf_binary: Location of iperf3 binary. If none, it is assumed the
-                the binary is in the path.
-        """
-        if self._iperf_process is not None:
-            return
-
-        if not iperf_binary:
-            logging.debug(
-                "No iperf3 binary specified.  " "Assuming iperf3 is in the path."
-            )
-            iperf_binary = "iperf3"
-        else:
-            logging.debug("Using iperf3 binary located at %s" % iperf_binary)
-        iperf_command = "{} -s -J -p {}".format(iperf_binary, self.port)
-
-        self._iperf_process = self._android_device.adb.shell_nb(
-            "{cmd} {extra_flags} > {log_file} 2>&1".format(
-                cmd=iperf_command,
-                extra_flags=extra_args,
-                log_file=self._get_device_log_path(),
-            )
-        )
-
-        self._iperf_process_adb_pid = ""
-        while len(self._iperf_process_adb_pid) == 0:
-            self._iperf_process_adb_pid = self._android_device.adb.shell(
-                "pgrep iperf3 -n"
-            )
-
-        self._current_tag = tag
-
-    def stop(self):
-        """Stops the iperf server.
-
-        Returns:
-            The name of the log file generated from the terminated session.
-        """
-        if self._iperf_process is None:
-            return
-
-        job.run("kill -9 {}".format(self._iperf_process.pid))
-
-        # TODO(markdr): update with definitive kill method
-        while True:
-            iperf_process_list = self._android_device.adb.shell("pgrep iperf3")
-            if iperf_process_list.find(self._iperf_process_adb_pid) == -1:
-                break
-            else:
-                self._android_device.adb.shell(
-                    "kill -9 {}".format(self._iperf_process_adb_pid)
-                )
-
-        iperf_result = self._android_device.adb.shell(
-            "cat {}".format(self._get_device_log_path())
-        )
-
-        log_file = self._get_full_file_path(self._current_tag)
-        with open(log_file, "w") as f:
-            f.write(iperf_result)
-
-        self._android_device.adb.shell("rm {}".format(self._get_device_log_path()))
-
-        self._iperf_process = None
-        return log_file
diff --git a/src/antlion/controllers/openwrt_ap.py b/src/antlion/controllers/openwrt_ap.py
deleted file mode 100644
index dc99ef2..0000000
--- a/src/antlion/controllers/openwrt_ap.py
+++ /dev/null
@@ -1,719 +0,0 @@
-"""Controller for Open WRT access point."""
-
-import random
-import re
-import time
-
-from antlion import logger
-from antlion import signals
-from antlion.controllers.ap_lib import hostapd_constants
-from antlion.controllers.openwrt_lib import network_settings
-from antlion.controllers.openwrt_lib import wireless_config
-from antlion.controllers.openwrt_lib import wireless_settings_applier
-from antlion.controllers.openwrt_lib.openwrt_constants import (
-    OpenWrtModelMap as modelmap,
-)
-from antlion.controllers.openwrt_lib.openwrt_constants import OpenWrtWifiSetting
-from antlion.controllers.openwrt_lib.openwrt_constants import SYSTEM_INFO_CMD
-from antlion.controllers.utils_lib.ssh import connection
-from antlion.controllers.utils_lib.ssh import settings
-import yaml
-
-MOBLY_CONTROLLER_CONFIG_NAME = "OpenWrtAP"
-ACTS_CONTROLLER_REFERENCE_NAME = "access_points"
-OPEN_SECURITY = "none"
-PSK1_SECURITY = "psk"
-PSK_SECURITY = "psk2"
-WEP_SECURITY = "wep"
-ENT_SECURITY = "wpa2"
-OWE_SECURITY = "owe"
-SAE_SECURITY = "sae"
-SAEMIXED_SECURITY = "sae-mixed"
-ENABLE_RADIO = "0"
-PMF_ENABLED = 2
-WIFI_2G = "wifi2g"
-WIFI_5G = "wifi5g"
-WAIT_TIME = 20
-DEFAULT_RADIOS = ("radio0", "radio1")
-
-
-def create(configs):
-    """Creates ap controllers from a json config.
-
-    Creates an ap controller from either a list, or a single element. The element
-    can either be just the hostname or a dictionary containing the hostname and
-    username of the AP to connect to over SSH.
-
-    Args:
-      configs: The json configs that represent this controller.
-
-    Returns:
-      AccessPoint object
-
-    Example:
-      Below is the config file entry for OpenWrtAP as a list. A testbed can have
-      1 or more APs to configure. Each AP has a "ssh_config" key to provide SSH
-      login information. OpenWrtAP#__init__() uses this to create SSH object.
-
-        "OpenWrtAP": [
-          {
-            "ssh_config": {
-              "user" : "root",
-              "host" : "192.168.1.1"
-            }
-          },
-          {
-            "ssh_config": {
-              "user" : "root",
-              "host" : "192.168.1.2"
-            }
-          }
-        ]
-    """
-    return [OpenWrtAP(c) for c in configs]
-
-
-def destroy(aps):
-    """Destroys a list of AccessPoints.
-
-    Args:
-      aps: The list of AccessPoints to destroy.
-    """
-    for ap in aps:
-        ap.close()
-        ap.close_ssh()
-
-
-def get_info(aps):
-    """Get information on a list of access points.
-
-    Args:
-      aps: A list of AccessPoints.
-
-    Returns:
-      A list of all aps hostname.
-    """
-    return [ap.ssh_settings.hostname for ap in aps]
-
-
-class OpenWrtAP(object):
-    """An AccessPoint controller.
-
-    Attributes:
-      ssh: The ssh connection to the AP.
-      ssh_settings: The ssh settings being used by the ssh connection.
-      log: Logging object for AccessPoint.
-      wireless_setting: object holding wireless configuration.
-      network_setting: Object for network configuration.
-      model: OpenWrt HW model.
-      radios: Fit interface for test.
-    """
-
-    def __init__(self, config):
-        """Initialize AP."""
-        self.ssh_settings = settings.from_config(config["ssh_config"])
-        self.ssh = connection.SshConnection(self.ssh_settings)
-        self.log = logger.create_logger(
-            lambda msg: "[OpenWrtAP|%s] %s" % (self.ssh_settings.hostname, msg)
-        )
-        self.wireless_setting = None
-        self.network_setting = network_settings.NetworkSettings(
-            self.ssh, self.ssh_settings, self.log
-        )
-        self.model = self.get_model_name()
-        if self.model in modelmap.__dict__:
-            self.radios = modelmap.__dict__[self.model]
-        else:
-            self.radios = DEFAULT_RADIOS
-
-    def configure_ap(self, wifi_configs, channel_2g, channel_5g):
-        """Configure AP with the required settings.
-
-        Each test class inherits WifiBaseTest. Based on the test, we may need to
-        configure PSK, WEP, OPEN, ENT networks on 2G and 5G bands in any
-        combination. We call WifiBaseTest methods get_psk_network(),
-        get_open_network(), get_wep_network() and get_ent_network() to create
-        dictionaries which contains this information. 'wifi_configs' is a list of
-        such dictionaries. Example below configures 2 WiFi networks - 1 PSK 2G and
-        1 Open 5G on one AP. configure_ap() is called from WifiBaseTest to
-        configure the APs.
-
-        wifi_configs = [
-          {
-            '2g': {
-              'SSID': '2g_AkqXWPK4',
-              'security': 'psk2',
-              'password': 'YgYuXqDO9H',
-              'hiddenSSID': False
-            },
-          },
-          {
-            '5g': {
-              'SSID': '5g_8IcMR1Sg',
-              'security': 'none',
-              'hiddenSSID': False
-            },
-          }
-        ]
-
-        Args:
-          wifi_configs: list of network settings for 2G and 5G bands.
-          channel_2g: channel for 2G band.
-          channel_5g: channel for 5G band.
-        """
-        # generate wifi configs to configure
-        wireless_configs = self.generate_wireless_configs(wifi_configs)
-        self.wireless_setting = wireless_settings_applier.WirelessSettingsApplier(
-            self.ssh,
-            wireless_configs,
-            channel_2g,
-            channel_5g,
-            self.radios[1],
-            self.radios[0],
-        )
-        self.wireless_setting.apply_wireless_settings()
-
-    def start_ap(self):
-        """Starts the AP with the settings in /etc/config/wireless."""
-        self.ssh.run("wifi up")
-        curr_time = time.time()
-        while time.time() < curr_time + WAIT_TIME:
-            if self.get_wifi_status():
-                return
-            time.sleep(3)
-        if not self.get_wifi_status():
-            raise ValueError("Failed to turn on WiFi on the AP.")
-
-    def stop_ap(self):
-        """Stops the AP."""
-        self.ssh.run("wifi down")
-        curr_time = time.time()
-        while time.time() < curr_time + WAIT_TIME:
-            if not self.get_wifi_status():
-                return
-            time.sleep(3)
-        if self.get_wifi_status():
-            raise ValueError("Failed to turn off WiFi on the AP.")
-
-    def get_bssids_for_wifi_networks(self):
-        """Get BSSIDs for wifi networks configured.
-
-        Returns:
-          Dictionary of SSID - BSSID map for both bands.
-        """
-        bssid_map = {"2g": {}, "5g": {}}
-        for radio in self.radios:
-            ssid_ifname_map = self.get_ifnames_for_ssids(radio)
-            if radio == self.radios[0]:
-                for ssid, ifname in ssid_ifname_map.items():
-                    bssid_map["5g"][ssid] = self.get_bssid(ifname)
-            elif radio == self.radios[1]:
-                for ssid, ifname in ssid_ifname_map.items():
-                    bssid_map["2g"][ssid] = self.get_bssid(ifname)
-        return bssid_map
-
-    def get_ifnames_for_ssids(self, radio):
-        """Get interfaces for wifi networks.
-
-        Args:
-          radio: 2g or 5g radio get the bssids from.
-
-        Returns:
-          dictionary of ssid - ifname mappings.
-        """
-        ssid_ifname_map = {}
-        str_output = self.ssh.run("wifi status %s" % radio).stdout
-        wifi_status = yaml.load(
-            str_output.replace("\t", "").replace("\n", ""), Loader=yaml.SafeLoader
-        )
-        wifi_status = wifi_status[radio]
-        if wifi_status["up"]:
-            interfaces = wifi_status["interfaces"]
-            for config in interfaces:
-                ssid = config["config"]["ssid"]
-                ifname = config["ifname"]
-                ssid_ifname_map[ssid] = ifname
-        return ssid_ifname_map
-
-    def get_bssid(self, ifname):
-        """Get MAC address from an interface.
-
-        Args:
-          ifname: interface name of the corresponding MAC.
-
-        Returns:
-          BSSID of the interface.
-        """
-        ifconfig = self.ssh.run("ifconfig %s" % ifname).stdout
-        mac_addr = ifconfig.split("\n")[0].split()[-1]
-        return mac_addr
-
-    def set_wpa_encryption(self, encryption):
-        """Set different encryptions to wpa or wpa2.
-
-        Args:
-          encryption: ccmp, tkip, or ccmp+tkip.
-        """
-        str_output = self.ssh.run("wifi status").stdout
-        wifi_status = yaml.load(
-            str_output.replace("\t", "").replace("\n", ""), Loader=yaml.SafeLoader
-        )
-
-        # Counting how many interface are enabled.
-        total_interface = 0
-        for radio in self.radios:
-            num_interface = len(wifi_status[radio]["interfaces"])
-            total_interface += num_interface
-
-        # Iterates every interface to get and set wpa encryption.
-        default_extra_interface = 2
-        for i in range(total_interface + default_extra_interface):
-            origin_encryption = self.ssh.run(
-                "uci get wireless.@wifi-iface[{}].encryption".format(i)
-            ).stdout
-            origin_psk_pattern = re.match(r"psk\b", origin_encryption)
-            target_psk_pattern = re.match(r"psk\b", encryption)
-            origin_psk2_pattern = re.match(r"psk2\b", origin_encryption)
-            target_psk2_pattern = re.match(r"psk2\b", encryption)
-
-            if origin_psk_pattern == target_psk_pattern:
-                self.ssh.run(
-                    "uci set wireless.@wifi-iface[{}].encryption={}".format(
-                        i, encryption
-                    )
-                )
-
-            if origin_psk2_pattern == target_psk2_pattern:
-                self.ssh.run(
-                    "uci set wireless.@wifi-iface[{}].encryption={}".format(
-                        i, encryption
-                    )
-                )
-
-        self.ssh.run("uci commit wireless")
-        self.ssh.run("wifi")
-
-    def set_password(self, pwd_5g=None, pwd_2g=None):
-        """Set password for individual interface.
-
-        Args:
-            pwd_5g: 8 ~ 63 chars, ascii letters and digits password for 5g network.
-            pwd_2g: 8 ~ 63 chars, ascii letters and digits password for 2g network.
-        """
-        if pwd_5g:
-            if len(pwd_5g) < 8 or len(pwd_5g) > 63:
-                self.log.error("Password must be 8~63 characters long")
-            # Only accept ascii letters and digits
-            elif not re.match("^[A-Za-z0-9]*$", pwd_5g):
-                self.log.error("Password must only contains ascii letters and digits")
-            else:
-                self.ssh.run(
-                    "uci set wireless.@wifi-iface[{}].key={}".format(3, pwd_5g)
-                )
-                self.log.info("Set 5G password to :{}".format(pwd_5g))
-
-        if pwd_2g:
-            if len(pwd_2g) < 8 or len(pwd_2g) > 63:
-                self.log.error("Password must be 8~63 characters long")
-            # Only accept ascii letters and digits
-            elif not re.match("^[A-Za-z0-9]*$", pwd_2g):
-                self.log.error("Password must only contains ascii letters and digits")
-            else:
-                self.ssh.run(
-                    "uci set wireless.@wifi-iface[{}].key={}".format(2, pwd_2g)
-                )
-                self.log.info("Set 2G password to :{}".format(pwd_2g))
-
-        self.ssh.run("uci commit wireless")
-        self.ssh.run("wifi")
-
-    def set_ssid(self, ssid_5g=None, ssid_2g=None):
-        """Set SSID for individual interface.
-
-        Args:
-            ssid_5g: 8 ~ 63 chars for 5g network.
-            ssid_2g: 8 ~ 63 chars for 2g network.
-        """
-        if ssid_5g:
-            if len(ssid_5g) < 8 or len(ssid_5g) > 63:
-                self.log.error("SSID must be 8~63 characters long")
-            # Only accept ascii letters and digits
-            else:
-                self.ssh.run(
-                    "uci set wireless.@wifi-iface[{}].ssid={}".format(3, ssid_5g)
-                )
-                self.log.info("Set 5G SSID to :{}".format(ssid_5g))
-
-        if ssid_2g:
-            if len(ssid_2g) < 8 or len(ssid_2g) > 63:
-                self.log.error("SSID must be 8~63 characters long")
-            # Only accept ascii letters and digits
-            else:
-                self.ssh.run(
-                    "uci set wireless.@wifi-iface[{}].ssid={}".format(2, ssid_2g)
-                )
-                self.log.info("Set 2G SSID to :{}".format(ssid_2g))
-
-        self.ssh.run("uci commit wireless")
-        self.ssh.run("wifi")
-
-    def generate_mobility_domain(self):
-        """Generate 4-character hexadecimal ID.
-
-        Returns:
-          String; a 4-character hexadecimal ID.
-        """
-        md = "{:04x}".format(random.getrandbits(16))
-        self.log.info("Mobility Domain ID: {}".format(md))
-        return md
-
-    def enable_80211r(self, iface, md):
-        """Enable 802.11r for one single radio.
-
-        Args:
-          iface: index number of wifi-iface.
-                  2: radio1
-                  3: radio0
-          md: mobility domain. a 4-character hexadecimal ID.
-        Raises:
-          TestSkip if 2g or 5g radio is not up or 802.11r is not enabled.
-        """
-        str_output = self.ssh.run("wifi status").stdout
-        wifi_status = yaml.load(
-            str_output.replace("\t", "").replace("\n", ""), Loader=yaml.SafeLoader
-        )
-        # Check if the radio is up.
-        if iface == OpenWrtWifiSetting.IFACE_2G:
-            if wifi_status[self.radios[1]]["up"]:
-                self.log.info("2g network is ENABLED")
-            else:
-                raise signals.TestSkip("2g network is NOT ENABLED")
-        elif iface == OpenWrtWifiSetting.IFACE_5G:
-            if wifi_status[self.radios[0]]["up"]:
-                self.log.info("5g network is ENABLED")
-            else:
-                raise signals.TestSkip("5g network is NOT ENABLED")
-
-        # Setup 802.11r.
-        self.ssh.run("uci set wireless.@wifi-iface[{}].ieee80211r='1'".format(iface))
-        self.ssh.run(
-            "uci set wireless.@wifi-iface[{}].ft_psk_generate_local='1'".format(iface)
-        )
-        self.ssh.run(
-            "uci set wireless.@wifi-iface[{}].mobility_domain='{}'".format(iface, md)
-        )
-        self.ssh.run("uci commit wireless")
-        self.ssh.run("wifi")
-
-        # Check if 802.11r is enabled.
-        result = self.ssh.run(
-            "uci get wireless.@wifi-iface[{}].ieee80211r".format(iface)
-        ).stdout
-        if result == "1":
-            self.log.info("802.11r is ENABLED")
-        else:
-            raise signals.TestSkip("802.11r is NOT ENABLED")
-
-    def generate_wireless_configs(self, wifi_configs):
-        """Generate wireless configs to configure.
-
-        Converts wifi_configs from configure_ap() to a list of 'WirelessConfig'
-        objects. Each object represents a wifi network to configure on the AP.
-
-        Args:
-          wifi_configs: Network list of different security types and bands.
-
-        Returns:
-          wireless configuration for openwrt AP.
-        """
-        num_2g = 1
-        num_5g = 1
-        wireless_configs = []
-
-        for i in range(len(wifi_configs)):
-            if hostapd_constants.BAND_2G in wifi_configs[i]:
-                config = wifi_configs[i][hostapd_constants.BAND_2G]
-                if config["security"] == PSK_SECURITY:
-                    wireless_configs.append(
-                        wireless_config.WirelessConfig(
-                            "%s%s" % (WIFI_2G, num_2g),
-                            config["SSID"],
-                            config["security"],
-                            hostapd_constants.BAND_2G,
-                            password=config["password"],
-                            hidden=config["hiddenSSID"],
-                            ieee80211w=config["ieee80211w"],
-                        )
-                    )
-                elif config["security"] == PSK1_SECURITY:
-                    wireless_configs.append(
-                        wireless_config.WirelessConfig(
-                            "%s%s" % (WIFI_2G, num_2g),
-                            config["SSID"],
-                            config["security"],
-                            hostapd_constants.BAND_2G,
-                            password=config["password"],
-                            hidden=config["hiddenSSID"],
-                            ieee80211w=config["ieee80211w"],
-                        )
-                    )
-                elif config["security"] == WEP_SECURITY:
-                    wireless_configs.append(
-                        wireless_config.WirelessConfig(
-                            "%s%s" % (WIFI_2G, num_2g),
-                            config["SSID"],
-                            config["security"],
-                            hostapd_constants.BAND_2G,
-                            wep_key=config["wepKeys"][0],
-                            hidden=config["hiddenSSID"],
-                        )
-                    )
-                elif config["security"] == OPEN_SECURITY:
-                    wireless_configs.append(
-                        wireless_config.WirelessConfig(
-                            "%s%s" % (WIFI_2G, num_2g),
-                            config["SSID"],
-                            config["security"],
-                            hostapd_constants.BAND_2G,
-                            hidden=config["hiddenSSID"],
-                        )
-                    )
-                elif config["security"] == OWE_SECURITY:
-                    wireless_configs.append(
-                        wireless_config.WirelessConfig(
-                            "%s%s" % (WIFI_2G, num_2g),
-                            config["SSID"],
-                            config["security"],
-                            hostapd_constants.BAND_2G,
-                            hidden=config["hiddenSSID"],
-                            ieee80211w=PMF_ENABLED,
-                        )
-                    )
-                elif config["security"] == SAE_SECURITY:
-                    wireless_configs.append(
-                        wireless_config.WirelessConfig(
-                            "%s%s" % (WIFI_2G, num_2g),
-                            config["SSID"],
-                            config["security"],
-                            hostapd_constants.BAND_2G,
-                            password=config["password"],
-                            hidden=config["hiddenSSID"],
-                            ieee80211w=PMF_ENABLED,
-                        )
-                    )
-                elif config["security"] == SAEMIXED_SECURITY:
-                    wireless_configs.append(
-                        wireless_config.WirelessConfig(
-                            "%s%s" % (WIFI_2G, num_2g),
-                            config["SSID"],
-                            config["security"],
-                            hostapd_constants.BAND_2G,
-                            password=config["password"],
-                            hidden=config["hiddenSSID"],
-                            ieee80211w=config["ieee80211w"],
-                        )
-                    )
-                elif config["security"] == ENT_SECURITY:
-                    wireless_configs.append(
-                        wireless_config.WirelessConfig(
-                            "%s%s" % (WIFI_2G, num_2g),
-                            config["SSID"],
-                            config["security"],
-                            hostapd_constants.BAND_2G,
-                            radius_server_ip=config["radius_server_ip"],
-                            radius_server_port=config["radius_server_port"],
-                            radius_server_secret=config["radius_server_secret"],
-                            hidden=config["hiddenSSID"],
-                        )
-                    )
-                num_2g += 1
-            if hostapd_constants.BAND_5G in wifi_configs[i]:
-                config = wifi_configs[i][hostapd_constants.BAND_5G]
-                if config["security"] == PSK_SECURITY:
-                    wireless_configs.append(
-                        wireless_config.WirelessConfig(
-                            "%s%s" % (WIFI_5G, num_5g),
-                            config["SSID"],
-                            config["security"],
-                            hostapd_constants.BAND_5G,
-                            password=config["password"],
-                            hidden=config["hiddenSSID"],
-                            ieee80211w=config["ieee80211w"],
-                        )
-                    )
-                elif config["security"] == PSK1_SECURITY:
-                    wireless_configs.append(
-                        wireless_config.WirelessConfig(
-                            "%s%s" % (WIFI_5G, num_5g),
-                            config["SSID"],
-                            config["security"],
-                            hostapd_constants.BAND_5G,
-                            password=config["password"],
-                            hidden=config["hiddenSSID"],
-                            ieee80211w=config["ieee80211w"],
-                        )
-                    )
-                elif config["security"] == WEP_SECURITY:
-                    wireless_configs.append(
-                        wireless_config.WirelessConfig(
-                            "%s%s" % (WIFI_5G, num_5g),
-                            config["SSID"],
-                            config["security"],
-                            hostapd_constants.BAND_5G,
-                            wep_key=config["wepKeys"][0],
-                            hidden=config["hiddenSSID"],
-                        )
-                    )
-                elif config["security"] == OPEN_SECURITY:
-                    wireless_configs.append(
-                        wireless_config.WirelessConfig(
-                            "%s%s" % (WIFI_5G, num_5g),
-                            config["SSID"],
-                            config["security"],
-                            hostapd_constants.BAND_5G,
-                            hidden=config["hiddenSSID"],
-                        )
-                    )
-                elif config["security"] == OWE_SECURITY:
-                    wireless_configs.append(
-                        wireless_config.WirelessConfig(
-                            "%s%s" % (WIFI_5G, num_5g),
-                            config["SSID"],
-                            config["security"],
-                            hostapd_constants.BAND_5G,
-                            hidden=config["hiddenSSID"],
-                            ieee80211w=PMF_ENABLED,
-                        )
-                    )
-                elif config["security"] == SAE_SECURITY:
-                    wireless_configs.append(
-                        wireless_config.WirelessConfig(
-                            "%s%s" % (WIFI_5G, num_5g),
-                            config["SSID"],
-                            config["security"],
-                            hostapd_constants.BAND_5G,
-                            password=config["password"],
-                            hidden=config["hiddenSSID"],
-                            ieee80211w=PMF_ENABLED,
-                        )
-                    )
-                elif config["security"] == SAEMIXED_SECURITY:
-                    wireless_configs.append(
-                        wireless_config.WirelessConfig(
-                            "%s%s" % (WIFI_5G, num_5g),
-                            config["SSID"],
-                            config["security"],
-                            hostapd_constants.BAND_5G,
-                            password=config["password"],
-                            hidden=config["hiddenSSID"],
-                            ieee80211w=config["ieee80211w"],
-                        )
-                    )
-                elif config["security"] == ENT_SECURITY:
-                    wireless_configs.append(
-                        wireless_config.WirelessConfig(
-                            "%s%s" % (WIFI_5G, num_5g),
-                            config["SSID"],
-                            config["security"],
-                            hostapd_constants.BAND_5G,
-                            radius_server_ip=config["radius_server_ip"],
-                            radius_server_port=config["radius_server_port"],
-                            radius_server_secret=config["radius_server_secret"],
-                            hidden=config["hiddenSSID"],
-                        )
-                    )
-                num_5g += 1
-
-        return wireless_configs
-
-    def get_wifi_network(self, security=None, band=None):
-        """Return first match wifi interface's config.
-
-        Args:
-          security: psk2 or none
-          band: '2g' or '5g'
-
-        Returns:
-          A dict contains match wifi interface's config.
-        """
-
-        for wifi_iface in self.wireless_setting.wireless_configs:
-            match_list = []
-            wifi_network = wifi_iface.__dict__
-            if security:
-                match_list.append(security == wifi_network["security"])
-            if band:
-                match_list.append(band == wifi_network["band"])
-
-            if all(match_list):
-                wifi_network["SSID"] = wifi_network["ssid"]
-                if not wifi_network["password"]:
-                    del wifi_network["password"]
-                return wifi_network
-        return None
-
-    def get_wifi_status(self):
-        """Check if radios are up. Default are 2G and 5G bands.
-
-        Returns:
-          True if both radios are up. False if not.
-        """
-        status = True
-        for radio in self.radios:
-            try:
-                str_output = self.ssh.run("wifi status %s" % radio).stdout
-                wifi_status = yaml.load(
-                    str_output.replace("\t", "").replace("\n", ""),
-                    Loader=yaml.SafeLoader,
-                )
-                status = wifi_status[radio]["up"] and status
-            except:
-                self.log.info("Failed to make ssh connection to the OpenWrt")
-                return False
-        return status
-
-    def verify_wifi_status(self, timeout=20):
-        """Ensure wifi interfaces are ready.
-
-        Args:
-          timeout: An integer that is the number of times to try
-                   wait for interface ready.
-        Returns:
-          True if both radios are up. False if not.
-        """
-        start_time = time.time()
-        end_time = start_time + timeout
-        while time.time() < end_time:
-            if self.get_wifi_status():
-                return True
-            time.sleep(1)
-        return False
-
-    def get_model_name(self):
-        """Get Openwrt model name.
-
-        Returns:
-          A string include device brand and model. e.g. NETGEAR_R8000
-        """
-        out = self.ssh.run(SYSTEM_INFO_CMD).stdout.split("\n")
-        for line in out:
-            if "board_name" in line:
-                model = line.split()[1].strip('",').split(",")
-                return "_".join(map(lambda i: i.upper(), model))
-        self.log.info("Failed to retrieve OpenWrt model information.")
-        return None
-
-    def close(self):
-        """Reset wireless and network settings to default and stop AP."""
-        if self.network_setting.config:
-            self.network_setting.cleanup_network_settings()
-        if self.wireless_setting:
-            self.wireless_setting.cleanup_wireless_settings()
-
-    def close_ssh(self):
-        """Close SSH connection to AP."""
-        self.ssh.close()
-
-    def reboot(self):
-        """Reboot Openwrt."""
-        self.ssh.run("reboot")
diff --git a/src/antlion/controllers/packet_sender.py b/src/antlion/controllers/packet_sender.py
deleted file mode 100644
index da22e79..0000000
--- a/src/antlion/controllers/packet_sender.py
+++ /dev/null
@@ -1,941 +0,0 @@
-#!/usr/bin/env python3.4
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Collection of utility functions to generate and send custom packets.
-
-"""
-import logging
-import multiprocessing
-import socket
-import time
-
-import antlion.signals
-
-# http://www.secdev.org/projects/scapy/
-# On ubuntu, sudo pip3 install scapy
-import scapy.all as scapy
-
-MOBLY_CONTROLLER_CONFIG_NAME = "PacketSender"
-ACTS_CONTROLLER_REFERENCE_NAME = "packet_senders"
-
-GET_FROM_LOCAL_INTERFACE = "get_local"
-MAC_BROADCAST = "ff:ff:ff:ff:ff:ff"
-IPV4_BROADCAST = "255.255.255.255"
-ARP_DST = "00:00:00:00:00:00"
-RA_MAC = "33:33:00:00:00:01"
-RA_IP = "ff02::1"
-RA_PREFIX = "d00d::"
-RA_PREFIX_LEN = 64
-DHCP_OFFER_OP = 2
-DHCP_OFFER_SRC_PORT = 67
-DHCP_OFFER_DST_PORT = 68
-DHCP_TRANS_ID = 0x01020304
-DNS_LEN = 3
-PING6_DATA = "BEST PING6 EVER"
-PING4_TYPE = 8
-MDNS_TTL = 255
-MDNS_QTYPE = "PTR"
-MDNS_UDP_PORT = 5353
-MDNS_V4_IP_DST = "224.0.0.251"
-MDNS_V4_MAC_DST = "01:00:5E:00:00:FB"
-MDNS_RECURSIVE = 1
-MDNS_V6_IP_DST = "FF02::FB"
-MDNS_V6_MAC_DST = "33:33:00:00:00:FB"
-ETH_TYPE_IP = 2048
-SAP_SPANNING_TREE = 0x42
-SNAP_OUI = 12
-SNAP_SSAP = 170
-SNAP_DSAP = 170
-SNAP_CTRL = 3
-LLC_XID_CONTROL = 191
-PAD_LEN_BYTES = 128
-
-
-def create(configs):
-    """Creates PacketSender controllers from a json config.
-
-    Args:
-        The json configs that represent this controller
-
-    Returns:
-        A new PacketSender
-    """
-    return [PacketSender(c) for c in configs]
-
-
-def destroy(objs):
-    """Destroys a list of PacketSenders and stops sending (if active).
-
-    Args:
-        objs: A list of PacketSenders
-    """
-    for pkt_sender in objs:
-        pkt_sender.stop_sending(True)
-    return
-
-
-def get_info(objs):
-    """Get information on a list of packet senders.
-
-    Args:
-        objs: A list of PacketSenders
-
-    Returns:
-        Network interface name that is being used by each packet sender
-    """
-    return [pkt_sender.interface for pkt_sender in objs]
-
-
-class ThreadSendPacket(multiprocessing.Process):
-    """Creates a thread that keeps sending the same packet until a stop signal.
-
-    Attributes:
-        stop_signal: signal to stop the thread execution
-        packet: desired packet to keep sending
-        interval: interval between consecutive packets (s)
-        interface: network interface name (e.g., 'eth0')
-        log: object used for logging
-    """
-
-    def __init__(self, signal, packet, interval, interface, log):
-        multiprocessing.Process.__init__(self)
-        self.stop_signal = signal
-        self.packet = packet
-        self.interval = interval
-        self.interface = interface
-        self.log = log
-
-    def run(self):
-        self.log.info("Packet Sending Started.")
-        while True:
-            if self.stop_signal.is_set():
-                # Poison pill means shutdown
-                self.log.info("Packet Sending Stopped.")
-                break
-
-            try:
-                scapy.sendp(self.packet, iface=self.interface, verbose=0)
-                time.sleep(self.interval)
-            except Exception:
-                self.log.exception("Exception when trying to send packet")
-                return
-
-        return
-
-
-class PacketSenderError(antlion.signals.ControllerError):
-    """Raises exceptions encountered in packet sender lib."""
-
-
-class PacketSender(object):
-    """Send any custom packet over a desired interface.
-
-    Attributes:
-        log: class logging object
-        thread_active: indicates whether or not the send thread is active
-        thread_send: thread object for the concurrent packet transmissions
-        stop_signal: event to stop the thread
-        interface: network interface name (e.g., 'eth0')
-    """
-
-    def __init__(self, ifname):
-        """Initiallize the PacketGenerator class.
-
-        Args:
-            ifname: network interface name that will be used packet generator
-        """
-        self.log = logging.getLogger()
-        self.packet = None
-        self.thread_active = False
-        self.thread_send = None
-        self.stop_signal = multiprocessing.Event()
-        self.interface = ifname
-
-    def send_ntimes(self, packet, ntimes, interval):
-        """Sends a packet ntimes at a given interval.
-
-        Args:
-            packet: custom built packet from Layer 2 up to Application layer
-            ntimes: number of packets to send
-            interval: interval between consecutive packet transmissions (s)
-        """
-        if packet is None:
-            raise PacketSenderError(
-                "There is no packet to send. Create a packet first."
-            )
-
-        for _ in range(ntimes):
-            try:
-                scapy.sendp(packet, iface=self.interface, verbose=0)
-                time.sleep(interval)
-            except socket.error as excpt:
-                self.log.exception("Caught socket exception : %s" % excpt)
-                return
-
-    def send_receive_ntimes(self, packet, ntimes, interval):
-        """Sends a packet and receives the reply ntimes at a given interval.
-
-        Args:
-            packet: custom built packet from Layer 2 up to Application layer
-            ntimes: number of packets to send
-            interval: interval between consecutive packet transmissions and
-                      the corresponding reply (s)
-        """
-        if packet is None:
-            raise PacketSenderError(
-                "There is no packet to send. Create a packet first."
-            )
-
-        for _ in range(ntimes):
-            try:
-                scapy.srp1(packet, iface=self.interface, timeout=interval, verbose=0)
-                time.sleep(interval)
-            except socket.error as excpt:
-                self.log.exception("Caught socket exception : %s" % excpt)
-                return
-
-    def start_sending(self, packet, interval):
-        """Sends packets in parallel with the main process.
-
-        Creates a thread and keeps sending the same packet at a given interval
-        until a stop signal is received
-
-        Args:
-            packet: custom built packet from Layer 2 up to Application layer
-            interval: interval between consecutive packets (s)
-        """
-        if packet is None:
-            raise PacketSenderError(
-                "There is no packet to send. Create a packet first."
-            )
-
-        if self.thread_active:
-            raise PacketSenderError(
-                (
-                    "There is already an active thread. Stop it"
-                    "before starting another transmission."
-                )
-            )
-
-        self.thread_send = ThreadSendPacket(
-            self.stop_signal, packet, interval, self.interface, self.log
-        )
-        self.thread_send.start()
-        self.thread_active = True
-
-    def stop_sending(self, ignore_status=False):
-        """Stops the concurrent thread that is continuously sending packets."""
-        if not self.thread_active:
-            if ignore_status:
-                return
-            else:
-                raise PacketSenderError(
-                    "Error: There is no acive thread running to stop."
-                )
-
-        # Stop thread
-        self.stop_signal.set()
-        self.thread_send.join()
-
-        # Just as precaution
-        if self.thread_send.is_alive():
-            self.thread_send.terminate()
-            self.log.warning("Packet Sending forced to terminate")
-
-        self.stop_signal.clear()
-        self.thread_send = None
-        self.thread_active = False
-
-
-class ArpGenerator(object):
-    """Creates a custom ARP packet
-
-    Attributes:
-        packet: desired built custom packet
-        src_mac: MAC address (Layer 2) of the source node
-        src_ipv4: IPv4 address (Layer 3) of the source node
-        dst_ipv4: IPv4 address (Layer 3) of the destination node
-    """
-
-    def __init__(self, **config_params):
-        """Initialize the class with the required network and packet params.
-
-        Args:
-            config_params: a dictionary with all the necessary packet fields.
-              Some fields can be generated automatically. For example:
-              {'subnet_mask': '255.255.255.0',
-               'dst_ipv4': '192.168.1.3',
-               'src_ipv4: 'get_local', ...
-              The key can also be 'get_local' which means the code will read
-              and use the local interface parameters
-        """
-        interf = config_params["interf"]
-        self.packet = None
-        if config_params["src_mac"] == GET_FROM_LOCAL_INTERFACE:
-            self.src_mac = scapy.get_if_hwaddr(interf)
-        else:
-            self.src_mac = config_params["src_mac"]
-
-        self.dst_ipv4 = config_params["dst_ipv4"]
-        if config_params["src_ipv4"] == GET_FROM_LOCAL_INTERFACE:
-            self.src_ipv4 = scapy.get_if_addr(interf)
-        else:
-            self.src_ipv4 = config_params["src_ipv4"]
-
-    def generate(
-        self,
-        op="who-has",
-        ip_dst=None,
-        ip_src=None,
-        hwsrc=None,
-        hwdst=None,
-        eth_dst=None,
-    ):
-        """Generates a custom ARP packet.
-
-        Args:
-            op: ARP type (request or reply)
-            ip_dst: ARP ipv4 destination (Optional)
-            ip_src: ARP ipv4 source address (Optional)
-            hwsrc: ARP hardware source address (Optional)
-            hwdst: ARP hardware destination address (Optional)
-            eth_dst: Ethernet (layer 2) destination address (Optional)
-        """
-        # Create IP layer
-        hw_src = hwsrc if hwsrc is not None else self.src_mac
-        hw_dst = hwdst if hwdst is not None else ARP_DST
-        ipv4_dst = ip_dst if ip_dst is not None else self.dst_ipv4
-        ipv4_src = ip_src if ip_src is not None else self.src_ipv4
-        ip4 = scapy.ARP(op=op, pdst=ipv4_dst, psrc=ipv4_src, hwdst=hw_dst, hwsrc=hw_src)
-
-        # Create Ethernet layer
-        mac_dst = eth_dst if eth_dst is not None else MAC_BROADCAST
-        ethernet = scapy.Ether(src=self.src_mac, dst=mac_dst)
-
-        self.packet = ethernet / ip4
-        return self.packet
-
-
-class DhcpOfferGenerator(object):
-    """Creates a custom DHCP offer packet
-
-    Attributes:
-        packet: desired built custom packet
-        subnet_mask: local network subnet mask
-        src_mac: MAC address (Layer 2) of the source node
-        dst_mac: MAC address (Layer 2) of the destination node
-        src_ipv4: IPv4 address (Layer 3) of the source node
-        dst_ipv4: IPv4 address (Layer 3) of the destination node
-        gw_ipv4: IPv4 address (Layer 3) of the Gateway
-    """
-
-    def __init__(self, **config_params):
-        """Initialize the class with the required network and packet params.
-
-        Args:
-            config_params: contains all the necessary packet parameters.
-              Some fields can be generated automatically. For example:
-              {'subnet_mask': '255.255.255.0',
-               'dst_ipv4': '192.168.1.3',
-               'src_ipv4: 'get_local', ...
-              The key can also be 'get_local' which means the code will read
-              and use the local interface parameters
-        """
-        interf = config_params["interf"]
-        self.packet = None
-        self.subnet_mask = config_params["subnet_mask"]
-        self.dst_mac = config_params["dst_mac"]
-        if config_params["src_mac"] == GET_FROM_LOCAL_INTERFACE:
-            self.src_mac = scapy.get_if_hwaddr(interf)
-        else:
-            self.src_mac = config_params["src_mac"]
-
-        self.dst_ipv4 = config_params["dst_ipv4"]
-        if config_params["src_ipv4"] == GET_FROM_LOCAL_INTERFACE:
-            self.src_ipv4 = scapy.get_if_addr(interf)
-        else:
-            self.src_ipv4 = config_params["src_ipv4"]
-
-        self.gw_ipv4 = config_params["gw_ipv4"]
-
-    def generate(self, cha_mac=None, dst_ip=None):
-        """Generates a DHCP offer packet.
-
-        Args:
-            cha_mac: hardware target address for DHCP offer (Optional)
-            dst_ip: ipv4 address of target host for renewal (Optional)
-        """
-
-        # Create DHCP layer
-        dhcp = scapy.DHCP(
-            options=[
-                ("message-type", "offer"),
-                ("subnet_mask", self.subnet_mask),
-                ("server_id", self.src_ipv4),
-                ("end"),
-            ]
-        )
-
-        # Overwrite standard DHCP fields
-        sta_hw = cha_mac if cha_mac is not None else self.dst_mac
-        sta_ip = dst_ip if dst_ip is not None else self.dst_ipv4
-
-        # Create Boot
-        bootp = scapy.BOOTP(
-            op=DHCP_OFFER_OP,
-            yiaddr=sta_ip,
-            siaddr=self.src_ipv4,
-            giaddr=self.gw_ipv4,
-            chaddr=scapy.mac2str(sta_hw),
-            xid=DHCP_TRANS_ID,
-        )
-
-        # Create UDP
-        udp = scapy.UDP(sport=DHCP_OFFER_SRC_PORT, dport=DHCP_OFFER_DST_PORT)
-
-        # Create IP layer
-        ip4 = scapy.IP(src=self.src_ipv4, dst=IPV4_BROADCAST)
-
-        # Create Ethernet layer
-        ethernet = scapy.Ether(dst=MAC_BROADCAST, src=self.src_mac)
-
-        self.packet = ethernet / ip4 / udp / bootp / dhcp
-        return self.packet
-
-
-class NsGenerator(object):
-    """Creates a custom Neighbor Solicitation (NS) packet
-
-    Attributes:
-        packet: desired built custom packet
-        src_mac: MAC address (Layer 2) of the source node
-        src_ipv6_type: IPv6 source address type (e.g., Link Local, Global, etc)
-        src_ipv6: IPv6 address (Layer 3) of the source node
-        dst_ipv6: IPv6 address (Layer 3) of the destination node
-    """
-
-    def __init__(self, **config_params):
-        """Initialize the class with the required network and packet params.
-
-        Args:
-            config_params: contains all the necessary packet parameters.
-              Some fields can be generated automatically. For example:
-              {'subnet_mask': '255.255.255.0',
-               'dst_ipv4': '192.168.1.3',
-               'src_ipv4: 'get_local', ...
-              The key can also be 'get_local' which means the code will read
-              and use the local interface parameters
-        """
-        interf = config_params["interf"]
-        self.packet = None
-        if config_params["src_mac"] == GET_FROM_LOCAL_INTERFACE:
-            self.src_mac = scapy.get_if_hwaddr(interf)
-        else:
-            self.src_mac = config_params["src_mac"]
-
-        self.dst_ipv6 = config_params["dst_ipv6"]
-        self.src_ipv6_type = config_params["src_ipv6_type"]
-        if config_params["src_ipv6"] == GET_FROM_LOCAL_INTERFACE:
-            self.src_ipv6 = get_if_addr6(interf, self.src_ipv6_type)
-        else:
-            self.src_ipv6 = config_params["src_ipv6"]
-
-    def generate(self, ip_dst=None, eth_dst=None):
-        """Generates a Neighbor Solicitation (NS) packet (ICMP over IPv6).
-
-        Args:
-            ip_dst: NS ipv6 destination (Optional)
-            eth_dst: Ethernet (layer 2) destination address (Optional)
-        """
-        # Compute IP addresses
-        target_ip6 = ip_dst if ip_dst is not None else self.dst_ipv6
-        ndst_ip = socket.inet_pton(socket.AF_INET6, target_ip6)
-        nnode_mcast = scapy.in6_getnsma(ndst_ip)
-        node_mcast = socket.inet_ntop(socket.AF_INET6, nnode_mcast)
-        # Compute MAC addresses
-        hw_dst = eth_dst if eth_dst is not None else scapy.in6_getnsmac(nnode_mcast)
-
-        # Create IPv6 layer
-        base = scapy.IPv6(dst=node_mcast, src=self.src_ipv6)
-        neighbor_solicitation = scapy.ICMPv6ND_NS(tgt=target_ip6)
-        src_ll_addr = scapy.ICMPv6NDOptSrcLLAddr(lladdr=self.src_mac)
-        ip6 = base / neighbor_solicitation / src_ll_addr
-
-        # Create Ethernet layer
-        ethernet = scapy.Ether(src=self.src_mac, dst=hw_dst)
-
-        self.packet = ethernet / ip6
-        return self.packet
-
-
-class RaGenerator(object):
-    """Creates a custom Router Advertisement (RA) packet
-
-    Attributes:
-        packet: desired built custom packet
-        src_mac: MAC address (Layer 2) of the source node
-        src_ipv6_type: IPv6 source address type (e.g., Link Local, Global, etc)
-        src_ipv6: IPv6 address (Layer 3) of the source node
-    """
-
-    def __init__(self, **config_params):
-        """Initialize the class with the required network and packet params.
-
-        Args:
-            config_params: contains all the necessary packet parameters.
-              Some fields can be generated automatically. For example:
-              {'subnet_mask': '255.255.255.0',
-               'dst_ipv4': '192.168.1.3',
-               'src_ipv4: 'get_local', ...
-              The key can also be 'get_local' which means the code will read
-              and use the local interface parameters
-        """
-        interf = config_params["interf"]
-        self.packet = None
-        if config_params["src_mac"] == GET_FROM_LOCAL_INTERFACE:
-            self.src_mac = scapy.get_if_hwaddr(interf)
-        else:
-            self.src_mac = config_params["src_mac"]
-
-        self.src_ipv6_type = config_params["src_ipv6_type"]
-        if config_params["src_ipv6"] == GET_FROM_LOCAL_INTERFACE:
-            self.src_ipv6 = get_if_addr6(interf, self.src_ipv6_type)
-        else:
-            self.src_ipv6 = config_params["src_ipv6"]
-
-    def generate(
-        self, lifetime, enableDNS=False, dns_lifetime=0, ip_dst=None, eth_dst=None
-    ):
-        """Generates a Router Advertisement (RA) packet (ICMP over IPv6).
-
-        Args:
-            lifetime: RA lifetime
-            enableDNS: Add RDNSS option to RA (Optional)
-            dns_lifetime: Set DNS server lifetime (Optional)
-            ip_dst: IPv6 destination address (Optional)
-            eth_dst: Ethernet (layer 2) destination address (Optional)
-        """
-        # Overwrite standard fields if desired
-        ip6_dst = ip_dst if ip_dst is not None else RA_IP
-        hw_dst = eth_dst if eth_dst is not None else RA_MAC
-
-        # Create IPv6 layer
-        base = scapy.IPv6(dst=ip6_dst, src=self.src_ipv6)
-        router_solicitation = scapy.ICMPv6ND_RA(routerlifetime=lifetime)
-        src_ll_addr = scapy.ICMPv6NDOptSrcLLAddr(lladdr=self.src_mac)
-        prefix = scapy.ICMPv6NDOptPrefixInfo(prefixlen=RA_PREFIX_LEN, prefix=RA_PREFIX)
-        if enableDNS:
-            rndss = scapy.ICMPv6NDOptRDNSS(
-                lifetime=dns_lifetime, dns=[self.src_ipv6], len=DNS_LEN
-            )
-            ip6 = base / router_solicitation / src_ll_addr / prefix / rndss
-        else:
-            ip6 = base / router_solicitation / src_ll_addr / prefix
-
-        # Create Ethernet layer
-        ethernet = scapy.Ether(src=self.src_mac, dst=hw_dst)
-
-        self.packet = ethernet / ip6
-        return self.packet
-
-
-class Ping6Generator(object):
-    """Creates a custom Ping v6 packet (i.e., ICMP over IPv6)
-
-    Attributes:
-        packet: desired built custom packet
-        src_mac: MAC address (Layer 2) of the source node
-        dst_mac: MAC address (Layer 2) of the destination node
-        src_ipv6_type: IPv6 source address type (e.g., Link Local, Global, etc)
-        src_ipv6: IPv6 address (Layer 3) of the source node
-        dst_ipv6: IPv6 address (Layer 3) of the destination node
-    """
-
-    def __init__(self, **config_params):
-        """Initialize the class with the required network and packet params.
-
-        Args:
-            config_params: contains all the necessary packet parameters.
-              Some fields can be generated automatically. For example:
-              {'subnet_mask': '255.255.255.0',
-               'dst_ipv4': '192.168.1.3',
-               'src_ipv4: 'get_local', ...
-              The key can also be 'get_local' which means the code will read
-              and use the local interface parameters
-        """
-        interf = config_params["interf"]
-        self.packet = None
-        self.dst_mac = config_params["dst_mac"]
-        if config_params["src_mac"] == GET_FROM_LOCAL_INTERFACE:
-            self.src_mac = scapy.get_if_hwaddr(interf)
-        else:
-            self.src_mac = config_params["src_mac"]
-
-        self.dst_ipv6 = config_params["dst_ipv6"]
-        self.src_ipv6_type = config_params["src_ipv6_type"]
-        if config_params["src_ipv6"] == GET_FROM_LOCAL_INTERFACE:
-            self.src_ipv6 = get_if_addr6(interf, self.src_ipv6_type)
-        else:
-            self.src_ipv6 = config_params["src_ipv6"]
-
-    def generate(self, ip_dst=None, eth_dst=None):
-        """Generates a Ping6 packet (i.e., Echo Request)
-
-        Args:
-            ip_dst: IPv6 destination address (Optional)
-            eth_dst: Ethernet (layer 2) destination address (Optional)
-        """
-        # Overwrite standard fields if desired
-        ip6_dst = ip_dst if ip_dst is not None else self.dst_ipv6
-        hw_dst = eth_dst if eth_dst is not None else self.dst_mac
-
-        # Create IPv6 layer
-        base = scapy.IPv6(dst=ip6_dst, src=self.src_ipv6)
-        echo_request = scapy.ICMPv6EchoRequest(data=PING6_DATA)
-
-        ip6 = base / echo_request
-
-        # Create Ethernet layer
-        ethernet = scapy.Ether(src=self.src_mac, dst=hw_dst)
-
-        self.packet = ethernet / ip6
-        return self.packet
-
-
-class Ping4Generator(object):
-    """Creates a custom Ping v4 packet (i.e., ICMP over IPv4)
-
-    Attributes:
-        packet: desired built custom packet
-        src_mac: MAC address (Layer 2) of the source node
-        dst_mac: MAC address (Layer 2) of the destination node
-        src_ipv4: IPv4 address (Layer 3) of the source node
-        dst_ipv4: IPv4 address (Layer 3) of the destination node
-    """
-
-    def __init__(self, **config_params):
-        """Initialize the class with the required network and packet params.
-
-        Args:
-            config_params: contains all the necessary packet parameters.
-              Some fields can be generated automatically. For example:
-              {'subnet_mask': '255.255.255.0',
-               'dst_ipv4': '192.168.1.3',
-               'src_ipv4: 'get_local', ...
-              The key can also be 'get_local' which means the code will read
-              and use the local interface parameters
-        """
-        interf = config_params["interf"]
-        self.packet = None
-        self.dst_mac = config_params["dst_mac"]
-        if config_params["src_mac"] == GET_FROM_LOCAL_INTERFACE:
-            self.src_mac = scapy.get_if_hwaddr(interf)
-        else:
-            self.src_mac = config_params["src_mac"]
-
-        self.dst_ipv4 = config_params["dst_ipv4"]
-        if config_params["src_ipv4"] == GET_FROM_LOCAL_INTERFACE:
-            self.src_ipv4 = scapy.get_if_addr(interf)
-        else:
-            self.src_ipv4 = config_params["src_ipv4"]
-
-    def generate(self, ip_dst=None, eth_dst=None):
-        """Generates a Ping4 packet (i.e., Echo Request)
-
-        Args:
-            ip_dst: IP destination address (Optional)
-            eth_dst: Ethernet (layer 2) destination address (Optional)
-        """
-
-        # Overwrite standard fields if desired
-        sta_ip = ip_dst if ip_dst is not None else self.dst_ipv4
-        sta_hw = eth_dst if eth_dst is not None else self.dst_mac
-
-        # Create IPv6 layer
-        base = scapy.IP(src=self.src_ipv4, dst=sta_ip)
-        echo_request = scapy.ICMP(type=PING4_TYPE)
-
-        ip4 = base / echo_request
-
-        # Create Ethernet layer
-        ethernet = scapy.Ether(src=self.src_mac, dst=sta_hw)
-
-        self.packet = ethernet / ip4
-        return self.packet
-
-
-class Mdns6Generator(object):
-    """Creates a custom mDNS IPv6 packet
-
-    Attributes:
-        packet: desired built custom packet
-        src_mac: MAC address (Layer 2) of the source node
-        src_ipv6_type: IPv6 source address type (e.g., Link Local, Global, etc)
-        src_ipv6: IPv6 address (Layer 3) of the source node
-    """
-
-    def __init__(self, **config_params):
-        """Initialize the class with the required network and packet params.
-
-        Args:
-            config_params: contains all the necessary packet parameters.
-              Some fields can be generated automatically. For example:
-              {'subnet_mask': '255.255.255.0',
-               'dst_ipv4': '192.168.1.3',
-               'src_ipv4: 'get_local', ...
-              The key can also be 'get_local' which means the code will read
-              and use the local interface parameters
-        """
-        interf = config_params["interf"]
-        self.packet = None
-        if config_params["src_mac"] == GET_FROM_LOCAL_INTERFACE:
-            self.src_mac = scapy.get_if_hwaddr(interf)
-        else:
-            self.src_mac = config_params["src_mac"]
-
-        self.src_ipv6_type = config_params["src_ipv6_type"]
-        if config_params["src_ipv6"] == GET_FROM_LOCAL_INTERFACE:
-            self.src_ipv6 = get_if_addr6(interf, self.src_ipv6_type)
-        else:
-            self.src_ipv6 = config_params["src_ipv6"]
-
-    def generate(self, ip_dst=None, eth_dst=None):
-        """Generates a mDNS v6 packet for multicast DNS config
-
-        Args:
-            ip_dst: IPv6 destination address (Optional)
-            eth_dst: Ethernet (layer 2) destination address (Optional)
-        """
-
-        # Overwrite standard fields if desired
-        sta_ip = ip_dst if ip_dst is not None else MDNS_V6_IP_DST
-        sta_hw = eth_dst if eth_dst is not None else MDNS_V6_MAC_DST
-
-        # Create mDNS layer
-        qdServer = scapy.DNSQR(qname=self.src_ipv6, qtype=MDNS_QTYPE)
-        mDNS = scapy.DNS(rd=MDNS_RECURSIVE, qd=qdServer)
-
-        # Create UDP
-        udp = scapy.UDP(sport=MDNS_UDP_PORT, dport=MDNS_UDP_PORT)
-
-        # Create IP layer
-        ip6 = scapy.IPv6(src=self.src_ipv6, dst=sta_ip)
-
-        # Create Ethernet layer
-        ethernet = scapy.Ether(src=self.src_mac, dst=sta_hw)
-
-        self.packet = ethernet / ip6 / udp / mDNS
-        return self.packet
-
-
-class Mdns4Generator(object):
-    """Creates a custom mDNS v4 packet
-
-    Attributes:
-        packet: desired built custom packet
-        src_mac: MAC address (Layer 2) of the source node
-        src_ipv4: IPv4 address (Layer 3) of the source node
-    """
-
-    def __init__(self, **config_params):
-        """Initialize the class with the required network and packet params.
-
-        Args:
-            config_params: contains all the necessary packet parameters.
-              Some fields can be generated automatically. For example:
-              {'subnet_mask': '255.255.255.0',
-               'dst_ipv4': '192.168.1.3',
-               'src_ipv4: 'get_local', ...
-              The key can also be 'get_local' which means the code will read
-              and use the local interface parameters
-        """
-        interf = config_params["interf"]
-        self.packet = None
-        if config_params["src_mac"] == GET_FROM_LOCAL_INTERFACE:
-            self.src_mac = scapy.get_if_hwaddr(interf)
-        else:
-            self.src_mac = config_params["src_mac"]
-
-        if config_params["src_ipv4"] == GET_FROM_LOCAL_INTERFACE:
-            self.src_ipv4 = scapy.get_if_addr(interf)
-        else:
-            self.src_ipv4 = config_params["src_ipv4"]
-
-    def generate(self, ip_dst=None, eth_dst=None):
-        """Generates a mDNS v4 packet for multicast DNS config
-
-        Args:
-            ip_dst: IP destination address (Optional)
-            eth_dst: Ethernet (layer 2) destination address (Optional)
-        """
-
-        # Overwrite standard fields if desired
-        sta_ip = ip_dst if ip_dst is not None else MDNS_V4_IP_DST
-        sta_hw = eth_dst if eth_dst is not None else MDNS_V4_MAC_DST
-
-        # Create mDNS layer
-        qdServer = scapy.DNSQR(qname=self.src_ipv4, qtype=MDNS_QTYPE)
-        mDNS = scapy.DNS(rd=MDNS_RECURSIVE, qd=qdServer)
-
-        # Create UDP
-        udp = scapy.UDP(sport=MDNS_UDP_PORT, dport=MDNS_UDP_PORT)
-
-        # Create IP layer
-        ip4 = scapy.IP(src=self.src_ipv4, dst=sta_ip, ttl=255)
-
-        # Create Ethernet layer
-        ethernet = scapy.Ether(src=self.src_mac, dst=sta_hw)
-
-        self.packet = ethernet / ip4 / udp / mDNS
-        return self.packet
-
-
-class Dot3Generator(object):
-    """Creates a custom 802.3 Ethernet Frame
-
-    Attributes:
-        packet: desired built custom packet
-        src_mac: MAC address (Layer 2) of the source node
-        src_ipv4: IPv4 address (Layer 3) of the source node
-    """
-
-    def __init__(self, **config_params):
-        """Initialize the class with the required network and packet params.
-
-        Args:
-            config_params: contains all the necessary packet parameters.
-              Some fields can be generated automatically. For example:
-              {'subnet_mask': '255.255.255.0',
-               'dst_ipv4': '192.168.1.3',
-               'src_ipv4: 'get_local', ...
-              The key can also be 'get_local' which means the code will read
-              and use the local interface parameters
-        """
-        interf = config_params["interf"]
-        self.packet = None
-        self.dst_mac = config_params["dst_mac"]
-        if config_params["src_mac"] == GET_FROM_LOCAL_INTERFACE:
-            self.src_mac = scapy.get_if_hwaddr(interf)
-        else:
-            self.src_mac = config_params["src_mac"]
-
-    def _build_ether(self, eth_dst=None):
-        """Creates the basic frame for 802.3
-
-        Args:
-            eth_dst: Ethernet (layer 2) destination address (Optional)
-        """
-        # Overwrite standard fields if desired
-        sta_hw = eth_dst if eth_dst is not None else self.dst_mac
-        # Create Ethernet layer
-        dot3_base = scapy.Dot3(src=self.src_mac, dst=sta_hw)
-
-        return dot3_base
-
-    def _pad_frame(self, frame):
-        """Pads the frame with default length and values
-
-        Args:
-            frame: Ethernet (layer 2) to be padded
-        """
-        frame.len = PAD_LEN_BYTES
-        pad = scapy.Padding()
-        pad.load = "\x00" * PAD_LEN_BYTES
-        return frame / pad
-
-    def generate(self, eth_dst=None):
-        """Generates the basic 802.3 frame and adds padding
-
-        Args:
-            eth_dst: Ethernet (layer 2) destination address (Optional)
-        """
-        # Create 802.3 Base
-        ethernet = self._build_ether(eth_dst)
-
-        self.packet = self._pad_frame(ethernet)
-        return self.packet
-
-    def generate_llc(self, eth_dst=None, dsap=2, ssap=3, ctrl=LLC_XID_CONTROL):
-        """Generates the 802.3 frame with LLC and adds padding
-
-        Args:
-            eth_dst: Ethernet (layer 2) destination address (Optional)
-            dsap: Destination Service Access Point (Optional)
-            ssap: Source Service Access Point (Optional)
-            ctrl: Control (Optional)
-        """
-        # Create 802.3 Base
-        ethernet = self._build_ether(eth_dst)
-
-        # Create LLC layer
-        llc = scapy.LLC(dsap=dsap, ssap=ssap, ctrl=ctrl)
-
-        # Append and create packet
-        self.packet = self._pad_frame(ethernet / llc)
-        return self.packet
-
-    def generate_snap(
-        self,
-        eth_dst=None,
-        dsap=SNAP_DSAP,
-        ssap=SNAP_SSAP,
-        ctrl=SNAP_CTRL,
-        oui=SNAP_OUI,
-        code=ETH_TYPE_IP,
-    ):
-        """Generates the 802.3 frame with LLC and SNAP and adds padding
-
-        Args:
-            eth_dst: Ethernet (layer 2) destination address (Optional)
-            dsap: Destination Service Access Point (Optional)
-            ssap: Source Service Access Point (Optional)
-            ctrl: Control (Optional)
-            oid: Protocol Id or Org Code (Optional)
-            code: EtherType (Optional)
-        """
-        # Create 802.3 Base
-        ethernet = self._build_ether(eth_dst)
-
-        # Create 802.2 LLC header
-        llc = scapy.LLC(dsap=dsap, ssap=ssap, ctrl=ctrl)
-
-        # Create 802.3 SNAP header
-        snap = scapy.SNAP(OUI=oui, code=code)
-
-        # Append and create packet
-        self.packet = self._pad_frame(ethernet / llc / snap)
-        return self.packet
-
-
-def get_if_addr6(intf, address_type):
-    """Returns the Ipv6 address from a given local interface.
-
-    Returns the desired IPv6 address from the interface 'intf' in human
-    readable form. The address type is indicated by the IPv6 constants like
-    IPV6_ADDR_LINKLOCAL, IPV6_ADDR_GLOBAL, etc. If no address is found,
-    None is returned.
-
-    Args:
-        intf: desired interface name
-        address_type: addrees typle like LINKLOCAL or GLOBAL
-
-    Returns:
-        Ipv6 address of the specified interface in human readable format
-    """
-    for if_list in scapy.in6_getifaddr():
-        if if_list[2] == intf and if_list[1] == address_type:
-            return if_list[0]
-
-    return None
diff --git a/src/antlion/controllers/pdu.py b/src/antlion/controllers/pdu.py
deleted file mode 100644
index 412742e..0000000
--- a/src/antlion/controllers/pdu.py
+++ /dev/null
@@ -1,215 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import importlib
-import logging
-
-from antlion import tracelogger
-
-MOBLY_CONTROLLER_CONFIG_NAME = "PduDevice"
-ACTS_CONTROLLER_REFERENCE_NAME = "pdu_devices"
-
-
-def create(configs):
-    """Creates a PduDevice for each config in configs.
-
-    Args:
-        configs: List of configs from PduDevice field.
-            Fields:
-                device: a string "<brand>.<model>" that corresponds to module
-                    in pdu_lib/
-                host: a string of the device ip address
-                username (optional): a string of the username for device sign-in
-                password (optional): a string of the password for device sign-in
-    Return:
-        A list of PduDevice objects.
-    """
-    if configs:
-        pdus = []
-        for config in configs:
-            device = config.get("device")
-            if not device:
-                raise PduError("Config must provide a device")
-
-            host = config.get("host")
-            if not device:
-                raise PduError("Config must provide a host ip address")
-            username = config.get("username")
-            password = config.get("password")
-            pdu = _create_device(device, host, username, password)
-            pdus.append(pdu)
-        return pdus
-
-
-def destroy(pdu_list):
-    """Ensure any connections to devices are closed.
-
-    Args:
-        pdu_list: A list of PduDevice objects.
-    """
-    for pdu in pdu_list:
-        pdu.close()
-
-
-def get_info(pdu_list):
-    """Retrieves info from a list of PduDevice objects.
-
-    Args:
-        pdu_list: A list of PduDevice objects.
-    Return:
-        A list containing a dictionary for each PduDevice, with keys:
-            'host': a string of the device ip address
-            'username': a string of the username
-            'password': a string of the password
-    """
-    info = []
-    for pdu in pdu_list:
-        info.append(
-            {"host": pdu.host, "username": pdu.username, "password": pdu.password}
-        )
-    return info
-
-
-def _create_device(device, host, username, password):
-    """Factory method that returns an instance of PduDevice implementation
-    based on the device string.
-    """
-    module_name = "antlion.controllers.pdu_lib." + device
-    module = importlib.import_module(module_name)
-    return module.PduDevice(host, username, password)
-
-
-def get_pdu_port_for_device(device_pdu_config, pdus):
-    """Retrieves the pdu object and port of that PDU powering a given device.
-    This is especially necessary when there are multilpe devices on a single PDU
-    or multiple PDUs registered.
-
-    Args:
-        device_pdu_config: a dict, representing the config of the device.
-        pdus: a list of registered PduDevice objects.
-
-    Returns:
-        A tuple: (PduObject for the device, string port number on that PDU).
-
-    Raises:
-        ValueError, if there is no PDU matching the given host in the config.
-
-    Example ACTS config:
-        ...
-        "testbed": [
-            ...
-            "FuchsiaDevice": [
-                {
-                    "ip": "<device_ip>",
-                    "ssh_config": "/path/to/sshconfig",
-                    "PduDevice": {
-                        "host": "192.168.42.185",
-                        "port": 2
-                    }
-                }
-            ],
-            "AccessPoint": [
-                {
-                    "ssh_config": {
-                        ...
-                    },
-                    "PduDevice": {
-                        "host": "192.168.42.185",
-                        "port" 1
-                    }
-                }
-            ],
-            "PduDevice": [
-                {
-                    "device": "synaccess.np02b",
-                    "host": "192.168.42.185"
-                }
-            ]
-        ],
-        ...
-    """
-    pdu_ip = device_pdu_config["host"]
-    port = device_pdu_config["port"]
-    for pdu in pdus:
-        if pdu.host == pdu_ip:
-            return pdu, port
-    raise ValueError("No PduDevice with host: %s" % pdu_ip)
-
-
-class PduDevice(object):
-    """An object that defines the basic Pdu functionality and abstracts
-    the actual hardware.
-
-    This is a pure abstract class. Implementations should be of the same
-    class name (eg. class PduDevice(pdu.PduDevice)) and exist in
-    pdu_lib/<brand>/<device_name>.py. PduDevice objects should not be
-    instantiated by users directly.
-    """
-
-    def __init__(self, host, username, password):
-        if type(self) is PduDevice:
-            raise NotImplementedError("Base class: cannot be instantiated directly")
-        self.host = host
-        self.username = username
-        self.password = password
-        self.log = tracelogger.TraceLogger(logging.getLogger())
-
-    def on_all(self):
-        """Turns on all outlets on the device."""
-        raise NotImplementedError("Base class: cannot be called directly")
-
-    def off_all(self):
-        """Turns off all outlets on the device."""
-        raise NotImplementedError("Base class: cannot be called directly")
-
-    def on(self, outlet):
-        """Turns on specific outlet on the device.
-        Args:
-            outlet: a string of the outlet to turn on.
-        """
-        raise NotImplementedError("Base class: cannot be called directly")
-
-    def off(self, outlet):
-        """Turns off specific outlet on the device.
-        Args:
-            outlet: a string of the outlet to turn off.
-        """
-        raise NotImplementedError("Base class: cannot be called directly")
-
-    def reboot(self, outlet):
-        """Toggles a specific outlet on the device to off, then to on.
-        Args:
-            outlet: a string of the outlet to reboot.
-        """
-        raise NotImplementedError("Base class: cannot be called directly")
-
-    def status(self):
-        """Retrieves the status of the outlets on the device.
-
-        Return:
-            A dictionary matching outlet string to:
-                True: if outlet is On
-                False: if outlet is Off
-        """
-        raise NotImplementedError("Base class: cannot be called directly")
-
-    def close(self):
-        """Closes connection to the device."""
-        raise NotImplementedError("Base class: cannot be called directly")
-
-
-class PduError(Exception):
-    """An exception for use within PduDevice implementations"""
diff --git a/src/antlion/controllers/pdu_lib/synaccess/np02b.py b/src/antlion/controllers/pdu_lib/synaccess/np02b.py
deleted file mode 100644
index 70624f0..0000000
--- a/src/antlion/controllers/pdu_lib/synaccess/np02b.py
+++ /dev/null
@@ -1,187 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion import utils
-from antlion.controllers import pdu
-
-import re
-import telnetlib
-import time
-
-
-class PduDevice(pdu.PduDevice):
-    """Implementation of pure abstract PduDevice object for the Synaccess np02b
-    Pdu.
-    """
-
-    def __init__(self, host, username, password):
-        super(PduDevice, self).__init__(host, username, password)
-        self.tnhelper = _TNHelperNP02B(host)
-
-    def on_all(self):
-        """Turns on both outlets on the np02b."""
-        self.tnhelper.cmd("ps 1")
-        self._verify_state({"1": True, "2": True})
-
-    def off_all(self):
-        """Turns off both outlets on the np02b."""
-        self.tnhelper.cmd("ps 0")
-        self._verify_state({"1": False, "2": False})
-
-    def on(self, outlet):
-        """Turns on specific outlet on the np02b.
-
-        Args:
-            outlet: string of the outlet to turn on ('1' or '2')
-        """
-        self.tnhelper.cmd("pset %s 1" % outlet)
-        self._verify_state({outlet: True})
-
-    def off(self, outlet):
-        """Turns off a specifc outlet on the np02b.
-
-        Args:
-            outlet: string of the outlet to turn off ('1' or '2')
-        """
-        self.tnhelper.cmd("pset %s 0" % outlet)
-        self._verify_state({outlet: False})
-
-    def reboot(self, outlet):
-        """Toggles a specific outlet on the np02b to off, then to on.
-
-        Args:
-            outlet: string of the outlet to reboot ('1' or '2')
-        """
-        self.off(outlet)
-        self._verify_state({outlet: False})
-        self.on(outlet)
-        self._verify_state({outlet: True})
-
-    def status(self):
-        """Returns the status of the np02b outlets.
-
-        Return:
-            a dict mapping outlet strings ('1' and '2') to:
-                True if outlet is ON
-                False if outlet is OFF
-        """
-        res = self.tnhelper.cmd("pshow")
-        status_list = re.findall("(ON|OFF)", res)
-        status_dict = {}
-        for i, status in enumerate(status_list):
-            status_dict[str(i + 1)] = status == "ON"
-        return status_dict
-
-    def close(self):
-        """Ensure connection to device is closed.
-
-        In this implementation, this shouldn't be necessary, but could be in
-        others that open on creation.
-        """
-        self.tnhelper.close()
-
-    def _verify_state(self, expected_state, timeout=3):
-        """Returns when expected_state is reached on device.
-
-        In order to prevent command functions from exiting until the desired
-        effect has occurred, this function verifys that the expected_state is a
-        subset of the desired state.
-
-        Args:
-            expected_state: a dict representing the expected state of one or
-                more outlets on the device. Maps outlet strings ('1' and/or '2')
-                to:
-                    True if outlet is expected to be ON.
-                    False if outlet is expected to be OFF.
-            timeout (default: 3): time in seconds until raising an exception.
-
-        Return:
-            True, if expected_state is reached.
-
-        Raises:
-            PduError if expected_state has not been reached by timeout.
-        """
-        end_time = time.time() + timeout
-        while time.time() < end_time:
-            actual_state = self.status()
-            if expected_state.items() <= actual_state.items():
-                return True
-            time.sleep(0.1)
-        raise pdu.PduError(
-            "Timeout while verifying state.\n"
-            "Expected State: %s\n"
-            "Actual State: %s" % (expected_state, actual_state)
-        )
-
-
-class _TNHelperNP02B(object):
-    """An internal helper class for Telnet with the Synaccess NP02B Pdu. This
-    helper is specific to the idiosyncrasies of the NP02B and therefore should
-    not be used with other devices.
-    """
-
-    def __init__(self, host):
-        self._tn = telnetlib.Telnet()
-        self.host = host
-        self.tx_cmd_separator = "\n\r"
-        self.rx_cmd_separator = "\r\n"
-        self.prompt = ">"
-
-    """
-    Executes a command on the device via telnet.
-    Args:
-        cmd_str: A string of the command to be run.
-    Returns:
-        A string of the response from the valid command (often empty).
-    """
-
-    def cmd(self, cmd_str):
-        # Open session
-        try:
-            self._tn.open(self.host, timeout=3)
-        except:
-            raise pdu.PduError("Failed to open telnet session to host (%s)" % self.host)
-        time.sleep(0.1)
-
-        # Read to end of first prompt
-        cmd_str.strip(self.tx_cmd_separator)
-        self._tn.read_eager()
-        time.sleep(0.1)
-
-        # Write command and read all output text
-        self._tn.write(utils.ascii_string(cmd_str + self.tx_cmd_separator))
-        res = self._tn.read_until(utils.ascii_string(self.prompt), 2)
-
-        # Parses out the commands output
-        if res is None:
-            raise pdu.PduError("Command failed: %s" % cmd_str)
-        res = res.decode()
-        if re.search("Invalid", res):
-            raise pdu.PduError("Command Invalid: %s" % cmd_str)
-        res = res.replace(self.prompt, "")
-        res = res.replace(self.tx_cmd_separator, "")
-        res = res.replace(self.rx_cmd_separator, "")
-        res = res.replace(cmd_str, "")
-
-        # Close session
-        self._tn.close()
-
-        time.sleep(0.5)
-
-        return res
-
-    def close(self):
-        self._tn.close()
diff --git a/src/antlion/controllers/sl4a_lib/sl4a_types.py b/src/antlion/controllers/sl4a_lib/sl4a_types.py
deleted file mode 100644
index 434ff92..0000000
--- a/src/antlion/controllers/sl4a_lib/sl4a_types.py
+++ /dev/null
@@ -1,57 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion.dict_object import DictObject
-
-
-class Sl4aEvent(DictObject):
-    """Event returned by sl4a calls to eventPoll() and eventWait()
-
-    The 'name' field uniquely identifies the contents of 'data'.
-
-    """
-
-    def __init__(self, name=None, time=None, data=None):
-        DictObject.__init__(self, name=name, time=time, data=data)
-
-
-class Sl4aNetworkInfo(DictObject):
-    """SL4A equivalent of an Android NetworkInfo Object"""
-
-    def __init__(
-        self,
-        isAvailable=None,
-        isConnected=None,
-        isFailover=None,
-        isRoaming=None,
-        ExtraInfo=None,
-        FailedReason=None,
-        TypeName=None,
-        SubtypeName=None,
-        State=None,
-    ):
-        DictObject.__init__(
-            self,
-            isAvailable=isAvailable,
-            isConnected=isConnected,
-            isFailover=isFailover,
-            isRoaming=isRoaming,
-            ExtraInfo=ExtraInfo,
-            FailedReason=FailedReason,
-            TypeName=TypeName,
-            SubtypeName=SubtypeName,
-            State=State,
-        )
diff --git a/src/antlion/controllers/utils_lib/commands/ip.py b/src/antlion/controllers/utils_lib/commands/ip.py
deleted file mode 100644
index 7e028b1..0000000
--- a/src/antlion/controllers/utils_lib/commands/ip.py
+++ /dev/null
@@ -1,160 +0,0 @@
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import ipaddress
-import re
-
-from antlion.libs.proc import job
-
-
-class LinuxIpCommand(object):
-    """Interface for doing standard IP commands on a linux system.
-
-    Wraps standard shell commands used for ip into a python object that can
-    be interacted with more easily.
-    """
-
-    def __init__(self, runner):
-        """
-        Args:
-            runner: Object that can take unix commands and run them in an
-                    enviroment (eg. connection.SshConnection).
-        """
-        self._runner = runner
-
-    def get_ipv4_addresses(self, net_interface):
-        """Gets all ipv4 addresses of a network interface.
-
-        Args:
-            net_interface: string, The network interface to get info on
-                           (eg. wlan0).
-
-        Returns: An iterator of tuples that contain (address, broadcast).
-                 where address is a ipaddress.IPv4Interface and broadcast
-                 is an ipaddress.IPv4Address.
-        """
-        results = self._runner.run("ip addr show dev %s" % net_interface)
-        lines = results.stdout.splitlines()
-
-        # Example stdout:
-        # 2: eth0: <BROADCAST,MULTICAST,UP,LOWER_UP> mtu 1500 qdisc mq state UP group default qlen 1000
-        #   link/ether 48:0f:cf:3c:9d:89 brd ff:ff:ff:ff:ff:ff
-        #   inet 192.168.1.1/24 brd 192.168.1.255 scope global eth0
-        #       valid_lft forever preferred_lft forever
-        #   inet6 2620:0:1000:1500:a968:a776:2d80:a8b3/64 scope global temporary dynamic
-        #       valid_lft 599919sec preferred_lft 80919sec
-
-        for line in lines:
-            line = line.strip()
-            match = re.search("inet (?P<address>[^\s]*) brd (?P<bcast>[^\s]*)", line)
-            if match:
-                d = match.groupdict()
-                address = ipaddress.IPv4Interface(d["address"])
-                bcast = ipaddress.IPv4Address(d["bcast"])
-                yield (address, bcast)
-
-            match = re.search("inet (?P<address>[^\s]*)", line)
-            if match:
-                d = match.groupdict()
-                address = ipaddress.IPv4Interface(d["address"])
-                yield (address, None)
-
-    def add_ipv4_address(self, net_interface, address, broadcast=None):
-        """Adds an ipv4 address to a net_interface.
-
-        Args:
-            net_interface: string, The network interface
-                           to get the new ipv4 (eg. wlan0).
-            address: ipaddress.IPv4Interface, The new ipaddress and netmask
-                     to add to an interface.
-            broadcast: ipaddress.IPv4Address, The broadcast address to use for
-                       this net_interfaces subnet.
-        """
-        if broadcast:
-            self._runner.run(
-                "ip addr add %s broadcast %s dev %s"
-                % (address, broadcast, net_interface)
-            )
-        else:
-            self._runner.run("ip addr add %s dev %s" % (address, net_interface))
-
-    def remove_ipv4_address(self, net_interface, address, ignore_status=False):
-        """Remove an ipv4 address.
-
-        Removes an ipv4 address from a network interface.
-
-        Args:
-            net_interface: string, The network interface to remove the
-                           ipv4 address from (eg. wlan0).
-            address: ipaddress.IPv4Interface or ipaddress.IPv4Address,
-                     The ip address to remove from the net_interface.
-            ignore_status: True if the exit status can be ignored
-        Returns:
-            The job result from a the command
-        """
-        return self._runner.run(
-            "ip addr del %s dev %s" % (address, net_interface),
-            ignore_status=ignore_status,
-        )
-
-    def set_ipv4_address(self, net_interface, address, broadcast=None):
-        """Set the ipv4 address.
-
-        Sets the ipv4 address of a network interface. If the network interface
-        has any other ipv4 addresses these will be cleared.
-
-        Args:
-            net_interface: string, The network interface to set the ip address
-                           on (eg. wlan0).
-            address: ipaddress.IPv4Interface, The ip address and subnet to give
-                     the net_interface.
-            broadcast: ipaddress.IPv4Address, The broadcast address to use for
-                       the subnet.
-        """
-        self.clear_ipv4_addresses(net_interface)
-        self.add_ipv4_address(net_interface, address, broadcast)
-
-    def clear_ipv4_addresses(self, net_interface):
-        """Clears all ipv4 addresses registered to a net_interface.
-
-        Args:
-            net_interface: string, The network interface to clear addresses from
-                           (eg. wlan0).
-        """
-        ip_info = self.get_ipv4_addresses(net_interface)
-
-        for address, _ in ip_info:
-            result = self.remove_ipv4_address(
-                net_interface, address, ignore_status=True
-            )
-            # It is possible that the address has already been removed by the
-            # time this command has been called. In such a case, we would get
-            # this error message.
-            error_msg = "RTNETLINK answers: Cannot assign requested address"
-            if result.exit_status != 0:
-                if error_msg in result.stderr:
-                    # If it was removed by another process, log a warning
-                    if address not in self.get_ipv4_addresses(net_interface):
-                        self._runner.log.warning(
-                            "Unable to remove address %s. The address was "
-                            "removed by another process." % address
-                        )
-                        continue
-                    # If it was not removed, raise an error
-                    self._runner.log.error(
-                        "Unable to remove address %s. The address is still "
-                        "registered to %s, despite call for removal."
-                        % (address, net_interface)
-                    )
-                raise job.Error(result)
diff --git a/src/antlion/controllers/utils_lib/commands/route.py b/src/antlion/controllers/utils_lib/commands/route.py
deleted file mode 100644
index a886455..0000000
--- a/src/antlion/controllers/utils_lib/commands/route.py
+++ /dev/null
@@ -1,195 +0,0 @@
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import ipaddress
-import re
-
-from antlion.controllers.utils_lib.ssh import connection
-
-
-class Error(Exception):
-    """Exception thrown when a valid ip command experiences errors."""
-
-
-class NetworkInterfaceDown(Error):
-    """Exception thrown when a network interface is down."""
-
-
-class LinuxRouteCommand(object):
-    """Interface for doing standard ip route commands on a linux system."""
-
-    DEFAULT_ROUTE = "default"
-
-    def __init__(self, runner):
-        """
-        Args:
-            runner: Object that can take unix commands and run them in an
-                    environment.
-        """
-        self._runner = runner
-
-    def add_route(self, net_interface, address, proto="static"):
-        """Add an entry to the ip routing table.
-
-        Will add a route for either a specific ip address, or a network.
-
-        Args:
-            net_interface: string, Any packet that sends through this route
-                           will be sent using this network interface
-                           (eg. wlan0).
-            address: ipaddress.IPv4Address, ipaddress.IPv4Network,
-                     or DEFAULT_ROUTE. The address to use. If a network
-                     is given then the entire subnet will be routed.
-                     If DEFAULT_ROUTE is given then this will set the
-                     default route.
-            proto: string, Routing protocol identifier of this route
-                   (e.g. kernel, redirect, boot, static, ra).
-                   See `man ip-route(8)` for details.
-
-        Raises:
-            NetworkInterfaceDown: Raised when the network interface is down.
-        """
-        try:
-            self._runner.run(
-                f"ip route add {address} dev {net_interface} proto {proto}"
-            )
-        except connection.CommandError as e:
-            if "File exists" in e.result.stderr:
-                raise Error("Route already exists.")
-            if "Network is down" in e.result.stderr:
-                raise NetworkInterfaceDown("Device must be up for adding a route.")
-            raise
-
-    def get_routes(self, net_interface=None):
-        """Get the routes in the ip routing table.
-
-        Args:
-            net_interface: string, If given, only retrieve routes that have
-                           been registered to go through this network
-                           interface (eg. wlan0).
-
-        Returns: An iterator that returns a tuple of (address, net_interface).
-                 If it is the default route then address
-                 will be the DEFAULT_ROUTE. If the route is a subnet then
-                 it will be a ipaddress.IPv4Network otherwise it is a
-                 ipaddress.IPv4Address.
-        """
-        result_ipv4 = self._runner.run("ip -4 route show")
-        result_ipv6 = self._runner.run("ip -6 route show")
-
-        lines = result_ipv4.stdout.splitlines() + result_ipv6.stdout.splitlines()
-
-        # Scan through each line for valid route entries
-        # Example output:
-        # default via 192.168.1.254 dev eth0  proto static
-        # 192.168.1.0/24 dev eth0  proto kernel  scope link  src 172.22.100.19  metric 1
-        # 192.168.2.1 dev eth2 proto kernel scope link metric 1
-        # fe80::/64 dev wlan0 proto static metric 1024
-        for line in lines:
-            if not "dev" in line:
-                continue
-
-            if line.startswith(self.DEFAULT_ROUTE):
-                # The default route entry is formatted differently.
-                match = re.search("dev (?P<net_interface>\S+)", line)
-                pair = None
-                if match:
-                    # When there is a match for the route entry pattern create
-                    # A pair to hold the info.
-                    pair = (self.DEFAULT_ROUTE, match.groupdict()["net_interface"])
-            else:
-                # Test the normal route entry pattern.
-                match = re.search(
-                    "(?P<address>[0-9A-Fa-f\.\:/]+) dev (?P<net_interface>\S+)", line
-                )
-                pair = None
-                if match:
-                    # When there is a match for the route entry pattern create
-                    # A pair to hold the info.
-                    d = match.groupdict()
-                    # Route can be either a network or specific address
-                    try:
-                        address = ipaddress.ip_address(d["address"])
-                    except ValueError:
-                        address = d["address"]
-
-                    pair = (address, d["net_interface"])
-
-            # No pair means no pattern was found.
-            if not pair:
-                continue
-
-            if net_interface:
-                # If a net_interface was passed in then only give the pair when it is
-                # The correct net_interface.
-                if pair[1] == net_interface:
-                    yield pair
-            else:
-                # No net_interface given give all valid route entries.
-                yield pair
-
-    def is_route(self, address, net_interface=None):
-        """Checks to see if a route exists.
-
-        Args:
-            address: ipaddress.IPv4Address, ipaddress.IPv4Network,
-                     or DEFAULT_ROUTE, The address to use.
-            net_interface: string, If specified, the route must be
-                           registered to go through this network interface
-                           (eg. wlan0).
-
-        Returns: True if the route is found, False otherwise.
-        """
-        for route, _ in self.get_routes(net_interface):
-            if route == address:
-                return True
-
-        return False
-
-    def remove_route(self, address, net_interface=None):
-        """Removes a route from the ip routing table.
-
-        Removes a route from the ip routing table. If the route does not exist
-        nothing is done.
-
-        Args:
-            address: ipaddress.IPv4Address, ipaddress.IPv4Network,
-                     or DEFAULT_ROUTE, The address of the route to remove.
-            net_interface: string, If specified the route being removed is
-                           registered to go through this network interface
-                           (eg. wlan0)
-        """
-        try:
-            if net_interface:
-                self._runner.run("ip route del %s dev %s" % (address, net_interface))
-            else:
-                self._runner.run("ip route del %s" % address)
-        except connection.CommandError as e:
-            if "No such process" in e.result.stderr:
-                # The route didn't exist.
-                return
-            raise
-
-    def clear_routes(self, net_interface=None):
-        """Clears all routes.
-
-        Args:
-            net_interface: The network interface to clear routes on.
-            If not given then all routes will be removed on all network
-            interfaces (eg. wlan0).
-        """
-        routes = self.get_routes(net_interface)
-
-        for a, d in routes:
-            self.remove_route(a, d)
diff --git a/src/antlion/controllers/utils_lib/host_utils.py b/src/antlion/controllers/utils_lib/host_utils.py
deleted file mode 100644
index 1b66089..0000000
--- a/src/antlion/controllers/utils_lib/host_utils.py
+++ /dev/null
@@ -1,62 +0,0 @@
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import socket
-
-
-def get_available_host_port():
-    """Finds a semi-random available port.
-
-    A race condition is still possible after the port number is returned, if
-    another process happens to bind it.
-
-    Returns:
-        A port number that is unused on both TCP and UDP.
-    """
-    # On the 2.6 kernel, calling _try_bind() on UDP socket returns the
-    # same port over and over. So always try TCP first.
-    while True:
-        # Ask the OS for an unused port.
-        port = _try_bind(0, socket.SOCK_STREAM, socket.IPPROTO_TCP)
-        # Check if this port is unused on the other protocol.
-        if port and _try_bind(port, socket.SOCK_DGRAM, socket.IPPROTO_UDP):
-            return port
-
-
-def is_port_available(port):
-    """Checks if a given port number is available on the system.
-
-    Args:
-        port: An integer which is the port number to check.
-
-    Returns:
-        True if the port is available; False otherwise.
-    """
-    return _try_bind(port, socket.SOCK_STREAM, socket.IPPROTO_TCP) and _try_bind(
-        port, socket.SOCK_DGRAM, socket.IPPROTO_UDP
-    )
-
-
-def _try_bind(port, socket_type, socket_proto):
-    s = socket.socket(socket.AF_INET, socket_type, socket_proto)
-    try:
-        try:
-            s.bind(("", port))
-            # The result of getsockname() is protocol dependent, but for both
-            # IPv4 and IPv6 the second field is a port number.
-            return s.getsockname()[1]
-        except socket.error:
-            return None
-    finally:
-        s.close()
diff --git a/src/antlion/controllers/utils_lib/ssh/connection.py b/src/antlion/controllers/utils_lib/ssh/connection.py
deleted file mode 100644
index 23c80d7..0000000
--- a/src/antlion/controllers/utils_lib/ssh/connection.py
+++ /dev/null
@@ -1,460 +0,0 @@
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import collections
-import os
-import re
-import shutil
-import tempfile
-import threading
-import time
-import uuid
-
-from antlion import logger
-from antlion.controllers.utils_lib import host_utils
-from antlion.controllers.utils_lib.ssh import formatter
-from antlion.libs.proc import job
-
-
-class Error(Exception):
-    """An error occurred during an ssh operation."""
-
-
-class CommandError(Exception):
-    """An error occurred with the command.
-
-    Attributes:
-        result: The results of the ssh command that had the error.
-    """
-
-    def __init__(self, result):
-        """
-        Args:
-            result: The result of the ssh command that created the problem.
-        """
-        self.result = result
-
-    def __str__(self):
-        return "cmd: %s\nstdout: %s\nstderr: %s" % (
-            self.result.command,
-            self.result.stdout,
-            self.result.stderr,
-        )
-
-
-_Tunnel = collections.namedtuple("_Tunnel", ["local_port", "remote_port", "proc"])
-
-
-class SshConnection(object):
-    """Provides a connection to a remote machine through ssh.
-
-    Provides the ability to connect to a remote machine and execute a command
-    on it. The connection will try to establish a persistent connection When
-    a command is run. If the persistent connection fails it will attempt
-    to connect normally.
-    """
-
-    @property
-    def socket_path(self):
-        """Returns: The os path to the master socket file."""
-        return os.path.join(self._master_ssh_tempdir, "socket")
-
-    def __init__(self, settings):
-        """
-        Args:
-            settings: The ssh settings to use for this connection.
-            formatter: The object that will handle formatting ssh command
-                       for use with the background job.
-        """
-        self._settings = settings
-        self._formatter = formatter.SshFormatter()
-        self._lock = threading.Lock()
-        self._master_ssh_proc = None
-        self._master_ssh_tempdir = None
-        self._tunnels = list()
-
-        def log_line(msg):
-            return "[SshConnection | %s] %s" % (self._settings.hostname, msg)
-
-        self.log = logger.create_logger(log_line)
-
-    def __enter__(self):
-        return self
-
-    def __exit__(self, _, __, ___):
-        self.close()
-
-    def __del__(self):
-        self.close()
-
-    def setup_master_ssh(self, timeout_seconds=5):
-        """Sets up the master ssh connection.
-
-        Sets up the initial master ssh connection if it has not already been
-        started.
-
-        Args:
-            timeout_seconds: The time to wait for the master ssh connection to
-            be made.
-
-        Raises:
-            Error: When setting up the master ssh connection fails.
-        """
-        with self._lock:
-            if self._master_ssh_proc is not None:
-                socket_path = self.socket_path
-                if (
-                    not os.path.exists(socket_path)
-                    or self._master_ssh_proc.poll() is not None
-                ):
-                    self.log.debug(
-                        "Master ssh connection to %s is down.", self._settings.hostname
-                    )
-                    self._cleanup_master_ssh()
-
-            if self._master_ssh_proc is None:
-                # Create a shared socket in a temp location.
-                self._master_ssh_tempdir = tempfile.mkdtemp(prefix="ssh-master")
-
-                # Setup flags and options for running the master ssh
-                # -N: Do not execute a remote command.
-                # ControlMaster: Spawn a master connection.
-                # ControlPath: The master connection socket path.
-                extra_flags = {"-N": None}
-                extra_options = {
-                    "ControlMaster": True,
-                    "ControlPath": self.socket_path,
-                    "BatchMode": True,
-                }
-
-                # Construct the command and start it.
-                master_cmd = self._formatter.format_ssh_local_command(
-                    self._settings, extra_flags=extra_flags, extra_options=extra_options
-                )
-                self.log.info("Starting master ssh connection.")
-                self._master_ssh_proc = job.run_async(master_cmd)
-
-                end_time = time.time() + timeout_seconds
-
-                while time.time() < end_time:
-                    if os.path.exists(self.socket_path):
-                        break
-                    time.sleep(0.2)
-                else:
-                    self._cleanup_master_ssh()
-                    raise Error("Master ssh connection timed out.")
-
-    def run(
-        self,
-        command,
-        timeout=60,
-        ignore_status=False,
-        env=None,
-        io_encoding="utf-8",
-        attempts=2,
-    ):
-        """Runs a remote command over ssh.
-
-        Will ssh to a remote host and run a command. This method will
-        block until the remote command is finished.
-
-        Args:
-            command: The command to execute over ssh. Can be either a string
-                     or a list.
-            timeout: number seconds to wait for command to finish.
-            ignore_status: bool True to ignore the exit code of the remote
-                           subprocess.  Note that if you do ignore status codes,
-                           you should handle non-zero exit codes explicitly.
-            env: dict environment variables to setup on the remote host.
-            io_encoding: str unicode encoding of command output.
-            attempts: Number of attempts before giving up on command failures.
-
-        Returns:
-            A job.Result containing the results of the ssh command.
-
-        Raises:
-            job.TimeoutError: When the remote command took to long to execute.
-            Error: When the ssh connection failed to be created.
-            CommandError: Ssh worked, but the command had an error executing.
-        """
-        if attempts == 0:
-            return None
-        if env is None:
-            env = {}
-
-        try:
-            self.setup_master_ssh(self._settings.connect_timeout)
-        except Error:
-            self.log.warning(
-                "Failed to create master ssh connection, using "
-                "normal ssh connection."
-            )
-
-        extra_options = {"BatchMode": True}
-        if self._master_ssh_proc:
-            extra_options["ControlPath"] = self.socket_path
-
-        identifier = str(uuid.uuid4())
-        full_command = 'echo "CONNECTED: %s"; %s' % (identifier, command)
-
-        terminal_command = self._formatter.format_command(
-            full_command, env, self._settings, extra_options=extra_options
-        )
-
-        dns_retry_count = 2
-        while True:
-            result = job.run(
-                terminal_command,
-                ignore_status=True,
-                timeout=timeout,
-                io_encoding=io_encoding,
-            )
-            output = result.stdout
-
-            # Check for a connected message to prevent false negatives.
-            valid_connection = re.search(
-                "^CONNECTED: %s" % identifier, output, flags=re.MULTILINE
-            )
-            if valid_connection:
-                # Remove the first line that contains the connect message.
-                line_index = output.find("\n") + 1
-                if line_index == 0:
-                    line_index = len(output)
-                real_output = output[line_index:].encode(io_encoding)
-
-                result = job.Result(
-                    command=result.command,
-                    stdout=real_output,
-                    stderr=result._raw_stderr,
-                    exit_status=result.exit_status,
-                    duration=result.duration,
-                    did_timeout=result.did_timeout,
-                    encoding=io_encoding,
-                )
-                if result.exit_status and not ignore_status:
-                    raise job.Error(result)
-                return result
-
-            error_string = result.stderr
-
-            had_dns_failure = result.exit_status == 255 and re.search(
-                r"^ssh: .*: Name or service not known", error_string, flags=re.MULTILINE
-            )
-            if had_dns_failure:
-                dns_retry_count -= 1
-                if not dns_retry_count:
-                    raise Error("DNS failed to find host.", result)
-                self.log.debug("Failed to connect to host, retrying...")
-            else:
-                break
-
-        had_timeout = re.search(
-            r"^ssh: connect to host .* port .*: " r"Connection timed out\r$",
-            error_string,
-            flags=re.MULTILINE,
-        )
-        if had_timeout:
-            raise Error("Ssh timed out.", result)
-
-        permission_denied = "Permission denied" in error_string
-        if permission_denied:
-            raise Error("Permission denied.", result)
-
-        unknown_host = re.search(
-            r"ssh: Could not resolve hostname .*: " r"Name or service not known",
-            error_string,
-            flags=re.MULTILINE,
-        )
-        if unknown_host:
-            raise Error("Unknown host.", result)
-
-        self.log.error("An unknown error has occurred. Job result: %s" % result)
-        ping_output = job.run(
-            "ping %s -c 3 -w 1" % self._settings.hostname, ignore_status=True
-        )
-        self.log.error("Ping result: %s" % ping_output)
-        if attempts > 1:
-            self._cleanup_master_ssh()
-            self.run(command, timeout, ignore_status, env, io_encoding, attempts - 1)
-        raise Error("The job failed for unknown reasons.", result)
-
-    def run_async(self, command, env=None):
-        """Starts up a background command over ssh.
-
-        Will ssh to a remote host and startup a command. This method will
-        block until there is confirmation that the remote command has started.
-
-        Args:
-            command: The command to execute over ssh. Can be either a string
-                     or a list.
-            env: A dictionary of environment variables to setup on the remote
-                 host.
-
-        Returns:
-            The result of the command to launch the background job.
-
-        Raises:
-            CmdTimeoutError: When the remote command took to long to execute.
-            SshTimeoutError: When the connection took to long to established.
-            SshPermissionDeniedError: When permission is not allowed on the
-                                      remote host.
-        """
-        return self.run(
-            f"({command}) < /dev/null > /dev/null 2>&1 & echo -n $!", env=env
-        )
-
-    def close(self):
-        """Clean up open connections to remote host."""
-        self._cleanup_master_ssh()
-        while self._tunnels:
-            self.close_ssh_tunnel(self._tunnels[0].local_port)
-
-    def _cleanup_master_ssh(self):
-        """
-        Release all resources (process, temporary directory) used by an active
-        master SSH connection.
-        """
-        # If a master SSH connection is running, kill it.
-        if self._master_ssh_proc is not None:
-            self.log.debug("Nuking master_ssh_job.")
-            self._master_ssh_proc.kill()
-            self._master_ssh_proc.wait()
-            self._master_ssh_proc = None
-
-        # Remove the temporary directory for the master SSH socket.
-        if self._master_ssh_tempdir is not None:
-            self.log.debug("Cleaning master_ssh_tempdir.")
-            shutil.rmtree(self._master_ssh_tempdir)
-            self._master_ssh_tempdir = None
-
-    def create_ssh_tunnel(self, port, local_port=None):
-        """Create an ssh tunnel from local_port to port.
-
-        This securely forwards traffic from local_port on this machine to the
-        remote SSH host at port.
-
-        Args:
-            port: remote port on the host.
-            local_port: local forwarding port, or None to pick an available
-                        port.
-
-        Returns:
-            the created tunnel process.
-        """
-        if not local_port:
-            local_port = host_utils.get_available_host_port()
-        else:
-            for tunnel in self._tunnels:
-                if tunnel.remote_port == port:
-                    return tunnel.local_port
-
-        extra_flags = {
-            "-n": None,  # Read from /dev/null for stdin
-            "-N": None,  # Do not execute a remote command
-            "-q": None,  # Suppress warnings and diagnostic commands
-            "-L": "%d:localhost:%d" % (local_port, port),
-        }
-        extra_options = dict()
-        if self._master_ssh_proc:
-            extra_options["ControlPath"] = self.socket_path
-        tunnel_cmd = self._formatter.format_ssh_local_command(
-            self._settings, extra_flags=extra_flags, extra_options=extra_options
-        )
-        self.log.debug("Full tunnel command: %s", tunnel_cmd)
-        # Exec the ssh process directly so that when we deliver signals, we
-        # deliver them straight to the child process.
-        tunnel_proc = job.run_async(tunnel_cmd)
-        self.log.debug(
-            "Started ssh tunnel, local = %d remote = %d, pid = %d",
-            local_port,
-            port,
-            tunnel_proc.pid,
-        )
-        self._tunnels.append(_Tunnel(local_port, port, tunnel_proc))
-        return local_port
-
-    def close_ssh_tunnel(self, local_port):
-        """Close a previously created ssh tunnel of a TCP port.
-
-        Args:
-            local_port: int port on localhost previously forwarded to the remote
-                        host.
-
-        Returns:
-            integer port number this port was forwarded to on the remote host or
-            None if no tunnel was found.
-        """
-        idx = None
-        for i, tunnel in enumerate(self._tunnels):
-            if tunnel.local_port == local_port:
-                idx = i
-                break
-        if idx is not None:
-            tunnel = self._tunnels.pop(idx)
-            tunnel.proc.kill()
-            tunnel.proc.wait()
-            return tunnel.remote_port
-        return None
-
-    def send_file(self, local_path, remote_path, ignore_status=False):
-        """Send a file from the local host to the remote host.
-
-        Args:
-            local_path: string path of file to send on local host.
-            remote_path: string path to copy file to on remote host.
-            ignore_status: Whether or not to ignore the command's exit_status.
-        """
-        # TODO: This may belong somewhere else: b/32572515
-        user_host = self._formatter.format_host_name(self._settings)
-        job.run(
-            "scp %s %s:%s" % (local_path, user_host, remote_path),
-            ignore_status=ignore_status,
-        )
-
-    def pull_file(self, local_path, remote_path, ignore_status=False):
-        """Send a file from remote host to local host
-
-        Args:
-            local_path: string path of file to recv on local host
-            remote_path: string path to copy file from on remote host.
-            ignore_status: Whether or not to ignore the command's exit_status.
-        """
-        user_host = self._formatter.format_host_name(self._settings)
-        job.run(
-            "scp %s:%s %s" % (user_host, remote_path, local_path),
-            ignore_status=ignore_status,
-        )
-
-    def find_free_port(self, interface_name="localhost"):
-        """Find a unused port on the remote host.
-
-        Note that this method is inherently racy, since it is impossible
-        to promise that the remote port will remain free.
-
-        Args:
-            interface_name: string name of interface to check whether a
-                            port is used against.
-
-        Returns:
-            integer port number on remote interface that was free.
-        """
-        # TODO: This may belong somewhere else: b/3257251
-        free_port_cmd = (
-            'python -c "import socket; s=socket.socket(); '
-            "s.bind(('%s', 0)); print(s.getsockname()[1]); s.close()\""
-        ) % interface_name
-        port = int(self.run(free_port_cmd).stdout)
-        # Yield to the os to ensure the port gets cleaned up.
-        time.sleep(0.001)
-        return port
diff --git a/src/antlion/dict_object.py b/src/antlion/dict_object.py
deleted file mode 100644
index 9e3288f..0000000
--- a/src/antlion/dict_object.py
+++ /dev/null
@@ -1,84 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-class DictObject(dict):
-    """Optional convenient base type for creating simple objects that are
-    naturally serializable.
-
-    A DictObject provides object-oriented access semantics to a dictionary,
-    allowing it to look like a class with defined members. By ensuring that
-    all of the class members are serializable, the object can be serialized
-    as a dictionary/de-serialized from a dictionary.
-    """
-
-    def __init__(self, *args, **kwargs):
-        """Constructor for a dictionary-as-object representation of kwargs
-
-        Args:
-            args: Currently unused - included for completeness
-            kwargs: keyword arguments used to construct the underlying dict
-
-        Returns:
-            Instance of DictObject
-        """
-        super(DictObject, self).update(**kwargs)
-
-    def __getattr__(self, name):
-        """Returns a key from the superclass dictionary as an attribute
-
-        Args:
-            name: name of the pseudo class attribute
-
-        Returns:
-            Dictionary item stored at "name"
-
-        Raises:
-            AttributeError if the item is not found
-        """
-        try:
-            return self[name]
-        except KeyError as ke:
-            raise AttributeError(ke)
-
-    def __setattr__(self, name, value):
-        """Updates the value of a key=name to a given value
-
-        Args:
-            name: name of the pseudo class attribute
-            value: value of the key
-
-        Raises:
-            AttributeError if the item is not found
-        """
-        if name in super(DictObject, self).keys():
-            super(DictObject, self).__setitem__(name, value)
-        else:
-            raise AttributeError("Class does not have attribute {}".format(value))
-
-    @classmethod
-    def from_dict(cls, dictionary):
-        """Factory method for constructing a DictObject from a dictionary
-
-        Args:
-            dictionary: Dictionary used to construct the DictObject
-
-        Returns:
-            Instance of DictObject
-        """
-        c = cls()
-        c.update(dictionary)
-        return c
diff --git a/src/antlion/event/decorators.py b/src/antlion/event/decorators.py
deleted file mode 100644
index b845dad..0000000
--- a/src/antlion/event/decorators.py
+++ /dev/null
@@ -1,67 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-from antlion.event.subscription_handle import InstanceSubscriptionHandle
-from antlion.event.subscription_handle import StaticSubscriptionHandle
-from antlion.event import subscription_bundle
-
-
-def subscribe_static(event_type, event_filter=None, order=0):
-    """A decorator that subscribes a static or module-level function.
-
-    This function must be registered manually.
-    """
-
-    class InnerSubscriptionHandle(StaticSubscriptionHandle):
-        def __init__(self, func):
-            super().__init__(event_type, func, event_filter=event_filter, order=order)
-
-    return InnerSubscriptionHandle
-
-
-def subscribe(event_type, event_filter=None, order=0):
-    """A decorator that subscribes an instance method."""
-
-    class InnerSubscriptionHandle(InstanceSubscriptionHandle):
-        def __init__(self, func):
-            super().__init__(event_type, func, event_filter=event_filter, order=order)
-
-    return InnerSubscriptionHandle
-
-
-def register_static_subscriptions(decorated):
-    """Registers all static subscriptions in decorated's attributes.
-
-    Args:
-        decorated: The object being decorated
-
-    Returns:
-        The decorated.
-    """
-    subscription_bundle.create_from_static(decorated).register()
-
-    return decorated
-
-
-def register_instance_subscriptions(obj):
-    """A decorator that subscribes all instance subscriptions after object init."""
-    old_init = obj.__init__
-
-    def init_replacement(self, *args, **kwargs):
-        old_init(self, *args, **kwargs)
-        subscription_bundle.create_from_instance(self).register()
-
-    obj.__init__ = init_replacement
-    return obj
diff --git a/src/antlion/event/subscription_bundle.py b/src/antlion/event/subscription_bundle.py
deleted file mode 100644
index ac3cfb6..0000000
--- a/src/antlion/event/subscription_bundle.py
+++ /dev/null
@@ -1,156 +0,0 @@
-import logging
-import threading
-
-from antlion.event import event_bus
-from antlion.event.event_subscription import EventSubscription
-from antlion.event.subscription_handle import InstanceSubscriptionHandle
-from antlion.event.subscription_handle import SubscriptionHandle
-from antlion.event.subscription_handle import StaticSubscriptionHandle
-
-
-class SubscriptionBundle(object):
-    """A class for maintaining a set of EventSubscriptions in the event bus.
-
-    Attributes:
-        subscriptions: A dictionary of {EventSubscription: RegistrationID}
-    """
-
-    def __init__(self):
-        self.subscriptions = {}
-        self._subscription_lock = threading.Lock()
-        self._registered = False
-
-    @property
-    def registered(self):
-        """True if this SubscriptionBundle has been registered."""
-        return self._registered
-
-    def add(self, event_type, func, event_filter=None, order=0):
-        """Adds a new Subscription to this SubscriptionBundle.
-
-        If this SubscriptionBundle is registered, the added Subscription will
-        also be registered.
-
-        Returns:
-            the EventSubscription object created.
-        """
-        subscription = EventSubscription(
-            event_type, func, event_filter=event_filter, order=order
-        )
-        return self.add_subscription(subscription)
-
-    def add_subscription(self, subscription):
-        """Adds an existing Subscription to the subscription bundle.
-
-        If this SubscriptionBundle is registered, the added subscription will
-        also be registered.
-
-        Returns:
-            the subscription object.
-        """
-        registration_id = None
-        with self._subscription_lock:
-            if self.registered:
-                registration_id = event_bus.register_subscription(subscription)
-
-            self.subscriptions[subscription] = registration_id
-        return subscription
-
-    def remove_subscription(self, subscription):
-        """Removes a subscription from the SubscriptionBundle.
-
-        If the SubscriptionBundle is registered, removing the subscription will
-        also unregister it.
-        """
-        if subscription not in self.subscriptions.keys():
-            return False
-        with self._subscription_lock:
-            if self.registered:
-                event_bus.unregister(self.subscriptions[subscription])
-            del self.subscriptions[subscription]
-        return True
-
-    def register(self):
-        """Registers all subscriptions found within this object."""
-        if self.registered:
-            return
-        with self._subscription_lock:
-            self._registered = True
-            for subscription, registration_id in self.subscriptions.items():
-                if registration_id is not None:
-                    logging.warning(
-                        "Registered subscription found in "
-                        "unregistered SubscriptionBundle: %s, %s"
-                        % (subscription, registration_id)
-                    )
-                self.subscriptions[subscription] = event_bus.register_subscription(
-                    subscription
-                )
-
-    def unregister(self):
-        """Unregisters all subscriptions managed by this SubscriptionBundle."""
-        if not self.registered:
-            return
-        with self._subscription_lock:
-            self._registered = False
-            for subscription, registration_id in self.subscriptions.items():
-                if registration_id is None:
-                    logging.warning(
-                        "Unregistered subscription found in "
-                        "registered SubscriptionBundle: %s, %s"
-                        % (subscription, registration_id)
-                    )
-                event_bus.unregister(subscription)
-                self.subscriptions[subscription] = None
-
-
-def create_from_static(obj):
-    """Generates a SubscriptionBundle from @subscribe_static functions on obj.
-
-    Args:
-        obj: The object that contains @subscribe_static functions. Can either
-             be a module or a class.
-
-    Returns:
-        An unregistered SubscriptionBundle.
-    """
-    return _create_from_object(obj, obj, StaticSubscriptionHandle)
-
-
-def create_from_instance(instance):
-    """Generates a SubscriptionBundle from an instance's @subscribe functions.
-
-    Args:
-        instance: The instance object that contains @subscribe functions.
-
-    Returns:
-        An unregistered SubscriptionBundle.
-    """
-    return _create_from_object(instance, instance.__class__, InstanceSubscriptionHandle)
-
-
-def _create_from_object(obj, obj_to_search, subscription_handle_type):
-    """Generates a SubscriptionBundle from an object's SubscriptionHandles.
-
-    Note that instance variables do not have the class's functions as direct
-    attributes. The attributes are resolved from the type of the object. Here,
-    we need to search through the instance's class to find the correct types,
-    and subscribe the instance-specific subscriptions.
-
-    Args:
-        obj: The object that contains SubscriptionHandles.
-        obj_to_search: The class to search for SubscriptionHandles from.
-        subscription_handle_type: The type of the SubscriptionHandles to
-                                  capture.
-
-    Returns:
-        An unregistered SubscriptionBundle.
-    """
-    bundle = SubscriptionBundle()
-    for attr_name, attr_value in obj_to_search.__dict__.items():
-        if isinstance(attr_value, subscription_handle_type):
-            bundle.add_subscription(getattr(obj, attr_name).subscription)
-        if isinstance(attr_value, staticmethod):
-            if isinstance(getattr(obj, attr_name), subscription_handle_type):
-                bundle.add_subscription(getattr(obj, attr_name).subscription)
-    return bundle
diff --git a/src/antlion/libs/proc/job.py b/src/antlion/libs/proc/job.py
deleted file mode 100644
index c1cdc24..0000000
--- a/src/antlion/libs/proc/job.py
+++ /dev/null
@@ -1,204 +0,0 @@
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import logging
-import os
-import subprocess
-import time
-
-
-class Error(Exception):
-    """Indicates that a command failed, is fatal to the test unless caught."""
-
-    def __init__(self, result):
-        super(Error, self).__init__(result)
-        self.result: Result = result
-
-
-class TimeoutError(Error):
-    """Thrown when a BackgroundJob times out on wait."""
-
-
-class Result(object):
-    """Command execution result.
-
-    Contains information on subprocess execution after it has exited.
-
-    Attributes:
-        command: An array containing the command and all arguments that
-                 was executed.
-        exit_status: Integer exit code of the process.
-        stdout_raw: The raw bytes output from standard out.
-        stderr_raw: The raw bytes output from standard error
-        duration: How long the process ran for.
-        did_timeout: True if the program timed out and was killed.
-    """
-
-    @property
-    def stdout(self):
-        """String representation of standard output."""
-        if not self._stdout_str:
-            self._stdout_str = self._raw_stdout.decode(
-                encoding=self._encoding, errors="replace"
-            )
-            self._stdout_str = self._stdout_str.strip()
-        return self._stdout_str
-
-    @property
-    def stderr(self):
-        """String representation of standard error."""
-        if not self._stderr_str:
-            self._stderr_str = self._raw_stderr.decode(
-                encoding=self._encoding, errors="replace"
-            )
-            self._stderr_str = self._stderr_str.strip()
-        return self._stderr_str
-
-    def __init__(
-        self,
-        command=[],
-        stdout=bytes(),
-        stderr=bytes(),
-        exit_status=None,
-        duration=0,
-        did_timeout=False,
-        encoding="utf-8",
-    ):
-        """
-        Args:
-            command: The command that was run. This will be a list containing
-                     the executed command and all args.
-            stdout: The raw bytes that standard output gave.
-            stderr: The raw bytes that standard error gave.
-            exit_status: The exit status of the command.
-            duration: How long the command ran.
-            did_timeout: True if the command timed out.
-            encoding: The encoding standard that the program uses.
-        """
-        self.command = command
-        self.exit_status = exit_status
-        self._raw_stdout = stdout
-        self._raw_stderr = stderr
-        self._stdout_str = None
-        self._stderr_str = None
-        self._encoding = encoding
-        self.duration = duration
-        self.did_timeout = did_timeout
-
-    def __repr__(self):
-        return (
-            "job.Result(command=%r, stdout=%r, stderr=%r, exit_status=%r, "
-            "duration=%r, did_timeout=%r, encoding=%r)"
-        ) % (
-            self.command,
-            self._raw_stdout,
-            self._raw_stderr,
-            self.exit_status,
-            self.duration,
-            self.did_timeout,
-            self._encoding,
-        )
-
-
-def run(command, timeout=60, ignore_status=False, env=None, io_encoding="utf-8"):
-    """Execute a command in a subproccess and return its output.
-
-    Commands can be either shell commands (given as strings) or the
-    path and arguments to an executable (given as a list).  This function
-    will block until the subprocess finishes or times out.
-
-    Args:
-        command: The command to execute. Can be either a string or a list.
-        timeout: number seconds to wait for command to finish.
-        ignore_status: bool True to ignore the exit code of the remote
-                       subprocess.  Note that if you do ignore status codes,
-                       you should handle non-zero exit codes explicitly.
-        env: dict enviroment variables to setup on the remote host.
-        io_encoding: str unicode encoding of command output.
-
-    Returns:
-        A job.Result containing the results of the ssh command.
-
-    Raises:
-        job.TimeoutError: When the remote command took to long to execute.
-        Error: When the command had an error executing and ignore_status==False.
-    """
-    start_time = time.time()
-    proc = subprocess.Popen(
-        command,
-        env=env,
-        stdout=subprocess.PIPE,
-        stderr=subprocess.PIPE,
-        shell=not isinstance(command, list),
-    )
-    # Wait on the process terminating
-    timed_out = False
-    out = bytes()
-    err = bytes()
-    try:
-        (out, err) = proc.communicate(timeout=timeout)
-    except subprocess.TimeoutExpired:
-        timed_out = True
-        proc.kill()
-        proc.wait()
-
-    result = Result(
-        command=command,
-        stdout=out,
-        stderr=err,
-        exit_status=proc.returncode,
-        duration=time.time() - start_time,
-        encoding=io_encoding,
-        did_timeout=timed_out,
-    )
-    logging.debug(result)
-
-    if timed_out:
-        logging.error("Command %s with %s timeout setting timed out", command, timeout)
-        raise TimeoutError(result)
-
-    if not ignore_status and proc.returncode != 0:
-        raise Error(result)
-
-    return result
-
-
-def run_async(command, env=None):
-    """Execute a command in a subproccess asynchronously.
-
-    It is the callers responsibility to kill/wait on the resulting
-    subprocess.Popen object.
-
-    Commands can be either shell commands (given as strings) or the
-    path and arguments to an executable (given as a list).  This function
-    will not block.
-
-    Args:
-        command: The command to execute. Can be either a string or a list.
-        env: dict enviroment variables to setup on the remote host.
-
-    Returns:
-        A subprocess.Popen object representing the created subprocess.
-
-    """
-    proc = subprocess.Popen(
-        command,
-        env=env,
-        preexec_fn=os.setpgrp,
-        shell=not isinstance(command, list),
-        stdout=subprocess.PIPE,
-        stderr=subprocess.STDOUT,
-    )
-    logging.debug("command %s started with pid %s", command, proc.pid)
-    return proc
diff --git a/src/antlion/libs/yaml_writer.py b/src/antlion/libs/yaml_writer.py
deleted file mode 100644
index 33c349f..0000000
--- a/src/antlion/libs/yaml_writer.py
+++ /dev/null
@@ -1,48 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import collections
-import yaml
-
-# Allow yaml to dump OrderedDict
-yaml.add_representer(
-    collections.OrderedDict,
-    lambda dumper, data: dumper.represent_dict(data),
-    Dumper=yaml.SafeDumper,
-)
-
-
-def _str_representer(dumper, data):
-    if len(data.splitlines()) > 1:
-        data = "\n".join(
-            line.replace("\t", "    ").rstrip() for line in data.splitlines()
-        )
-        return dumper.represent_scalar("tag:yaml.org,2002:str", data, style="|")
-    return dumper.represent_scalar("tag:yaml.org,2002:str", data)
-
-
-# Automatically convert multiline strings into block literals
-yaml.add_representer(str, _str_representer, Dumper=yaml.SafeDumper)
-
-_DUMP_KWARGS = dict(explicit_start=True, allow_unicode=True, indent=4)
-if yaml.__version__ >= "5.1":
-    _DUMP_KWARGS.update(sort_keys=False)
-
-
-def safe_dump(content, file):
-    """Calls yaml.safe_dump to write content to the file, with additional
-    parameters from _DUMP_KWARGS."""
-    yaml.safe_dump(content, file, **_DUMP_KWARGS)
diff --git a/src/antlion/logger.py b/src/antlion/logger.py
deleted file mode 100755
index 1d18ad8..0000000
--- a/src/antlion/logger.py
+++ /dev/null
@@ -1,319 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import datetime
-import logging
-import os
-import re
-
-from copy import copy
-
-from antlion import tracelogger
-from antlion.libs.logging import log_stream
-from antlion.libs.logging.log_stream import LogStyles
-
-log_line_format = "%(asctime)s.%(msecs).03d %(levelname)s %(message)s"
-# The micro seconds are added by the format string above,
-# so the time format does not include ms.
-log_line_time_format = "%Y-%m-%d %H:%M:%S"
-log_line_timestamp_len = 23
-
-logline_timestamp_re = re.compile("\d\d\d\d-\d\d-\d\d \d\d:\d\d:\d\d.\d\d\d")
-
-
-# yapf: disable
-class Style:
-    RESET  = '\033[0m'
-    BRIGHT = '\033[1m'
-    DIM    = '\033[2m'
-    NORMAL = '\033[22m'
-
-
-class Fore:
-    BLACK   = '\033[30m'
-    RED     = '\033[31m'
-    GREEN   = '\033[32m'
-    YELLOW  = '\033[33m'
-    BLUE    = '\033[34m'
-    MAGENTA = '\033[35m'
-    CYAN    = '\033[36m'
-    WHITE   = '\033[37m'
-    RESET   = '\033[39m'
-
-
-class Back:
-    BLACK   = '\033[40m'
-    RED     = '\033[41m'
-    GREEN   = '\033[42m'
-    YELLOW  = '\033[43m'
-    BLUE    = '\033[44m'
-    MAGENTA = '\033[45m'
-    CYAN    = '\033[46m'
-    WHITE   = '\033[47m'
-    RESET   = '\033[49m'
-
-
-LOG_LEVELS = {
-  'DEBUG':     {'level': 10, 'style': Fore.GREEN + Style.BRIGHT},
-  'CASE':      {'level': 11, 'style': Back.BLUE + Fore.WHITE + Style.BRIGHT},
-  'SUITE':     {'level': 12, 'style': Back.MAGENTA + Fore.WHITE + Style.BRIGHT},
-  'INFO':      {'level': 20, 'style': Style.NORMAL},
-  'STEP':      {'level': 15, 'style': Fore.WHITE + Style.BRIGHT},
-  'WARNING':   {'level': 30, 'style': Fore.YELLOW + Style.BRIGHT},
-  'ERROR':     {'level': 40, 'style': Fore.RED + Style.BRIGHT},
-  'EXCEPTION': {'level': 45, 'style': Back.RED + Fore.WHITE + Style.BRIGHT},
-  'DEVICE':    {'level': 51, 'style': Fore.CYAN + Style.BRIGHT},
-}
-# yapf: enable
-
-
-class ColoredLogFormatter(logging.Formatter):
-    def format(self, record):
-        colored_record = copy(record)
-        level_name = colored_record.levelname
-        style = LOG_LEVELS[level_name]["style"]
-        formatted_level_name = "%s%s%s" % (style, level_name, Style.RESET)
-        colored_record.levelname = formatted_level_name
-        return super().format(colored_record)
-
-
-def _parse_logline_timestamp(t):
-    """Parses a logline timestamp into a tuple.
-
-    Args:
-        t: Timestamp in logline format.
-
-    Returns:
-        An iterable of date and time elements in the order of month, day, hour,
-        minute, second, microsecond.
-    """
-    date, time = t.split(" ")
-    year, month, day = date.split("-")
-    h, m, s = time.split(":")
-    s, ms = s.split(".")
-    return year, month, day, h, m, s, ms
-
-
-def is_valid_logline_timestamp(timestamp):
-    if len(timestamp) == log_line_timestamp_len:
-        if logline_timestamp_re.match(timestamp):
-            return True
-    return False
-
-
-def logline_timestamp_comparator(t1, t2):
-    """Comparator for timestamps in logline format.
-
-    Args:
-        t1: Timestamp in logline format.
-        t2: Timestamp in logline format.
-
-    Returns:
-        -1 if t1 < t2; 1 if t1 > t2; 0 if t1 == t2.
-    """
-    dt1 = _parse_logline_timestamp(t1)
-    dt2 = _parse_logline_timestamp(t2)
-    for u1, u2 in zip(dt1, dt2):
-        if u1 < u2:
-            return -1
-        elif u1 > u2:
-            return 1
-    return 0
-
-
-def _get_timestamp(time_format, delta=None):
-    t = datetime.datetime.now()
-    if delta:
-        t = t + datetime.timedelta(seconds=delta)
-    return t.strftime(time_format)[:-3]
-
-
-def epoch_to_log_line_timestamp(epoch_time):
-    """Converts an epoch timestamp in ms to log line timestamp format, which
-    is readable for humans.
-
-    Args:
-        epoch_time: integer, an epoch timestamp in ms.
-
-    Returns:
-        A string that is the corresponding timestamp in log line timestamp
-        format.
-    """
-    s, ms = divmod(epoch_time, 1000)
-    d = datetime.datetime.fromtimestamp(s)
-    return d.strftime("%Y-%m-%d %H:%M:%S.") + str(ms)
-
-
-def get_log_line_timestamp(delta=None):
-    """Returns a timestamp in the format used by log lines.
-
-    Default is current time. If a delta is set, the return value will be
-    the current time offset by delta seconds.
-
-    Args:
-        delta: Number of seconds to offset from current time; can be negative.
-
-    Returns:
-        A timestamp in log line format with an offset.
-    """
-    return _get_timestamp("%Y-%m-%d %H:%M:%S.%f", delta)
-
-
-def get_log_file_timestamp(delta=None):
-    """Returns a timestamp in the format used for log file names.
-
-    Default is current time. If a delta is set, the return value will be
-    the current time offset by delta seconds.
-
-    Args:
-        delta: Number of seconds to offset from current time; can be negative.
-
-    Returns:
-        A timestamp in log file name format with an offset.
-    """
-    return _get_timestamp("%Y-%m-%d_%H-%M-%S-%f", delta)
-
-
-def _setup_test_logger(log_path, prefix=None):
-    """Customizes the root logger for a test run.
-
-    The logger object has a stream handler and a file handler. The stream
-    handler logs INFO level to the terminal, the file handler logs DEBUG
-    level to files.
-
-    Args:
-        log_path: Location of the log file.
-        prefix: A prefix for each log line in terminal.
-    """
-    logging.log_path = log_path
-    log_styles = [
-        LogStyles.LOG_INFO + LogStyles.TO_STDOUT,
-        LogStyles.DEFAULT_LEVELS + LogStyles.TESTCASE_LOG,
-    ]
-    terminal_format = log_line_format
-    if prefix:
-        terminal_format = "[{}] {}".format(prefix, log_line_format)
-    stream_formatter = ColoredLogFormatter(terminal_format, log_line_time_format)
-    file_formatter = logging.Formatter(log_line_format, log_line_time_format)
-    log = log_stream.create_logger(
-        "test_run",
-        "",
-        log_styles=log_styles,
-        stream_format=stream_formatter,
-        file_format=file_formatter,
-    )
-    log.setLevel(logging.DEBUG)
-    _enable_additional_log_levels()
-
-
-def _enable_additional_log_levels():
-    """Enables logging levels used for tracing tests and debugging devices."""
-    for log_type, log_data in LOG_LEVELS.items():
-        logging.addLevelName(log_data["level"], log_type)
-
-
-def kill_test_logger(logger):
-    """Cleans up a test logger object by removing all of its handlers.
-
-    Args:
-        logger: The logging object to clean up.
-    """
-    for h in list(logger.handlers):
-        logger.removeHandler(h)
-        if isinstance(h, logging.FileHandler):
-            h.close()
-
-
-def create_latest_log_alias(actual_path):
-    """Creates a symlink to the latest test run logs.
-
-    Args:
-        actual_path: The source directory where the latest test run's logs are.
-    """
-    link_path = os.path.join(os.path.dirname(actual_path), "latest")
-    if os.path.islink(link_path):
-        os.remove(link_path)
-    try:
-        os.symlink(actual_path, link_path)
-    except OSError:
-        logging.warning("Failed to create symlink to latest logs dir.", exc_info=True)
-
-
-def setup_test_logger(log_path, prefix=None):
-    """Customizes the root logger for a test run.
-
-    Args:
-        log_path: Location of the report file.
-        prefix: A prefix for each log line in terminal.
-        filename: Name of the files. The default is the time the objects
-            are requested.
-    """
-    os.makedirs(log_path, exist_ok=True)
-    _setup_test_logger(log_path, prefix)
-    create_latest_log_alias(log_path)
-
-
-def normalize_log_line_timestamp(log_line_timestamp):
-    """Replace special characters in log line timestamp with normal characters.
-
-    Args:
-        log_line_timestamp: A string in the log line timestamp format. Obtained
-            with get_log_line_timestamp.
-
-    Returns:
-        A string representing the same time as input timestamp, but without
-        special characters.
-    """
-    norm_tp = log_line_timestamp.replace(" ", "_")
-    norm_tp = norm_tp.replace(":", "-")
-    return norm_tp
-
-
-class LoggerAdapter(logging.LoggerAdapter):
-    """A LoggerAdapter class that takes in a lambda for transforming logs."""
-
-    def __init__(self, logging_lambda):
-        self.logging_lambda = logging_lambda
-        super(LoggerAdapter, self).__init__(logging.getLogger(), {})
-
-    def process(self, msg, kwargs):
-        return self.logging_lambda(msg), kwargs
-
-
-def create_logger(logging_lambda=lambda message: message):
-    """Returns a logger with logging defined by a given lambda.
-
-    Args:
-        logging_lambda: A lambda of the form:
-            >>> lambda log_message: return 'string'
-    """
-    return tracelogger.TraceLogger(LoggerAdapter(logging_lambda))
-
-
-def create_tagged_trace_logger(tag=""):
-    """Returns a logger that logs each line with the given prefix.
-
-    Args:
-        tag: The tag of the log line, E.g. if tag == tag123, the output
-            line would be:
-
-            <TESTBED> <TIME> <LOG_LEVEL> [tag123] logged message
-    """
-
-    def logging_lambda(msg):
-        return "[%s] %s" % (tag, msg)
-
-    return create_logger(logging_lambda)
diff --git a/src/antlion/records.py b/src/antlion/records.py
deleted file mode 100644
index 1c7ad23..0000000
--- a/src/antlion/records.py
+++ /dev/null
@@ -1,247 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""This module is where all the record definitions and record containers live.
-"""
-
-import collections
-import copy
-import io
-import json
-
-from antlion import logger
-from antlion.libs import yaml_writer
-
-from mobly.records import ExceptionRecord
-from mobly.records import OUTPUT_FILE_SUMMARY
-from mobly.records import TestResultEnums as MoblyTestResultEnums
-from mobly.records import TestResultRecord as MoblyTestResultRecord
-from mobly.records import TestResult as MoblyTestResult
-from mobly.records import TestSummaryEntryType
-from mobly.records import TestSummaryWriter as MoblyTestSummaryWriter
-
-
-class TestSummaryWriter(MoblyTestSummaryWriter):
-    """Writes test results to a summary file in real time. Inherits from Mobly's
-    TestSummaryWriter.
-    """
-
-    def dump(self, content, entry_type):
-        """Update Mobly's implementation of dump to work on OrderedDict.
-
-        See MoblyTestSummaryWriter.dump for documentation.
-        """
-        new_content = collections.OrderedDict(copy.deepcopy(content))
-        new_content["Type"] = entry_type.value
-        new_content.move_to_end("Type", last=False)
-        # Both user code and Mobly code can trigger this dump, hence the lock.
-        with self._lock:
-            # For Python3, setting the encoding on yaml.safe_dump does not work
-            # because Python3 file descriptors set an encoding by default, which
-            # PyYAML uses instead of the encoding on yaml.safe_dump. So, the
-            # encoding has to be set on the open call instead.
-            with io.open(self._path, "a", encoding="utf-8") as f:
-                # Use safe_dump here to avoid language-specific tags in final
-                # output.
-                yaml_writer.safe_dump(new_content, f)
-
-
-class TestResultEnums(MoblyTestResultEnums):
-    """Enums used for TestResultRecord class. Inherits from Mobly's
-    TestResultEnums.
-
-    Includes the tokens to mark test result with, and the string names for each
-    field in TestResultRecord.
-    """
-
-    RECORD_LOG_BEGIN_TIME = "Log Begin Time"
-    RECORD_LOG_END_TIME = "Log End Time"
-
-
-class TestResultRecord(MoblyTestResultRecord):
-    """A record that holds the information of a test case execution. This class
-    inherits from Mobly's TestResultRecord class.
-
-    Attributes:
-        test_name: A string representing the name of the test case.
-        begin_time: Epoch timestamp of when the test case started.
-        end_time: Epoch timestamp of when the test case ended.
-        self.uid: Unique identifier of a test case.
-        self.result: Test result, PASS/FAIL/SKIP.
-        self.extras: User defined extra information of the test result.
-        self.details: A string explaining the details of the test case.
-    """
-
-    def __init__(self, t_name, t_class=None):
-        super().__init__(t_name, t_class)
-        self.log_begin_time = None
-        self.log_end_time = None
-
-    def test_begin(self):
-        """Call this when the test case it records begins execution.
-
-        Sets the begin_time of this record.
-        """
-        super().test_begin()
-        self.log_begin_time = logger.epoch_to_log_line_timestamp(self.begin_time)
-
-    def _test_end(self, result, e):
-        """Class internal function to signal the end of a test case execution.
-
-        Args:
-            result: One of the TEST_RESULT enums in TestResultEnums.
-            e: A test termination signal (usually an exception object). It can
-                be any exception instance or of any subclass of
-                acts.signals.TestSignal.
-        """
-        super()._test_end(result, e)
-        if self.end_time:
-            self.log_end_time = logger.epoch_to_log_line_timestamp(self.end_time)
-
-    def to_dict(self):
-        """Gets a dictionary representing the content of this class.
-
-        Returns:
-            A dictionary representing the content of this class.
-        """
-        d = collections.OrderedDict()
-        d[TestResultEnums.RECORD_NAME] = self.test_name
-        d[TestResultEnums.RECORD_CLASS] = self.test_class
-        d[TestResultEnums.RECORD_BEGIN_TIME] = self.begin_time
-        d[TestResultEnums.RECORD_END_TIME] = self.end_time
-        d[TestResultEnums.RECORD_LOG_BEGIN_TIME] = self.log_begin_time
-        d[TestResultEnums.RECORD_LOG_END_TIME] = self.log_end_time
-        d[TestResultEnums.RECORD_RESULT] = self.result
-        d[TestResultEnums.RECORD_UID] = self.uid
-        d[TestResultEnums.RECORD_EXTRAS] = self.extras
-        d[TestResultEnums.RECORD_DETAILS] = self.details
-        d[TestResultEnums.RECORD_EXTRA_ERRORS] = {
-            key: value.to_dict() for (key, value) in self.extra_errors.items()
-        }
-        d[TestResultEnums.RECORD_STACKTRACE] = self.stacktrace
-        return d
-
-    def json_str(self):
-        """Converts this test record to a string in json format.
-
-        Format of the json string is:
-            {
-                'Test Name': <test name>,
-                'Begin Time': <epoch timestamp>,
-                'Details': <details>,
-                ...
-            }
-
-        Returns:
-            A json-format string representing the test record.
-        """
-        return json.dumps(self.to_dict())
-
-
-class TestResult(MoblyTestResult):
-    """A class that contains metrics of a test run. This class inherits from
-    Mobly's TestResult class.
-
-    This class is essentially a container of TestResultRecord objects.
-
-    Attributes:
-        self.requested: A list of strings, each is the name of a test requested
-            by user.
-        self.failed: A list of records for tests failed.
-        self.executed: A list of records for tests that were actually executed.
-        self.passed: A list of records for tests passed.
-        self.skipped: A list of records for tests skipped.
-    """
-
-    def __add__(self, r):
-        """Overrides '+' operator for TestResult class.
-
-        The add operator merges two TestResult objects by concatenating all of
-        their lists together.
-
-        Args:
-            r: another instance of TestResult to be added
-
-        Returns:
-            A TestResult instance that's the sum of two TestResult instances.
-        """
-        if not isinstance(r, MoblyTestResult):
-            raise TypeError("Operand %s of type %s is not a TestResult." % (r, type(r)))
-        sum_result = TestResult()
-        for name in sum_result.__dict__:
-            r_value = getattr(r, name)
-            l_value = getattr(self, name)
-            if isinstance(r_value, list):
-                setattr(sum_result, name, l_value + r_value)
-        return sum_result
-
-    def json_str(self):
-        """Converts this test result to a string in json format.
-
-        Format of the json string is:
-            {
-                "Results": [
-                    {<executed test record 1>},
-                    {<executed test record 2>},
-                    ...
-                ],
-                "Summary": <summary dict>
-            }
-
-        Returns:
-            A json-format string representing the test results.
-        """
-        d = collections.OrderedDict()
-        d["ControllerInfo"] = {
-            record.controller_name: record.controller_info
-            for record in self.controller_info
-        }
-        d["Results"] = [record.to_dict() for record in self.executed]
-        d["Summary"] = self.summary_dict()
-        d["Error"] = self.errors_list()
-        json_str = json.dumps(d, indent=4)
-        return json_str
-
-    def summary_str(self):
-        """Gets a string that summarizes the stats of this test result.
-
-        The summary provides the counts of how many test cases fall into each
-        category, like "Passed", "Failed" etc.
-
-        Format of the string is:
-            Requested <int>, Executed <int>, ...
-
-        Returns:
-            A summary string of this test result.
-        """
-        l = ["%s %s" % (k, v) for k, v in self.summary_dict().items()]
-        msg = ", ".join(l)
-        return msg
-
-    def errors_list(self):
-        l = list()
-        for record in self.error:
-            if isinstance(record, TestResultRecord):
-                keys = [
-                    TestResultEnums.RECORD_NAME,
-                    TestResultEnums.RECORD_DETAILS,
-                    TestResultEnums.RECORD_EXTRA_ERRORS,
-                ]
-            elif isinstance(record, ExceptionRecord):
-                keys = [TestResultEnums.RECORD_DETAILS, TestResultEnums.RECORD_POSITION]
-            else:
-                return []
-            l.append({k: record.to_dict()[k] for k in keys})
-        return l
diff --git a/src/antlion/signals.py b/src/antlion/signals.py
deleted file mode 100644
index a3599f4..0000000
--- a/src/antlion/signals.py
+++ /dev/null
@@ -1,19 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""This module is where all the test signal classes and related utilities live.
-"""
-
-from mobly.signals import *
diff --git a/src/antlion/test_decorators.py b/src/antlion/test_decorators.py
deleted file mode 100644
index a152f4f..0000000
--- a/src/antlion/test_decorators.py
+++ /dev/null
@@ -1,99 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion import signals
-
-
-def __select_last(test_signals, _):
-    return test_signals[-1]
-
-
-def repeated_test(num_passes, acceptable_failures=0, result_selector=__select_last):
-    """A decorator that runs a test case multiple times.
-
-    This decorator can be used to run a test multiple times and aggregate the
-    data into a single test result. By setting `result_selector`, the user can
-    access the returned result of each run, allowing them to average results,
-    return the median, or gather and return standard deviation values.
-
-    This decorator should be used on test cases, and should not be used on
-    static or class methods. The test case must take in an additional argument,
-    `attempt_number`, which returns the current attempt number, starting from
-    1.
-
-    Note that any TestSignal intended to abort or skip the test will take
-    abort or skip immediately.
-
-    Args:
-        num_passes: The number of times the test needs to pass to report the
-            test case as passing.
-        acceptable_failures: The number of failures accepted. If the failures
-            exceeds this number, the test will stop repeating. The maximum
-            number of runs is `num_passes + acceptable_failures`. If the test
-            does fail, result_selector will still be called.
-        result_selector: A lambda that takes in the list of TestSignals and
-            returns the test signal to report the test case as. Note that the
-            list also contains any uncaught exceptions from the test execution.
-    """
-
-    def decorator(func):
-        if not func.__name__.startswith("test_"):
-            raise ValueError('Tests must start with "test_".')
-
-        def test_wrapper(self):
-            num_failures = 0
-            num_seen_passes = 0
-            test_signals_received = []
-            for i in range(num_passes + acceptable_failures):
-                try:
-                    func(self, i + 1)
-                except (
-                    signals.TestFailure,
-                    signals.TestError,
-                    AssertionError,
-                ) as signal:
-                    test_signals_received.append(signal)
-                    num_failures += 1
-                except signals.TestPass as signal:
-                    test_signals_received.append(signal)
-                    num_seen_passes += 1
-                except (signals.TestSignal, KeyboardInterrupt):
-                    raise
-                except Exception as signal:
-                    test_signals_received.append(signal)
-                    num_failures += 1
-                else:
-                    num_seen_passes += 1
-                    test_signals_received.append(
-                        signals.TestPass(
-                            "Test iteration %s of %s passed without details."
-                            % (i, func.__name__)
-                        )
-                    )
-
-                if num_failures > acceptable_failures:
-                    break
-                elif num_seen_passes == num_passes:
-                    break
-                else:
-                    self.teardown_test()
-                    self.setup_test()
-
-            raise result_selector(test_signals_received, self)
-
-        return test_wrapper
-
-    return decorator
diff --git a/src/antlion/test_runner.py b/src/antlion/test_runner.py
deleted file mode 100644
index bcb516f..0000000
--- a/src/antlion/test_runner.py
+++ /dev/null
@@ -1,328 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import itertools
-
-import importlib
-import inspect
-import fnmatch
-import json
-import logging
-import os
-import sys
-
-from antlion import base_test
-from antlion import keys
-from antlion import logger
-from antlion import records
-from antlion import signals
-from antlion import utils
-from antlion import error
-
-from mobly.records import ExceptionRecord
-
-
-def _find_test_class():
-    """Finds the test class in a test script.
-
-    Walk through module members and find the subclass of BaseTestClass. Only
-    one subclass is allowed in a test script.
-
-    Returns:
-        The test class in the test module.
-    """
-    test_classes = []
-    main_module_members = sys.modules["__main__"]
-    for _, module_member in main_module_members.__dict__.items():
-        if inspect.isclass(module_member):
-            if issubclass(module_member, base_test.BaseTestClass):
-                test_classes.append(module_member)
-    if len(test_classes) != 1:
-        logging.error(
-            "Expected 1 test class per file, found %s.",
-            [t.__name__ for t in test_classes],
-        )
-        sys.exit(1)
-    return test_classes[0]
-
-
-def execute_one_test_class(test_class, test_config, test_identifier):
-    """Executes one specific test class.
-
-    You could call this function in your own cli test entry point if you choose
-    not to use act.py.
-
-    Args:
-        test_class: A subclass of acts.base_test.BaseTestClass that has the test
-                    logic to be executed.
-        test_config: A dict representing one set of configs for a test run.
-        test_identifier: A list of tuples specifying which test cases to run in
-                         the test class.
-
-    Returns:
-        True if all tests passed without any error, False otherwise.
-
-    Raises:
-        If signals.TestAbortAll is raised by a test run, pipe it through.
-    """
-    tr = TestRunner(test_config, test_identifier)
-    try:
-        tr.run(test_class)
-        return tr.results.is_all_pass
-    except signals.TestAbortAll:
-        raise
-    except:
-        logging.exception("Exception when executing %s.", tr.testbed_name)
-    finally:
-        tr.stop()
-
-
-class TestRunner(object):
-    """The class that instantiates test classes, executes test cases, and
-    report results.
-
-    Attributes:
-        test_run_config: The TestRunConfig object specifying what tests to run.
-        id: A string that is the unique identifier of this test run.
-        log: The logger object used throughout this test run.
-        test_classes: A dictionary where we can look up the test classes by name
-            to instantiate. Supports unix shell style wildcards.
-        run_list: A list of tuples specifying what tests to run.
-        results: The test result object used to record the results of this test
-            run.
-        running: A boolean signifies whether this test run is ongoing or not.
-    """
-
-    def __init__(self, test_configs, run_list):
-        self.test_run_config = test_configs
-        self.testbed_name = self.test_run_config.testbed_name
-        start_time = logger.get_log_file_timestamp()
-        self.id = "{}@{}".format(self.testbed_name, start_time)
-        self.test_run_config.log_path = os.path.abspath(
-            os.path.join(self.test_run_config.log_path, self.testbed_name, start_time)
-        )
-        logger.setup_test_logger(self.log_path, self.testbed_name)
-        self.log = logging.getLogger()
-        self.test_run_config.summary_writer = records.TestSummaryWriter(
-            os.path.join(self.log_path, records.OUTPUT_FILE_SUMMARY)
-        )
-        self.run_list = run_list
-        self.dump_config()
-        self.results = records.TestResult()
-        self.running = False
-
-    @property
-    def log_path(self):
-        """The path to write logs of this test run to."""
-        return self.test_run_config.log_path
-
-    @property
-    def summary_writer(self):
-        """The object responsible for writing summary and results data."""
-        return self.test_run_config.summary_writer
-
-    def import_test_modules(self, test_paths):
-        """Imports test classes from test scripts.
-
-        1. Locate all .py files under test paths.
-        2. Import the .py files as modules.
-        3. Find the module members that are test classes.
-        4. Categorize the test classes by name.
-
-        Args:
-            test_paths: A list of directory paths where the test files reside.
-
-        Returns:
-            A dictionary where keys are test class name strings, values are
-            actual test classes that can be instantiated.
-        """
-
-        def is_testfile_name(name, ext):
-            if ext == ".py":
-                if name.endswith("Test") or name.endswith("_test"):
-                    return True
-            return False
-
-        file_list = utils.find_files(test_paths, is_testfile_name)
-        test_classes = {}
-        for path, name, _ in file_list:
-            sys.path.append(path)
-            try:
-                with utils.SuppressLogOutput(log_levels=[logging.INFO, logging.ERROR]):
-                    module = importlib.import_module(name)
-            except Exception as e:
-                logging.debug("Failed to import %s: %s", path, str(e))
-                for test_cls_name, _ in self.run_list:
-                    alt_name = name.replace("_", "").lower()
-                    alt_cls_name = test_cls_name.lower()
-                    # Only block if a test class on the run list causes an
-                    # import error. We need to check against both naming
-                    # conventions: AaaBbb and aaa_bbb.
-                    if name == test_cls_name or alt_name == alt_cls_name:
-                        msg = (
-                            "Encountered error importing test class %s, " "abort."
-                        ) % test_cls_name
-                        # This exception is logged here to help with debugging
-                        # under py2, because "raise X from Y" syntax is only
-                        # supported under py3.
-                        self.log.exception(msg)
-                        raise ValueError(msg)
-                continue
-            for member_name in dir(module):
-                if not member_name.startswith("__"):
-                    if member_name.endswith("Test"):
-                        test_class = getattr(module, member_name)
-                        if inspect.isclass(test_class):
-                            test_classes[member_name] = test_class
-        return test_classes
-
-    def run_test_class(self, test_cls_name, test_cases=None):
-        """Instantiates and executes a test class.
-
-        If test_cases is None, the test cases listed by self.tests will be
-        executed instead. If self.tests is empty as well, no test case in this
-        test class will be executed.
-
-        Args:
-            test_cls_name: Name of the test class to execute.
-            test_cases: List of test case names to execute within the class.
-
-        Raises:
-            ValueError is raised if the requested test class could not be found
-            in the test_paths directories.
-        """
-        matches = fnmatch.filter(self.test_classes.keys(), test_cls_name)
-        if not matches:
-            self.log.info(
-                "Cannot find test class %s or classes matching pattern, "
-                "skipping for now." % test_cls_name
-            )
-            record = records.TestResultRecord("*all*", test_cls_name)
-            record.test_skip(signals.TestSkip("Test class does not exist."))
-            self.results.add_record(record)
-            return
-        if matches != [test_cls_name]:
-            self.log.info(
-                "Found classes matching pattern %s: %s", test_cls_name, matches
-            )
-
-        for test_cls_name_match in matches:
-            test_cls = self.test_classes[test_cls_name_match]
-            test_cls_instance = test_cls(self.test_run_config)
-            try:
-                cls_result = test_cls_instance.run(test_cases)
-                self.results += cls_result
-            except signals.TestAbortAll as e:
-                self.results += e.results
-                raise e
-
-    def run(self, test_class=None):
-        """Executes test cases.
-
-        This will instantiate controller and test classes, and execute test
-        classes. This can be called multiple times to repeatedly execute the
-        requested test cases.
-
-        A call to TestRunner.stop should eventually happen to conclude the life
-        cycle of a TestRunner.
-
-        Args:
-            test_class: The python module of a test class. If provided, run this
-                        class; otherwise, import modules in under test_paths
-                        based on run_list.
-        """
-        if not self.running:
-            self.running = True
-
-        if test_class:
-            self.test_classes = {test_class.__name__: test_class}
-        else:
-            t_paths = self.test_run_config.controller_configs[
-                keys.Config.key_test_paths.value
-            ]
-            self.test_classes = self.import_test_modules(t_paths)
-        self.log.debug("Executing run list %s.", self.run_list)
-        for test_cls_name, test_case_names in self.run_list:
-            if not self.running:
-                break
-
-            if test_case_names:
-                self.log.debug(
-                    "Executing test cases %s in test class %s.",
-                    test_case_names,
-                    test_cls_name,
-                )
-            else:
-                self.log.debug("Executing test class %s", test_cls_name)
-
-            try:
-                self.run_test_class(test_cls_name, test_case_names)
-            except error.ActsError as e:
-                self.results.error.append(ExceptionRecord(e))
-                self.log.error("Test Runner Error: %s" % e.details)
-            except signals.TestAbortAll as e:
-                self.log.warning("Abort all subsequent test classes. Reason: %s", e)
-                raise
-
-    def stop(self):
-        """Releases resources from test run. Should always be called after
-        TestRunner.run finishes.
-
-        This function concludes a test run and writes out a test report.
-        """
-        if self.running:
-            msg = "\nSummary for test run %s: %s\n" % (
-                self.id,
-                self.results.summary_str(),
-            )
-            self._write_results_to_file()
-            self.log.info(msg.strip())
-            logger.kill_test_logger(self.log)
-            self.running = False
-
-    def _write_results_to_file(self):
-        """Writes test results to file(s) in a serializable format."""
-        # Old JSON format
-        path = os.path.join(self.log_path, "test_run_summary.json")
-        with open(path, "w") as f:
-            f.write(self.results.json_str())
-        # New YAML format
-        self.summary_writer.dump(
-            self.results.summary_dict(), records.TestSummaryEntryType.SUMMARY
-        )
-
-    def dump_config(self):
-        """Writes the test config to a JSON file under self.log_path"""
-        config_path = os.path.join(self.log_path, "test_configs.json")
-        with open(config_path, "a") as f:
-            json.dump(
-                dict(
-                    itertools.chain(
-                        self.test_run_config.user_params.items(),
-                        self.test_run_config.controller_configs.items(),
-                    )
-                ),
-                f,
-                skipkeys=True,
-                indent=4,
-            )
-
-    def write_test_campaign(self):
-        """Log test campaign file."""
-        path = os.path.join(self.log_path, "test_campaign.log")
-        with open(path, "w") as f:
-            for test_class, test_cases in self.run_list:
-                f.write("%s:\n%s" % (test_class, ",\n".join(test_cases)))
-                f.write("\n\n")
diff --git a/src/antlion/test_utils/abstract_devices/wlan_device.py b/src/antlion/test_utils/abstract_devices/wlan_device.py
deleted file mode 100644
index 5891012..0000000
--- a/src/antlion/test_utils/abstract_devices/wlan_device.py
+++ /dev/null
@@ -1,598 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import inspect
-import logging
-
-from antlion.controllers import iperf_client
-from antlion.controllers.android_device import AndroidDevice
-from antlion.controllers.fuchsia_device import FuchsiaDevice
-from antlion.test_utils.wifi import wifi_test_utils as awutils
-from antlion.utils import adb_shell_ping
-
-FUCHSIA_VALID_SECURITY_TYPES = {"none", "wep", "wpa", "wpa2", "wpa3"}
-
-
-def create_wlan_device(hardware_device):
-    """Creates a generic WLAN device based on type of device that is sent to
-    the functions.
-
-    Args:
-        hardware_device: A WLAN hardware device that is supported by ACTS.
-    """
-    if isinstance(hardware_device, FuchsiaDevice):
-        return FuchsiaWlanDevice(hardware_device)
-    elif isinstance(hardware_device, AndroidDevice):
-        return AndroidWlanDevice(hardware_device)
-    else:
-        raise ValueError(
-            "Unable to create WlanDevice for type %s" % type(hardware_device)
-        )
-
-
-class WlanDevice(object):
-    """Class representing a generic WLAN device.
-
-    Each object of this class represents a generic WLAN device.
-    Android device and Fuchsia devices are the currently supported devices/
-
-    Attributes:
-        device: A generic WLAN device.
-    """
-
-    def __init__(self, device):
-        self.device = device
-        self.log = logging
-        self.identifier = None
-
-    def wifi_toggle_state(self, state):
-        """Base generic WLAN interface.  Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError(
-            "{} must be defined.".format(inspect.currentframe().f_code.co_name)
-        )
-
-    def reset_wifi(self):
-        """Base generic WLAN interface.  Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError(
-            "{} must be defined.".format(inspect.currentframe().f_code.co_name)
-        )
-
-    def take_bug_report(self, test_name=None, begin_time=None):
-        """Base generic WLAN interface.  Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError(
-            "{} must be defined.".format(inspect.currentframe().f_code.co_name)
-        )
-
-    def get_log(self, test_name, begin_time):
-        """Base generic WLAN interface.  Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError(
-            "{} must be defined.".format(inspect.currentframe().f_code.co_name)
-        )
-
-    def turn_location_off_and_scan_toggle_off(self):
-        """Base generic WLAN interface.  Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError(
-            "{} must be defined.".format(inspect.currentframe().f_code.co_name)
-        )
-
-    def associate(
-        self,
-        target_ssid,
-        target_pwd=None,
-        check_connectivity=True,
-        hidden=False,
-        target_security=None,
-    ):
-        """Base generic WLAN interface.  Only called if not overriden by
-        another supported device.
-        """
-        raise NotImplementedError(
-            "{} must be defined.".format(inspect.currentframe().f_code.co_name)
-        )
-
-    def disconnect(self):
-        """Base generic WLAN interface.  Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError(
-            "{} must be defined.".format(inspect.currentframe().f_code.co_name)
-        )
-
-    def get_wlan_interface_id_list(self):
-        """Base generic WLAN interface.  Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError(
-            "{} must be defined.".format(inspect.currentframe().f_code.co_name)
-        )
-
-    def get_default_wlan_test_interface(self):
-        raise NotImplementedError(
-            "{} must be defined.".format(inspect.currentframe().f_code.co_name)
-        )
-
-    def destroy_wlan_interface(self, iface_id):
-        """Base generic WLAN interface.  Only called if not overridden by
-        another supported device.
-        """
-        raise NotImplementedError(
-            "{} must be defined.".format(inspect.currentframe().f_code.co_name)
-        )
-
-    def send_command(self, command):
-        raise NotImplementedError(
-            "{} must be defined.".format(inspect.currentframe().f_code.co_name)
-        )
-
-    def is_connected(self, ssid=None):
-        raise NotImplementedError(
-            "{} must be defined.".format(inspect.currentframe().f_code.co_name)
-        )
-
-    def can_ping(
-        self,
-        dest_ip,
-        count=3,
-        interval=1000,
-        timeout=1000,
-        size=25,
-        additional_ping_params=None,
-    ):
-        raise NotImplementedError(
-            "{} must be defined.".format(inspect.currentframe().f_code.co_name)
-        )
-
-    def ping(
-        self,
-        dest_ip,
-        count=3,
-        interval=1000,
-        timeout=1000,
-        size=25,
-        additional_ping_params=None,
-    ):
-        raise NotImplementedError(
-            "{} must be defined.".format(inspect.currentframe().f_code.co_name)
-        )
-
-    def hard_power_cycle(self, pdus=None):
-        raise NotImplementedError(
-            "{} must be defined.".format(inspect.currentframe().f_code.co_name)
-        )
-
-    def save_network(self, ssid):
-        raise NotImplementedError(
-            "{} must be defined.".format(inspect.currentframe().f_code.co_name)
-        )
-
-    def clear_saved_networks(self):
-        raise NotImplementedError(
-            "{} must be defined.".format(inspect.currentframe().f_code.co_name)
-        )
-
-    def create_iperf_client(self, test_interface=None):
-        raise NotImplementedError(
-            "{} must be defined.".format(inspect.currentframe().f_code.co_name)
-        )
-
-    def feature_is_present(self, feature: str) -> bool:
-        raise NotImplementedError(
-            "{} must be defined.".format(inspect.currentframe().f_code.co_name)
-        )
-
-
-class AndroidWlanDevice(WlanDevice):
-    """Class wrapper for an Android WLAN device.
-
-    Each object of this class represents a generic WLAN device.
-    Android device and Fuchsia devices are the currently supported devices/
-
-    Attributes:
-        android_device: An Android WLAN device.
-    """
-
-    def __init__(self, android_device):
-        super().__init__(android_device)
-        self.identifier = android_device.serial
-
-    def wifi_toggle_state(self, state):
-        awutils.wifi_toggle_state(self.device, state)
-
-    def reset_wifi(self):
-        awutils.reset_wifi(self.device)
-
-    def take_bug_report(self, test_name=None, begin_time=None):
-        self.device.take_bug_report(test_name, begin_time)
-
-    def get_log(self, test_name, begin_time):
-        self.device.cat_adb_log(test_name, begin_time)
-
-    def turn_location_off_and_scan_toggle_off(self):
-        awutils.turn_location_off_and_scan_toggle_off(self.device)
-
-    def associate(
-        self,
-        target_ssid,
-        target_pwd=None,
-        key_mgmt=None,
-        check_connectivity=True,
-        hidden=False,
-        target_security=None,
-    ):
-        """Function to associate an Android WLAN device.
-
-        Args:
-            target_ssid: SSID to associate to.
-            target_pwd: Password for the SSID, if necessary.
-            key_mgmt: The hostapd wpa_key_mgmt value, distinguishes wpa3 from
-                wpa2 for android tests.
-            check_connectivity: Whether to check for internet connectivity.
-            hidden: Whether the network is hidden.
-        Returns:
-            True if successfully connected to WLAN, False if not.
-        """
-        network = {"SSID": target_ssid, "hiddenSSID": hidden}
-        if target_pwd:
-            network["password"] = target_pwd
-        if key_mgmt:
-            network["security"] = key_mgmt
-        try:
-            awutils.connect_to_wifi_network(
-                self.device,
-                network,
-                check_connectivity=check_connectivity,
-                hidden=hidden,
-            )
-            return True
-        except Exception as e:
-            self.device.log.info("Failed to associated (%s)" % e)
-            return False
-
-    def disconnect(self):
-        awutils.turn_location_off_and_scan_toggle_off(self.device)
-
-    def get_wlan_interface_id_list(self):
-        pass
-
-    def get_default_wlan_test_interface(self):
-        return "wlan0"
-
-    def destroy_wlan_interface(self, iface_id):
-        pass
-
-    def send_command(self, command):
-        return self.device.adb.shell(str(command))
-
-    def is_connected(self, ssid=None):
-        wifi_info = self.device.droid.wifiGetConnectionInfo()
-        if ssid:
-            return "BSSID" in wifi_info and wifi_info["SSID"] == ssid
-        return "BSSID" in wifi_info
-
-    def can_ping(
-        self,
-        dest_ip,
-        count=3,
-        interval=1000,
-        timeout=1000,
-        size=25,
-        additional_ping_params=None,
-    ):
-        return adb_shell_ping(
-            self.device, dest_ip=dest_ip, count=count, timeout=timeout
-        )
-
-    def ping(self, dest_ip, count=3, interval=1000, timeout=1000, size=25):
-        pass
-
-    def hard_power_cycle(self, pdus):
-        pass
-
-    def save_network(self, ssid):
-        pass
-
-    def clear_saved_networks(self):
-        pass
-
-    def create_iperf_client(self, test_interface=None):
-        """Returns an iperf client on the Android, without requiring a
-        specific config.
-
-        Args:
-            test_interface: optional, string, name of test interface.
-
-        Returns:
-            IPerfClient object
-        """
-        if not test_interface:
-            test_interface = self.get_default_wlan_test_interface()
-
-        return iperf_client.IPerfClientOverAdb(
-            android_device_or_serial=self.device, test_interface=test_interface
-        )
-
-    def feature_is_present(self, feature: str) -> bool:
-        pass
-
-
-class FuchsiaWlanDevice(WlanDevice):
-    """Class wrapper for an Fuchsia WLAN device.
-
-    Each object of this class represents a generic WLAN device.
-    Android device and Fuchsia devices are the currently supported devices/
-
-    Attributes:
-        fuchsia_device: A Fuchsia WLAN device.
-    """
-
-    device: FuchsiaDevice
-
-    def __init__(self, fuchsia_device):
-        super().__init__(fuchsia_device)
-        self.identifier = fuchsia_device.ip
-        self.device.configure_wlan()
-
-    def wifi_toggle_state(self, state):
-        """Stub for Fuchsia implementation."""
-
-    def reset_wifi(self):
-        """Stub for Fuchsia implementation."""
-
-    def take_bug_report(self, test_name=None, begin_time=None):
-        """Stub for Fuchsia implementation."""
-        self.device.take_bug_report(test_name, begin_time)
-
-    def get_log(self, test_name, begin_time):
-        """Stub for Fuchsia implementation."""
-
-    def turn_location_off_and_scan_toggle_off(self):
-        """Stub for Fuchsia implementation."""
-
-    def associate(
-        self,
-        target_ssid,
-        target_pwd=None,
-        key_mgmt=None,
-        check_connectivity=True,
-        hidden=False,
-        target_security=None,
-    ):
-        """Function to associate a Fuchsia WLAN device.
-
-        Args:
-            target_ssid: SSID to associate to.
-            target_pwd: Password for the SSID, if necessary.
-            key_mgmt: the hostapd wpa_key_mgmt, if specified.
-            check_connectivity: Whether to check for internet connectivity.
-            hidden: Whether the network is hidden.
-            target_security: string, target security for network, used to
-                save the network in policy connects (see wlan_policy_lib)
-        Returns:
-            True if successfully connected to WLAN, False if not.
-        """
-        if self.device.association_mechanism == "drivers":
-            bss_scan_response = self.device.sl4f.wlan_lib.wlanScanForBSSInfo()
-            if bss_scan_response.get("error"):
-                self.log.error(
-                    "Scan for BSS info failed. Err: %s" % bss_scan_response["error"]
-                )
-                return False
-
-            bss_descs_for_ssid = bss_scan_response["result"].get(target_ssid, None)
-            if not bss_descs_for_ssid or len(bss_descs_for_ssid) < 1:
-                self.log.error(
-                    "Scan failed to find a BSS description for target_ssid %s"
-                    % target_ssid
-                )
-                return False
-
-            connection_response = self.device.sl4f.wlan_lib.wlanConnectToNetwork(
-                target_ssid, bss_descs_for_ssid[0], target_pwd=target_pwd
-            )
-            return self.device.check_connect_response(connection_response)
-        else:
-            return self.device.wlan_policy_controller.save_and_connect(
-                target_ssid, target_security, password=target_pwd
-            )
-
-    def disconnect(self):
-        """Function to disconnect from a Fuchsia WLAN device.
-        Asserts if disconnect was not successful.
-        """
-        if self.device.association_mechanism == "drivers":
-            disconnect_response = self.device.sl4f.wlan_lib.wlanDisconnect()
-            return self.device.check_disconnect_response(disconnect_response)
-        else:
-            return (
-                self.device.wlan_policy_controller.remove_all_networks_and_wait_for_no_connections()
-            )
-
-    def status(self):
-        return self.device.sl4f.wlan_lib.wlanStatus()
-
-    def can_ping(
-        self,
-        dest_ip,
-        count=3,
-        interval=1000,
-        timeout=1000,
-        size=25,
-        additional_ping_params=None,
-    ):
-        return self.device.can_ping(
-            dest_ip,
-            count=count,
-            interval=interval,
-            timeout=timeout,
-            size=size,
-            additional_ping_params=additional_ping_params,
-        )
-
-    def ping(
-        self,
-        dest_ip,
-        count=3,
-        interval=1000,
-        timeout=1000,
-        size=25,
-        additional_ping_params=None,
-    ):
-        return self.device.ping(
-            dest_ip,
-            count=count,
-            interval=interval,
-            timeout=timeout,
-            size=size,
-            additional_ping_params=additional_ping_params,
-        )
-
-    def get_wlan_interface_id_list(self):
-        """Function to list available WLAN interfaces.
-
-        Returns:
-            A list of wlan interface IDs.
-        """
-        return self.device.sl4f.wlan_lib.wlanGetIfaceIdList().get("result")
-
-    def get_default_wlan_test_interface(self):
-        """Returns name of the WLAN client interface"""
-        return self.device.wlan_client_test_interface_name
-
-    def destroy_wlan_interface(self, iface_id):
-        """Function to associate a Fuchsia WLAN device.
-
-        Args:
-            target_ssid: SSID to associate to.
-            target_pwd: Password for the SSID, if necessary.
-            check_connectivity: Whether to check for internet connectivity.
-            hidden: Whether the network is hidden.
-        Returns:
-            True if successfully destroyed wlan interface, False if not.
-        """
-        result = self.device.sl4f.wlan_lib.wlanDestroyIface(iface_id)
-        if result.get("error") is None:
-            return True
-        else:
-            self.log.error(
-                "Failed to destroy interface with: {}".format(result.get("error"))
-            )
-            return False
-
-    def send_command(self, command):
-        return self.device.ssh.run(str(command)).stdout
-
-    def is_connected(self, ssid=None):
-        """Determines if wlan_device is connected to wlan network.
-
-        Args:
-            ssid (optional): string, to check if device is connect to a specific
-                network.
-
-        Returns:
-            True, if connected to a network or to the correct network when SSID
-                is provided.
-            False, if not connected or connect to incorrect network when SSID is
-                provided.
-        """
-        response = self.status()
-        if response.get("error"):
-            raise ConnectionError("Failed to get client network connection status")
-        result = response.get("result")
-        if isinstance(result, dict):
-            connected_to = result.get("Connected")
-            # TODO(https://fxbug.dev/85938): Remove backwards compatibility once
-            # ACTS is versioned with Fuchsia.
-            if not connected_to:
-                connected_to = result.get("connected_to")
-            if not connected_to:
-                return False
-
-            if ssid:
-                # Replace encoding errors instead of raising an exception.
-                # Since `ssid` is a string, this will not affect the test
-                # for equality.
-                connected_ssid = bytearray(connected_to["ssid"]).decode(
-                    encoding="utf-8", errors="replace"
-                )
-                return ssid == connected_ssid
-            return True
-        return False
-
-    def hard_power_cycle(self, pdus):
-        self.device.reboot(reboot_type="hard", testbed_pdus=pdus)
-
-    def save_network(self, target_ssid, security_type=None, target_pwd=None):
-        if self.device.association_mechanism == "drivers":
-            raise EnvironmentError(
-                "Cannot save network using the drivers. Saved networks are a "
-                "policy layer concept."
-            )
-        if security_type and security_type not in FUCHSIA_VALID_SECURITY_TYPES:
-            raise TypeError("Invalid security type: %s" % security_type)
-        if not self.device.wlan_policy_controller.save_network(
-            target_ssid, security_type, password=target_pwd
-        ):
-            raise EnvironmentError("Failed to save network: %s" % target_ssid)
-
-    def clear_saved_networks(self):
-        if self.device.association_mechanism == "drivers":
-            raise EnvironmentError(
-                "Cannot clear saved network using the drivers. Saved networks "
-                "are a policy layer concept."
-            )
-        if not self.device.wlan_policy_controller.remove_all_networks():
-            raise EnvironmentError("Failed to clear saved networks")
-
-    def create_iperf_client(self, test_interface=None):
-        """Returns an iperf client on the FuchsiaDevice, without requiring a
-        specific config.
-
-        Args:
-            test_interface: optional, string, name of test interface. Defaults
-                to first found wlan client interface.
-
-        Returns:
-            IPerfClient object
-        """
-        if not test_interface:
-            test_interface = self.get_default_wlan_test_interface()
-
-        # A package server is necessary to acquire the iperf3 client for
-        # some builds.
-        self.device.start_package_server()
-
-        return iperf_client.IPerfClientOverSsh(
-            {
-                "user": "fuchsia",
-                "host": self.device.ip,
-                "ssh_config": self.device.ssh_config,
-            },
-            ssh_provider=self.device.ssh,
-            test_interface=test_interface,
-        )
-
-    def feature_is_present(self, feature: str) -> bool:
-        return feature in self.device.wlan_features
diff --git a/src/antlion/test_utils/dhcp/base_test.py b/src/antlion/test_utils/dhcp/base_test.py
deleted file mode 100644
index 6f68c3e..0000000
--- a/src/antlion/test_utils/dhcp/base_test.py
+++ /dev/null
@@ -1,263 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import time
-
-from antlion import utils
-from antlion.controllers.access_point import setup_ap, AccessPoint
-from antlion.controllers.ap_lib import dhcp_config
-from antlion.controllers.ap_lib import hostapd_constants
-from antlion.controllers.ap_lib.hostapd_security import Security
-from antlion.controllers.ap_lib.hostapd_utils import generate_random_password
-from antlion.test_utils.abstract_devices.wlan_device import create_wlan_device
-from antlion.test_utils.wifi import base_test
-
-from mobly import asserts
-
-
-class Dhcpv4InteropFixture(base_test.WifiBaseTest):
-    """Test helpers for validating DHCPv4 Interop
-
-    Test Bed Requirement:
-    * One Android device or Fuchsia device
-    * One Access Point
-    """
-
-    def setup_class(self):
-        super().setup_class()
-
-        device_type = self.user_params.get("dut", "fuchsia_devices")
-        if device_type == "fuchsia_devices":
-            self.dut = create_wlan_device(self.fuchsia_devices[0])
-        elif device_type == "android_devices":
-            self.dut = create_wlan_device(self.android_devices[0])
-        else:
-            raise ValueError(
-                f'Invalid "dut" type specified in config: "{device_type}".'
-                'Expected "fuchsia_devices" or "android_devices".'
-            )
-
-        self.access_point: AccessPoint = self.access_points[0]
-        self.access_point.stop_all_aps()
-
-    def setup_test(self):
-        if hasattr(self, "android_devices"):
-            for ad in self.android_devices:
-                ad.droid.wakeLockAcquireBright()
-                ad.droid.wakeUpNow()
-        self.dut.wifi_toggle_state(True)
-
-    def teardown_test(self):
-        if hasattr(self, "android_devices"):
-            for ad in self.android_devices:
-                ad.droid.wakeLockRelease()
-                ad.droid.goToSleepNow()
-        self.dut.turn_location_off_and_scan_toggle_off()
-        self.dut.disconnect()
-        self.dut.reset_wifi()
-        self.access_point.stop_all_aps()
-
-    def connect(self, ap_params):
-        asserts.assert_true(
-            self.dut.associate(
-                ap_params["ssid"],
-                target_pwd=ap_params["password"],
-                target_security=ap_params["target_security"],
-            ),
-            "Failed to connect.",
-        )
-
-    def setup_ap(self):
-        """Generates a hostapd config and sets up the AP with that config.
-        Does not run a DHCP server.
-
-        Returns: A dictionary of information about the AP.
-        """
-        ssid = utils.rand_ascii_str(20)
-        security_mode = hostapd_constants.WPA2_STRING
-        security_profile = Security(
-            security_mode=security_mode,
-            password=generate_random_password(length=20),
-            wpa_cipher="CCMP",
-            wpa2_cipher="CCMP",
-        )
-        password = security_profile.password
-        target_security = (
-            hostapd_constants.SECURITY_STRING_TO_DEFAULT_TARGET_SECURITY.get(
-                security_mode
-            )
-        )
-
-        ap_ids = setup_ap(
-            access_point=self.access_point,
-            profile_name="whirlwind",
-            mode=hostapd_constants.MODE_11N_MIXED,
-            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-            n_capabilities=[],
-            ac_capabilities=[],
-            force_wmm=True,
-            ssid=ssid,
-            security=security_profile,
-            password=password,
-        )
-
-        if len(ap_ids) > 1:
-            raise Exception("Expected only one SSID on AP")
-
-        configured_subnets = self.access_point.get_configured_subnets()
-        if len(configured_subnets) > 1:
-            raise Exception("Expected only one subnet on AP")
-        router_ip = configured_subnets[0].router
-        network = configured_subnets[0].network
-
-        self.access_point.stop_dhcp()
-
-        return {
-            "ssid": ssid,
-            "password": password,
-            "target_security": target_security,
-            "ip": router_ip,
-            "network": network,
-            "id": ap_ids[0],
-        }
-
-    def device_can_ping(self, dest_ip):
-        """Checks if the DUT can ping the given address.
-
-        Returns: True if can ping, False otherwise"""
-        self.log.info("Attempting to ping %s..." % dest_ip)
-        ping_result = self.dut.can_ping(dest_ip, count=2)
-        if ping_result:
-            self.log.info("Success pinging: %s" % dest_ip)
-        else:
-            self.log.info("Failure pinging: %s" % dest_ip)
-        return ping_result
-
-    def get_device_ipv4_addr(self, interface=None, timeout=20):
-        """Checks if device has an ipv4 private address. Sleeps 1 second between
-        retries.
-
-        Args:
-            interface: string, name of interface from which to get ipv4 address.
-
-        Raises:
-            ConnectionError, if DUT does not have an ipv4 address after all
-            timeout.
-
-        Returns:
-            The device's IP address
-
-        """
-        self.log.debug("Fetching updated WLAN interface list")
-        if interface is None:
-            interface = self.dut.device.wlan_client_test_interface_name
-        self.log.info(
-            "Checking if DUT has received an ipv4 addr on iface %s. Will retry for %s "
-            "seconds." % (interface, timeout)
-        )
-        timeout = time.time() + timeout
-        while time.time() < timeout:
-            ip_addrs = self.dut.device.get_interface_ip_addresses(interface)
-
-            if len(ip_addrs["ipv4_private"]) > 0:
-                ip = ip_addrs["ipv4_private"][0]
-                self.log.info("DUT has an ipv4 address: %s" % ip)
-                return ip
-            else:
-                self.log.debug(
-                    "DUT does not yet have an ipv4 address...retrying in 1 " "second."
-                )
-                time.sleep(1)
-        else:
-            raise ConnectionError("DUT failed to get an ipv4 address.")
-
-    def run_test_case_expect_dhcp_success(self, _test_name, settings):
-        """Starts the AP and DHCP server, and validates that the client
-        connects and obtains an address.
-
-        Args:
-            _test_name: name of the test being run, this variable is not used
-            settings: a dictionary containing:
-                dhcp_parameters: a dictionary of DHCP parameters
-                dhcp_options: a dictionary of DHCP options
-        """
-        ap_params = self.setup_ap()
-        subnet_conf = dhcp_config.Subnet(
-            subnet=ap_params["network"],
-            router=ap_params["ip"],
-            additional_parameters=settings["dhcp_parameters"],
-            additional_options=settings["dhcp_options"],
-        )
-        dhcp_conf = dhcp_config.DhcpConfig(subnets=[subnet_conf])
-
-        self.log.debug("DHCP Configuration:\n" + dhcp_conf.render_config_file() + "\n")
-
-        self.access_point.start_dhcp(dhcp_conf=dhcp_conf)
-        self.connect(ap_params=ap_params)
-
-        # Typical log lines look like:
-        # dhcpd[26695]: DHCPDISCOVER from f8:0f:f9:3d:ce:d1 via wlan1
-        # dhcpd[26695]: DHCPOFFER on 192.168.9.2 to f8:0f:f9:3d:ce:d1 via wlan1
-        # dhcpd[26695]: DHCPREQUEST for 192.168.9.2 (192.168.9.1) from f8:0f:f9:3d:ce:d1 via wlan1
-        # dhcpd[26695]: DHCPACK on 192.168.9.2 to f8:0f:f9:3d:ce:d1 via wlan1
-
-        try:
-            ip = self.get_device_ipv4_addr()
-        except ConnectionError:
-            self.log.warn(dhcp_logs)
-            asserts.fail(f"DUT failed to get an IP address")
-
-        # Get updates to DHCP logs
-        dhcp_logs = self.access_point.get_dhcp_logs()
-
-        expected_string = f"DHCPDISCOVER from"
-        asserts.assert_equal(
-            dhcp_logs.count(expected_string),
-            1,
-            f'Incorrect count of DHCP Discovers ("{expected_string}") in logs:\n'
-            + dhcp_logs
-            + "\n",
-        )
-
-        expected_string = f"DHCPOFFER on {ip}"
-        asserts.assert_equal(
-            dhcp_logs.count(expected_string),
-            1,
-            f'Incorrect count of DHCP Offers ("{expected_string}") in logs:\n'
-            + dhcp_logs
-            + "\n",
-        )
-
-        expected_string = f"DHCPREQUEST for {ip}"
-        asserts.assert_true(
-            dhcp_logs.count(expected_string) >= 1,
-            f'Incorrect count of DHCP Requests ("{expected_string}") in logs: '
-            + dhcp_logs
-            + "\n",
-        )
-
-        expected_string = f"DHCPACK on {ip}"
-        asserts.assert_true(
-            dhcp_logs.count(expected_string) >= 1,
-            f'Incorrect count of DHCP Acks ("{expected_string}") in logs: '
-            + dhcp_logs
-            + "\n",
-        )
-
-        asserts.assert_true(
-            self.device_can_ping(ap_params["ip"]),
-            f'DUT failed to ping router at {ap_params["ip"]}',
-        )
diff --git a/src/antlion/test_utils/fuchsia/utils.py b/src/antlion/test_utils/fuchsia/utils.py
deleted file mode 100644
index 89bbc64..0000000
--- a/src/antlion/test_utils/fuchsia/utils.py
+++ /dev/null
@@ -1,120 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import os
-from antlion.controllers.fuchsia_lib.ssh import SSHError
-
-
-def http_file_download_by_curl(
-    fd,
-    url,
-    out_path="/tmp/",
-    curl_loc="/bin/curl",
-    remove_file_after_check=True,
-    timeout=3600,
-    limit_rate=None,
-    additional_args=None,
-    retry=3,
-):
-    """Download http file by ssh curl.
-
-    Args:
-        fd: Fuchsia Device Object.
-        url: The url that file to be downloaded from.
-        out_path: Optional. Where to download file to.
-            out_path is /tmp by default.
-        curl_loc: Location of curl binary on fd.
-        remove_file_after_check: Whether to remove the downloaded file after
-            check.
-        timeout: timeout for file download to complete.
-        limit_rate: download rate in bps. None, if do not apply rate limit.
-        additional_args: Any additional args for curl.
-        retry: the retry request times provided in curl command.
-    """
-    file_directory, file_name = _generate_file_directory_and_file_name(url, out_path)
-    file_path = os.path.join(file_directory, file_name)
-    curl_cmd = curl_loc
-    if limit_rate:
-        curl_cmd += f" --limit-rate {limit_rate}"
-    if retry:
-        curl_cmd += f" --retry {retry}"
-    if additional_args:
-        curl_cmd += f" {additional_args}"
-    curl_cmd += f" --url {url} > {file_path}"
-
-    fd.log.info(f"Download {url} to {file_path} by ssh command {curl_cmd}")
-    try:
-        fd.ssh.run(curl_cmd, timeout_sec=timeout)
-        if _check_file_existence(fd, file_path):
-            fd.log.info(f"{url} is downloaded to {file_path} successfully")
-            return True
-
-        fd.log.warning(f"Fail to download {url}")
-        return False
-    except SSHError as e:
-        fd.log.warning(f'Command "{curl_cmd}" failed with error {e}')
-        return False
-    except Exception as e:
-        fd.log.error(f"Download {url} failed with unexpected exception {e}")
-        return False
-    finally:
-        if remove_file_after_check:
-            fd.log.info(f"Remove the downloaded file {file_path}")
-            try:
-                fd.ssh.run(f"rm {file_path}")
-            except SSHError:
-                pass
-
-
-def _generate_file_directory_and_file_name(url, out_path):
-    """Splits the file from the url and specifies the appropriate location of
-       where to store the downloaded file.
-
-    Args:
-        url: A url to the file that is going to be downloaded.
-        out_path: The location of where to store the file that is downloaded.
-
-    Returns:
-        file_directory: The directory of where to store the downloaded file.
-        file_name: The name of the file that is being downloaded.
-    """
-    file_name = url.split("/")[-1]
-    if not out_path:
-        file_directory = "/tmp/"
-    elif not out_path.endswith("/"):
-        file_directory, file_name = os.path.split(out_path)
-    else:
-        file_directory = out_path
-    return file_directory, file_name
-
-
-def _check_file_existence(fd, file_path):
-    """Check file existence by file_path. If expected_file_size
-       is provided, then also check if the file meet the file size requirement.
-
-    Args:
-        fd: A fuchsia device
-        file_path: Where to store the file on the fuchsia device.
-    """
-    try:
-        result = fd.ssh.run(f'ls -al "{file_path}"')
-        fd.log.debug(f"File {file_path} exists.")
-        return True
-    except SSHError as e:
-        if "No such file or directory" in e.result.stderr:
-            fd.log.debug(f"File {file_path} does not exist.")
-            return False
-        raise e
diff --git a/src/antlion/test_utils/net/net_test_utils.py b/src/antlion/test_utils/net/net_test_utils.py
deleted file mode 100644
index 4eb47ac..0000000
--- a/src/antlion/test_utils/net/net_test_utils.py
+++ /dev/null
@@ -1,582 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import logging
-import os
-import re
-import time
-import urllib.request
-
-from antlion import signals
-from antlion import utils
-from antlion.controllers import adb
-from antlion.controllers.adb_lib.error import AdbError
-from antlion.libs.proc import job
-from antlion.utils import start_standing_subprocess
-from antlion.utils import stop_standing_subprocess
-from antlion.test_utils.net import connectivity_const as cconst
-
-from mobly import asserts
-
-VPN_CONST = cconst.VpnProfile
-VPN_TYPE = cconst.VpnProfileType
-VPN_PARAMS = cconst.VpnReqParams
-TCPDUMP_PATH = "/data/local/tmp/"
-USB_CHARGE_MODE = "svc usb setFunctions"
-USB_TETHERING_MODE = "svc usb setFunctions rndis"
-ENABLE_HARDWARE_OFFLOAD = "settings put global tether_offload_disabled 0"
-DISABLE_HARDWARE_OFFLOAD = "settings put global tether_offload_disabled 1"
-DEVICE_IP_ADDRESS = "ip address"
-LOCALHOST = "192.168.1.1"
-
-# Time to wait for radio to up and running after reboot
-WAIT_TIME_AFTER_REBOOT = 10
-
-GCE_SSH = "gcloud compute ssh "
-GCE_SCP = "gcloud compute scp "
-
-
-def set_chrome_browser_permissions(ad):
-    """Set chrome browser start with no-first-run verification.
-
-    Give permission to read from and write to storage
-
-    Args:
-        ad: android device object
-    """
-    commands = [
-        "pm grant com.android.chrome " "android.permission.READ_EXTERNAL_STORAGE",
-        "pm grant com.android.chrome " "android.permission.WRITE_EXTERNAL_STORAGE",
-        "rm /data/local/chrome-command-line",
-        "am set-debug-app --persistent com.android.chrome",
-        'echo "chrome --no-default-browser-check --no-first-run '
-        '--disable-fre" > /data/local/tmp/chrome-command-line',
-    ]
-    for cmd in commands:
-        try:
-            ad.adb.shell(cmd)
-        except AdbError:
-            logging.warning("adb command %s failed on %s" % (cmd, ad.serial))
-
-
-def verify_ping_to_vpn_ip(ad, vpn_ping_addr):
-    """Verify if IP behind VPN server is pingable.
-
-    Ping should pass, if VPN is connected.
-    Ping should fail, if VPN is disconnected.
-
-    Args:
-        ad: android device object
-        vpn_ping_addr: target ping addr
-    """
-    ping_result = None
-    pkt_loss = "100% packet loss"
-    logging.info("Pinging: %s" % vpn_ping_addr)
-    try:
-        ping_result = ad.adb.shell("ping -c 3 -W 2 %s" % vpn_ping_addr)
-    except AdbError:
-        pass
-    return ping_result and pkt_loss not in ping_result
-
-
-def legacy_vpn_connection_test_logic(ad, vpn_profile, vpn_ping_addr):
-    """Test logic for each legacy VPN connection.
-
-    Steps:
-      1. Generate profile for the VPN type
-      2. Establish connection to the server
-      3. Verify that connection is established using LegacyVpnInfo
-      4. Verify the connection by pinging the IP behind VPN
-      5. Stop the VPN connection
-      6. Check the connection status
-      7. Verify that ping to IP behind VPN fails
-
-    Args:
-        ad: Android device object
-        vpn_profile: object contains attribute for create vpn profile
-        vpn_ping_addr: addr to verify vpn connection
-    """
-    # Wait for sometime so that VPN server flushes all interfaces and
-    # connections after graceful termination
-    time.sleep(10)
-
-    ad.adb.shell("ip xfrm state flush")
-    ad.log.info("Connecting to: %s", vpn_profile)
-    ad.droid.vpnStartLegacyVpn(vpn_profile)
-    time.sleep(cconst.VPN_TIMEOUT)
-
-    connected_vpn_info = ad.droid.vpnGetLegacyVpnInfo()
-    asserts.assert_equal(
-        connected_vpn_info["state"],
-        cconst.VPN_STATE_CONNECTED,
-        "Unable to establish VPN connection for %s" % vpn_profile,
-    )
-
-    ping_result = verify_ping_to_vpn_ip(ad, vpn_ping_addr)
-    ip_xfrm_state = ad.adb.shell("ip xfrm state")
-    match_obj = re.search(r"hmac(.*)", "%s" % ip_xfrm_state)
-    if match_obj:
-        ip_xfrm_state = format(match_obj.group(0)).split()
-        ad.log.info("HMAC for ESP is %s " % ip_xfrm_state[0])
-
-    ad.droid.vpnStopLegacyVpn()
-    asserts.assert_true(
-        ping_result,
-        "Ping to the internal IP failed. " "Expected to pass as VPN is connected",
-    )
-
-    connected_vpn_info = ad.droid.vpnGetLegacyVpnInfo()
-    asserts.assert_true(
-        not connected_vpn_info,
-        "Unable to terminate VPN connection for %s" % vpn_profile,
-    )
-
-
-def download_load_certs(
-    ad, vpn_params, vpn_type, vpn_server_addr, ipsec_server_type, log_path
-):
-    """Download the certificates from VPN server and push to sdcard of DUT.
-
-    Args:
-      ad: android device object
-      vpn_params: vpn params from config file
-      vpn_type: 1 of the 6 VPN types
-      vpn_server_addr: server addr to connect to
-      ipsec_server_type: ipsec version - strongswan or openswan
-      log_path: log path to download cert
-
-    Returns:
-      Client cert file name on DUT's sdcard
-    """
-    url = "http://%s%s%s" % (
-        vpn_server_addr,
-        vpn_params["cert_path_vpnserver"],
-        vpn_params["client_pkcs_file_name"],
-    )
-    logging.info("URL is: %s" % url)
-    if vpn_server_addr == LOCALHOST:
-        ad.droid.httpDownloadFile(url, "/sdcard/")
-        return vpn_params["client_pkcs_file_name"]
-
-    local_cert_name = "%s_%s_%s" % (
-        vpn_type.name,
-        ipsec_server_type,
-        vpn_params["client_pkcs_file_name"],
-    )
-    local_file_path = os.path.join(log_path, local_cert_name)
-    try:
-        ret = urllib.request.urlopen(url)
-        with open(local_file_path, "wb") as f:
-            f.write(ret.read())
-    except Exception:
-        asserts.fail("Unable to download certificate from the server")
-
-    ad.adb.push("%s sdcard/" % local_file_path)
-    return local_cert_name
-
-
-def generate_legacy_vpn_profile(
-    ad, vpn_params, vpn_type, vpn_server_addr, ipsec_server_type, log_path
-):
-    """Generate legacy VPN profile for a VPN.
-
-    Args:
-      ad: android device object
-      vpn_params: vpn params from config file
-      vpn_type: 1 of the 6 VPN types
-      vpn_server_addr: server addr to connect to
-      ipsec_server_type: ipsec version - strongswan or openswan
-      log_path: log path to download cert
-
-    Returns:
-      Vpn profile
-    """
-    vpn_profile = {
-        VPN_CONST.USER: vpn_params["vpn_username"],
-        VPN_CONST.PWD: vpn_params["vpn_password"],
-        VPN_CONST.TYPE: vpn_type.value,
-        VPN_CONST.SERVER: vpn_server_addr,
-    }
-    vpn_profile[VPN_CONST.NAME] = "test_%s_%s" % (vpn_type.name, ipsec_server_type)
-    if vpn_type.name == "PPTP":
-        vpn_profile[VPN_CONST.NAME] = "test_%s" % vpn_type.name
-
-    psk_set = set(["L2TP_IPSEC_PSK", "IPSEC_XAUTH_PSK"])
-    rsa_set = set(["L2TP_IPSEC_RSA", "IPSEC_XAUTH_RSA", "IPSEC_HYBRID_RSA"])
-
-    if vpn_type.name in psk_set:
-        vpn_profile[VPN_CONST.IPSEC_SECRET] = vpn_params["psk_secret"]
-    elif vpn_type.name in rsa_set:
-        cert_name = download_load_certs(
-            ad, vpn_params, vpn_type, vpn_server_addr, ipsec_server_type, log_path
-        )
-        vpn_profile[VPN_CONST.IPSEC_USER_CERT] = cert_name.split(".")[0]
-        ad.droid.installCertificate(vpn_profile, cert_name, vpn_params["cert_password"])
-    else:
-        vpn_profile[VPN_CONST.MPPE] = "mppe"
-
-    return vpn_profile
-
-
-def generate_ikev2_vpn_profile(ad, vpn_params, vpn_type, server_addr, log_path):
-    """Generate VPN profile for IKEv2 VPN.
-
-    Args:
-        ad: android device object.
-        vpn_params: vpn params from config file.
-        vpn_type: ikev2 vpn type.
-        server_addr: vpn server addr.
-        log_path: log path to download cert.
-
-    Returns:
-        Vpn profile.
-    """
-    vpn_profile = {
-        VPN_CONST.TYPE: vpn_type.value,
-        VPN_CONST.SERVER: server_addr,
-    }
-
-    if vpn_type.name == "IKEV2_IPSEC_USER_PASS":
-        vpn_profile[VPN_CONST.USER] = vpn_params["vpn_username"]
-        vpn_profile[VPN_CONST.PWD] = vpn_params["vpn_password"]
-        vpn_profile[VPN_CONST.IPSEC_ID] = vpn_params["vpn_identity"]
-        cert_name = download_load_certs(
-            ad,
-            vpn_params,
-            vpn_type,
-            vpn_params["server_addr"],
-            "IKEV2_IPSEC_USER_PASS",
-            log_path,
-        )
-        vpn_profile[VPN_CONST.IPSEC_CA_CERT] = cert_name.split(".")[0]
-        ad.droid.installCertificate(vpn_profile, cert_name, vpn_params["cert_password"])
-    elif vpn_type.name == "IKEV2_IPSEC_PSK":
-        vpn_profile[VPN_CONST.IPSEC_ID] = vpn_params["vpn_identity"]
-        vpn_profile[VPN_CONST.IPSEC_SECRET] = vpn_params["psk_secret"]
-    else:
-        vpn_profile[VPN_CONST.IPSEC_ID] = "%s@%s" % (
-            vpn_params["vpn_identity"],
-            server_addr,
-        )
-        logging.info("ID: %s@%s" % (vpn_params["vpn_identity"], server_addr))
-        cert_name = download_load_certs(
-            ad,
-            vpn_params,
-            vpn_type,
-            vpn_params["server_addr"],
-            "IKEV2_IPSEC_RSA",
-            log_path,
-        )
-        vpn_profile[VPN_CONST.IPSEC_USER_CERT] = cert_name.split(".")[0]
-        vpn_profile[VPN_CONST.IPSEC_CA_CERT] = cert_name.split(".")[0]
-        ad.droid.installCertificate(vpn_profile, cert_name, vpn_params["cert_password"])
-
-    return vpn_profile
-
-
-def start_tcpdump(ad, test_name, interface="any"):
-    """Start tcpdump on all interfaces.
-
-    Args:
-        ad: android device object.
-        test_name: tcpdump file name will have this
-    """
-    ad.log.info("Starting tcpdump on all interfaces")
-    ad.adb.shell("killall -9 tcpdump", ignore_status=True)
-    ad.adb.shell("mkdir %s" % TCPDUMP_PATH, ignore_status=True)
-    ad.adb.shell("rm -rf %s/*" % TCPDUMP_PATH, ignore_status=True)
-
-    file_name = "%s/tcpdump_%s_%s.pcap" % (TCPDUMP_PATH, ad.serial, test_name)
-    ad.log.info("tcpdump file is %s", file_name)
-    cmd = "adb -s {} shell tcpdump -i {} -s0 -w {}".format(
-        ad.serial, interface, file_name
-    )
-    try:
-        return start_standing_subprocess(cmd, 5)
-    except Exception:
-        ad.log.exception("Could not start standing process %s" % repr(cmd))
-
-    return None
-
-
-def stop_tcpdump(
-    ad, proc, test_name, pull_dump=True, adb_pull_timeout=adb.DEFAULT_ADB_PULL_TIMEOUT
-):
-    """Stops tcpdump on any iface.
-
-       Pulls the tcpdump file in the tcpdump dir if necessary.
-
-    Args:
-        ad: android device object.
-        proc: need to know which pid to stop
-        test_name: test name to save the tcpdump file
-        pull_dump: pull tcpdump file or not
-        adb_pull_timeout: timeout for adb_pull
-
-    Returns:
-      log_path of the tcpdump file
-    """
-    ad.log.info("Stopping and pulling tcpdump if any")
-    if proc is None:
-        return None
-    try:
-        stop_standing_subprocess(proc)
-    except Exception as e:
-        ad.log.warning(e)
-    if pull_dump:
-        log_path = os.path.join(ad.device_log_path, "TCPDUMP_%s" % ad.serial)
-        os.makedirs(log_path, exist_ok=True)
-        ad.adb.pull("%s/. %s" % (TCPDUMP_PATH, log_path), timeout=adb_pull_timeout)
-        ad.adb.shell("rm -rf %s/*" % TCPDUMP_PATH, ignore_status=True)
-        file_name = "tcpdump_%s_%s.pcap" % (ad.serial, test_name)
-        return "%s/%s" % (log_path, file_name)
-    return None
-
-
-def start_tcpdump_gce_server(ad, test_name, dest_port, gce):
-    """Start tcpdump on gce server.
-
-    Args:
-        ad: android device object
-        test_name: test case name
-        dest_port: port to collect tcpdump
-        gce: dictionary of gce instance
-
-    Returns:
-       process id and pcap file path from gce server
-    """
-    ad.log.info("Starting tcpdump on gce server")
-
-    # pcap file name
-    fname = "/tmp/%s_%s_%s_%s" % (
-        test_name,
-        ad.model,
-        ad.serial,
-        time.strftime("%Y-%m-%d_%H-%M-%S", time.localtime(time.time())),
-    )
-
-    # start tcpdump
-    tcpdump_cmd = (
-        "sudo bash -c 'tcpdump -i %s -w %s.pcap port %s > \
-        %s.txt 2>&1 & echo $!'"
-        % (gce["interface"], fname, dest_port, fname)
-    )
-    gcloud_ssh_cmd = "%s --project=%s --zone=%s %s@%s --command " % (
-        GCE_SSH,
-        gce["project"],
-        gce["zone"],
-        gce["username"],
-        gce["hostname"],
-    )
-    gce_ssh_cmd = '%s "%s"' % (gcloud_ssh_cmd, tcpdump_cmd)
-    utils.exe_cmd(gce_ssh_cmd)
-
-    # get process id
-    ps_cmd = '%s "ps aux | grep tcpdump | grep %s"' % (gcloud_ssh_cmd, fname)
-    tcpdump_pid = utils.exe_cmd(ps_cmd).decode("utf-8", "ignore").split()
-    if not tcpdump_pid:
-        raise signals.TestFailure("Failed to start tcpdump on gce server")
-    return tcpdump_pid[1], fname
-
-
-def stop_tcpdump_gce_server(ad, tcpdump_pid, fname, gce):
-    """Stop and pull tcpdump file from gce server.
-
-    Args:
-        ad: android device object
-        tcpdump_pid: process id for tcpdump file
-        fname: tcpdump file path
-        gce: dictionary of gce instance
-
-    Returns:
-       pcap file from gce server
-    """
-    ad.log.info("Stop and pull pcap file from gce server")
-
-    # stop tcpdump
-    tcpdump_cmd = "sudo kill %s" % tcpdump_pid
-    gcloud_ssh_cmd = "%s --project=%s --zone=%s %s@%s --command " % (
-        GCE_SSH,
-        gce["project"],
-        gce["zone"],
-        gce["username"],
-        gce["hostname"],
-    )
-    gce_ssh_cmd = '%s "%s"' % (gcloud_ssh_cmd, tcpdump_cmd)
-    utils.exe_cmd(gce_ssh_cmd)
-
-    # verify tcpdump is stopped
-    ps_cmd = '%s "ps aux | grep tcpdump"' % gcloud_ssh_cmd
-    res = utils.exe_cmd(ps_cmd).decode("utf-8", "ignore")
-    if tcpdump_pid in res.split():
-        raise signals.TestFailure("Failed to stop tcpdump on gce server")
-    if not fname:
-        return None
-
-    # pull pcap file
-    gcloud_scp_cmd = "%s --project=%s --zone=%s %s@%s:" % (
-        GCE_SCP,
-        gce["project"],
-        gce["zone"],
-        gce["username"],
-        gce["hostname"],
-    )
-    pull_file = "%s%s.pcap %s/" % (gcloud_scp_cmd, fname, ad.device_log_path)
-    utils.exe_cmd(pull_file)
-    if not os.path.exists("%s/%s.pcap" % (ad.device_log_path, fname.split("/")[-1])):
-        raise signals.TestFailure("Failed to pull tcpdump from gce server")
-
-    # delete pcaps
-    utils.exe_cmd('%s "sudo rm %s.*"' % (gcloud_ssh_cmd, fname))
-
-    # return pcap file
-    pcap_file = "%s/%s.pcap" % (ad.device_log_path, fname.split("/")[-1])
-    return pcap_file
-
-
-def is_ipaddress_ipv6(ip_address):
-    """Verify if the given string is a valid IPv6 address.
-
-    Args:
-        ip_address: string containing the IP address
-
-    Returns:
-        True: if valid ipv6 address
-        False: if not
-    """
-    try:
-        socket.inet_pton(socket.AF_INET6, ip_address)
-        return True
-    except socket.error:
-        return False
-
-
-def set_cap_net_raw_capability():
-    """Set the CAP_NET_RAW capability
-
-    To send the Scapy packets, we need to get the CAP_NET_RAW capability first.
-    """
-    cap_net_raw = "sudo setcap cap_net_raw=eip $(readlink -f $(which act.py))"
-    utils.exe_cmd(cap_net_raw)
-    cap_python = "sudo setcap cap_net_raw=eip $(readlink -f $(which python))"
-    utils.exe_cmd(cap_python)
-
-
-def stop_usb_tethering(ad):
-    """Stop USB tethering.
-
-    Args:
-        ad: android device object
-    """
-    ad.log.info("Stopping USB Tethering")
-    ad.stop_services()
-    ad.adb.shell(USB_CHARGE_MODE)
-    ad.adb.wait_for_device()
-    ad.start_services()
-
-
-def wait_for_new_iface(old_ifaces):
-    """Wait for the new interface to come up.
-
-    Args:
-        old_ifaces: list of old interfaces
-    """
-    old_set = set(old_ifaces)
-    # Try 10 times to find a new interface with a 1s sleep every time
-    # (equivalent to a 9s timeout)
-    for _ in range(0, 10):
-        new_ifaces = set(get_if_list()) - old_set
-        asserts.assert_true(
-            len(new_ifaces) < 2, "Too many new interfaces after turning on " "tethering"
-        )
-        if len(new_ifaces) == 1:
-            # enable the new iface before return
-            new_iface = new_ifaces.pop()
-            enable_iface(new_iface)
-            return new_iface
-        time.sleep(1)
-    asserts.fail("Timeout waiting for tethering interface on host")
-
-
-def get_if_list():
-    """Returns a list containing all network interfaces.
-
-    The newest version of Scapy.get_if_list() returns the cached interfaces,
-    which might be out-dated, and unable to perceive the interface changes.
-    Use this method when need to monitoring the network interfaces changes.
-    Reference: https://github.com/secdev/scapy/pull/2707
-
-    Returns:
-        A list of the latest network interfaces. For example:
-        ['cvd-ebr', ..., 'eno1', 'enx4afa19a8dde1', 'lo', 'wlxd03745d68d88']
-    """
-    from scapy.config import conf
-    from scapy.compat import plain_str
-
-    # Get ifconfig output
-    result = job.run([conf.prog.ifconfig])
-    if result.exit_status:
-        raise asserts.fail(
-            "Failed to execute ifconfig: {}".format(plain_str(result.stderr))
-        )
-
-    interfaces = [
-        line[: line.find(":")]
-        for line in plain_str(result.stdout).splitlines()
-        if ": flags" in line.lower()
-    ]
-    return interfaces
-
-
-def enable_hardware_offload(ad):
-    """Enable hardware offload using adb shell command.
-
-    Args:
-        ad: Android device object
-    """
-    ad.log.info("Enabling hardware offload.")
-    ad.adb.shell(ENABLE_HARDWARE_OFFLOAD, ignore_status=True)
-    ad.reboot()
-    time.sleep(WAIT_TIME_AFTER_REBOOT)
-
-
-def disable_hardware_offload(ad):
-    """Disable hardware offload using adb shell command.
-
-    Args:
-        ad: Android device object
-    """
-    ad.log.info("Disabling hardware offload.")
-    ad.adb.shell(DISABLE_HARDWARE_OFFLOAD, ignore_status=True)
-    ad.reboot()
-    time.sleep(WAIT_TIME_AFTER_REBOOT)
-
-
-def enable_iface(iface):
-    """Enable network interfaces.
-
-    Some network interface might disabled as default, need to enable before
-    using it.
-
-    Args:
-        iface: network interface that need to enable
-    """
-    from scapy.compat import plain_str
-
-    result = job.run("sudo ifconfig %s up" % (iface), ignore_status=True)
-    if result.exit_status:
-        raise asserts.fail(
-            "Failed to execute ifconfig: {}".format(plain_str(result.stderr))
-        )
diff --git a/src/antlion/test_utils/wifi/base_test.py b/src/antlion/test_utils/wifi/base_test.py
deleted file mode 100644
index 7e97d8f..0000000
--- a/src/antlion/test_utils/wifi/base_test.py
+++ /dev/null
@@ -1,1058 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-    Base Class for Defining Common WiFi Test Functionality
-"""
-
-import copy
-import os
-import time
-
-from antlion import context
-from antlion import signals
-from antlion import utils
-from antlion.base_test import BaseTestClass
-from antlion.controllers.ap_lib import hostapd_ap_preset
-from antlion.controllers.ap_lib import hostapd_bss_settings
-from antlion.controllers.ap_lib import hostapd_constants
-from antlion.controllers.ap_lib import hostapd_security
-from antlion.keys import Config
-from antlion.test_utils.net import net_test_utils as nutils
-from antlion.test_utils.wifi import wifi_test_utils as wutils
-
-from mobly import asserts
-from mobly.base_test import STAGE_NAME_TEARDOWN_CLASS
-
-WifiEnums = wutils.WifiEnums
-AP_1 = 0
-AP_2 = 1
-MAX_AP_COUNT = 2
-
-
-class WifiBaseTest(BaseTestClass):
-    def __init__(self, configs):
-        super().__init__(configs)
-        self.enable_packet_log = False
-        self.packet_log_2g = hostapd_constants.AP_DEFAULT_CHANNEL_2G
-        self.packet_log_5g = hostapd_constants.AP_DEFAULT_CHANNEL_5G
-        self.tcpdump_proc = []
-        self.packet_log_pid = {}
-
-    def setup_class(self):
-        if hasattr(self, "attenuators") and self.attenuators:
-            for attenuator in self.attenuators:
-                attenuator.set_atten(0)
-        opt_param = ["pixel_models", "cnss_diag_file", "country_code_file"]
-        self.unpack_userparams(opt_param_names=opt_param)
-        if hasattr(self, "cnss_diag_file"):
-            if isinstance(self.cnss_diag_file, list):
-                self.cnss_diag_file = self.cnss_diag_file[0]
-            if not os.path.isfile(self.cnss_diag_file):
-                self.cnss_diag_file = os.path.join(
-                    self.user_params[Config.key_config_path.value], self.cnss_diag_file
-                )
-        if self.enable_packet_log and hasattr(self, "packet_capture"):
-            self.packet_logger = self.packet_capture[0]
-            self.packet_logger.configure_monitor_mode("2G", self.packet_log_2g)
-            self.packet_logger.configure_monitor_mode("5G", self.packet_log_5g)
-        if hasattr(self, "android_devices"):
-            for ad in self.android_devices:
-                wutils.wifi_test_device_init(ad)
-                if hasattr(self, "country_code_file"):
-                    if isinstance(self.country_code_file, list):
-                        self.country_code_file = self.country_code_file[0]
-                    if not os.path.isfile(self.country_code_file):
-                        self.country_code_file = os.path.join(
-                            self.user_params[Config.key_config_path.value],
-                            self.country_code_file,
-                        )
-                    self.country_code = utils.load_config(self.country_code_file)[
-                        "country"
-                    ]
-                else:
-                    self.country_code = WifiEnums.CountryCode.US
-                wutils.set_wifi_country_code(ad, self.country_code)
-
-    def setup_test(self):
-        if (
-            hasattr(self, "android_devices")
-            and hasattr(self, "cnss_diag_file")
-            and hasattr(self, "pixel_models")
-        ):
-            wutils.start_cnss_diags(
-                self.android_devices, self.cnss_diag_file, self.pixel_models
-            )
-        self.tcpdump_proc = []
-        if hasattr(self, "android_devices"):
-            for ad in self.android_devices:
-                proc = nutils.start_tcpdump(ad, self.test_name)
-                self.tcpdump_proc.append((ad, proc))
-        if hasattr(self, "packet_logger"):
-            self.packet_log_pid = wutils.start_pcap(
-                self.packet_logger, "dual", self.test_name
-            )
-
-    def teardown_test(self):
-        if (
-            hasattr(self, "android_devices")
-            and hasattr(self, "cnss_diag_file")
-            and hasattr(self, "pixel_models")
-        ):
-            wutils.stop_cnss_diags(self.android_devices, self.pixel_models)
-            for proc in self.tcpdump_proc:
-                nutils.stop_tcpdump(proc[0], proc[1], self.test_name, pull_dump=False)
-            self.tcpdump_proc = []
-        if hasattr(self, "packet_logger") and self.packet_log_pid:
-            wutils.stop_pcap(self.packet_logger, self.packet_log_pid, test_status=True)
-            self.packet_log_pid = {}
-
-    def teardown_class(self):
-        begin_time = utils.get_current_epoch_time()
-        super().teardown_class()
-        for device in getattr(self, "fuchsia_devices", []):
-            device.take_bug_report(STAGE_NAME_TEARDOWN_CLASS, begin_time)
-
-    def on_fail(self, test_name, begin_time):
-        if hasattr(self, "android_devices"):
-            for ad in self.android_devices:
-                ad.take_bug_report(test_name, begin_time)
-                ad.cat_adb_log(test_name, begin_time)
-                wutils.get_ssrdumps(ad)
-            if hasattr(self, "cnss_diag_file") and hasattr(self, "pixel_models"):
-                wutils.stop_cnss_diags(self.android_devices, self.pixel_models)
-                for ad in self.android_devices:
-                    wutils.get_cnss_diag_log(ad)
-            for proc in self.tcpdump_proc:
-                nutils.stop_tcpdump(proc[0], proc[1], self.test_name)
-            self.tcpdump_proc = []
-        if hasattr(self, "packet_logger") and self.packet_log_pid:
-            wutils.stop_pcap(self.packet_logger, self.packet_log_pid, test_status=False)
-            self.packet_log_pid = {}
-
-        # Gets a wlan_device log and calls the generic device fail on DUT.
-        for device in getattr(self, "fuchsia_devices", []):
-            self.on_device_fail(device, test_name, begin_time)
-
-    def on_device_fail(self, device, test_name, begin_time):
-        """Gets a generic device DUT bug report.
-
-        This method takes a bug report if the device has the
-        'take_bug_report_on_fail' config value, and if the flag is true. This
-        method also power cycles if 'hard_reboot_on_fail' is True.
-
-        Args:
-            device: Generic device to gather logs from.
-            test_name: Name of the test that triggered this function.
-            begin_time: Logline format timestamp taken when the test started.
-        """
-        if (
-            not hasattr(device, "take_bug_report_on_fail")
-            or device.take_bug_report_on_fail
-        ):
-            device.take_bug_report(test_name, begin_time)
-
-        if hasattr(device, "hard_reboot_on_fail") and device.hard_reboot_on_fail:
-            device.reboot(reboot_type="hard", testbed_pdus=self.pdu_devices)
-
-    def download_ap_logs(self):
-        """Downloads the DHCP and hostapad logs from the access_point.
-
-        Using the current TestClassContext and TestCaseContext this method pulls
-        the DHCP and hostapd logs and outputs them to the correct path.
-        """
-        current_path = context.get_current_context().get_full_output_path()
-
-        dhcp_log = self.access_point.get_dhcp_logs()
-        if dhcp_log:
-            dhcp_log_path = os.path.join(current_path, "dhcp_log.txt")
-            with open(dhcp_log_path, "w") as f:
-                f.write(dhcp_log)
-
-        hostapd_logs = self.access_point.get_hostapd_logs()
-        for interface in hostapd_logs:
-            hostapd_log_path = os.path.join(
-                current_path, f"hostapd_log_{interface}.txt"
-            )
-            with open(hostapd_log_path, "w") as f:
-                f.write(hostapd_logs[interface])
-
-        radvd_log = self.access_point.get_radvd_logs()
-        if radvd_log:
-            radvd_log_path = os.path.join(current_path, "radvd_log.txt")
-            with open(radvd_log_path, "w") as f:
-                f.write(radvd_log)
-
-    def get_psk_network(
-        self,
-        mirror_ap,
-        reference_networks,
-        hidden=False,
-        same_ssid=False,
-        security_mode=hostapd_constants.WPA2_STRING,
-        ssid_length_2g=hostapd_constants.AP_SSID_LENGTH_2G,
-        ssid_length_5g=hostapd_constants.AP_SSID_LENGTH_5G,
-        passphrase_length_2g=hostapd_constants.AP_PASSPHRASE_LENGTH_2G,
-        passphrase_length_5g=hostapd_constants.AP_PASSPHRASE_LENGTH_5G,
-    ):
-        """Generates SSID and passphrase for a WPA2 network using random
-        generator.
-
-        Args:
-            mirror_ap: Boolean, determines if both APs use the same hostapd
-                       config or different configs.
-            reference_networks: List of PSK networks.
-            same_ssid: Boolean, determines if both bands on AP use the same
-                       SSID.
-            ssid_length_2gecond AP Int, number of characters to use for 2G SSID.
-            ssid_length_5g: Int, number of characters to use for 5G SSID.
-            passphrase_length_2g: Int, length of password for 2G network.
-            passphrase_length_5g: Int, length of password for 5G network.
-
-        Returns: A dict of 2G and 5G network lists for hostapd configuration.
-
-        """
-        network_dict_2g = {}
-        network_dict_5g = {}
-        ref_5g_security = security_mode
-        ref_2g_security = security_mode
-
-        if same_ssid:
-            ref_2g_ssid = "xg_%s" % utils.rand_ascii_str(ssid_length_2g)
-            ref_5g_ssid = ref_2g_ssid
-
-            ref_2g_passphrase = utils.rand_ascii_str(passphrase_length_2g)
-            ref_5g_passphrase = ref_2g_passphrase
-
-        else:
-            ref_2g_ssid = "2g_%s" % utils.rand_ascii_str(ssid_length_2g)
-            ref_2g_passphrase = utils.rand_ascii_str(passphrase_length_2g)
-
-            ref_5g_ssid = "5g_%s" % utils.rand_ascii_str(ssid_length_5g)
-            ref_5g_passphrase = utils.rand_ascii_str(passphrase_length_5g)
-
-        network_dict_2g = {
-            "SSID": ref_2g_ssid,
-            "security": ref_2g_security,
-            "password": ref_2g_passphrase,
-            "hiddenSSID": hidden,
-        }
-
-        network_dict_5g = {
-            "SSID": ref_5g_ssid,
-            "security": ref_5g_security,
-            "password": ref_5g_passphrase,
-            "hiddenSSID": hidden,
-        }
-
-        ap = 0
-        for ap in range(MAX_AP_COUNT):
-            reference_networks.append(
-                {"2g": copy.copy(network_dict_2g), "5g": copy.copy(network_dict_5g)}
-            )
-            if not mirror_ap:
-                break
-        return {"2g": network_dict_2g, "5g": network_dict_5g}
-
-    def get_open_network(
-        self,
-        mirror_ap,
-        open_network,
-        hidden=False,
-        same_ssid=False,
-        ssid_length_2g=hostapd_constants.AP_SSID_LENGTH_2G,
-        ssid_length_5g=hostapd_constants.AP_SSID_LENGTH_5G,
-        security_mode="none",
-    ):
-        """Generates SSIDs for a open network using a random generator.
-
-        Args:
-            mirror_ap: Boolean, determines if both APs use the same hostapd
-                       config or different configs.
-            open_network: List of open networks.
-            same_ssid: Boolean, determines if both bands on AP use the same
-                       SSID.
-            ssid_length_2g: Int, number of characters to use for 2G SSID.
-            ssid_length_5g: Int, number of characters to use for 5G SSID.
-            security_mode: 'none' for open and 'OWE' for WPA3 OWE.
-
-        Returns: A dict of 2G and 5G network lists for hostapd configuration.
-
-        """
-        network_dict_2g = {}
-        network_dict_5g = {}
-
-        if same_ssid:
-            open_2g_ssid = "xg_%s" % utils.rand_ascii_str(ssid_length_2g)
-            open_5g_ssid = open_2g_ssid
-
-        else:
-            open_2g_ssid = "2g_%s" % utils.rand_ascii_str(ssid_length_2g)
-            open_5g_ssid = "5g_%s" % utils.rand_ascii_str(ssid_length_5g)
-
-        network_dict_2g = {
-            "SSID": open_2g_ssid,
-            "security": security_mode,
-            "hiddenSSID": hidden,
-        }
-
-        network_dict_5g = {
-            "SSID": open_5g_ssid,
-            "security": security_mode,
-            "hiddenSSID": hidden,
-        }
-
-        ap = 0
-        for ap in range(MAX_AP_COUNT):
-            open_network.append(
-                {"2g": copy.copy(network_dict_2g), "5g": copy.copy(network_dict_5g)}
-            )
-            if not mirror_ap:
-                break
-        return {"2g": network_dict_2g, "5g": network_dict_5g}
-
-    def get_wep_network(
-        self,
-        mirror_ap,
-        networks,
-        hidden=False,
-        same_ssid=False,
-        ssid_length_2g=hostapd_constants.AP_SSID_LENGTH_2G,
-        ssid_length_5g=hostapd_constants.AP_SSID_LENGTH_5G,
-        passphrase_length_2g=hostapd_constants.AP_PASSPHRASE_LENGTH_2G,
-        passphrase_length_5g=hostapd_constants.AP_PASSPHRASE_LENGTH_5G,
-    ):
-        """Generates SSID and passphrase for a WEP network using random
-        generator.
-
-        Args:
-            mirror_ap: Boolean, determines if both APs use the same hostapd
-                       config or different configs.
-            networks: List of WEP networks.
-            same_ssid: Boolean, determines if both bands on AP use the same
-                       SSID.
-            ssid_length_2gecond AP Int, number of characters to use for 2G SSID.
-            ssid_length_5g: Int, number of characters to use for 5G SSID.
-            passphrase_length_2g: Int, length of password for 2G network.
-            passphrase_length_5g: Int, length of password for 5G network.
-
-        Returns: A dict of 2G and 5G network lists for hostapd configuration.
-
-        """
-        network_dict_2g = {}
-        network_dict_5g = {}
-        ref_5g_security = hostapd_constants.WEP_STRING
-        ref_2g_security = hostapd_constants.WEP_STRING
-
-        if same_ssid:
-            ref_2g_ssid = "xg_%s" % utils.rand_ascii_str(ssid_length_2g)
-            ref_5g_ssid = ref_2g_ssid
-
-            ref_2g_passphrase = utils.rand_hex_str(passphrase_length_2g)
-            ref_5g_passphrase = ref_2g_passphrase
-
-        else:
-            ref_2g_ssid = "2g_%s" % utils.rand_ascii_str(ssid_length_2g)
-            ref_2g_passphrase = utils.rand_hex_str(passphrase_length_2g)
-
-            ref_5g_ssid = "5g_%s" % utils.rand_ascii_str(ssid_length_5g)
-            ref_5g_passphrase = utils.rand_hex_str(passphrase_length_5g)
-
-        network_dict_2g = {
-            "SSID": ref_2g_ssid,
-            "security": ref_2g_security,
-            "wepKeys": [ref_2g_passphrase] * 4,
-            "hiddenSSID": hidden,
-        }
-
-        network_dict_5g = {
-            "SSID": ref_5g_ssid,
-            "security": ref_5g_security,
-            "wepKeys": [ref_2g_passphrase] * 4,
-            "hiddenSSID": hidden,
-        }
-
-        ap = 0
-        for ap in range(MAX_AP_COUNT):
-            networks.append(
-                {"2g": copy.copy(network_dict_2g), "5g": copy.copy(network_dict_5g)}
-            )
-            if not mirror_ap:
-                break
-        return {"2g": network_dict_2g, "5g": network_dict_5g}
-
-    def update_bssid(self, ap_instance, ap, network, band):
-        """Get bssid and update network dictionary.
-
-        Args:
-            ap_instance: Accesspoint index that was configured.
-            ap: Accesspoint object corresponding to ap_instance.
-            network: Network dictionary.
-            band: Wifi networks' band.
-
-        """
-        bssid = ap.get_bssid_from_ssid(network["SSID"], band)
-
-        if network["security"] == hostapd_constants.WPA2_STRING:
-            # TODO:(bamahadev) Change all occurances of reference_networks
-            # to wpa_networks.
-            self.reference_networks[ap_instance][band]["bssid"] = bssid
-        if network["security"] == hostapd_constants.WPA_STRING:
-            self.wpa_networks[ap_instance][band]["bssid"] = bssid
-        if network["security"] == hostapd_constants.WEP_STRING:
-            self.wep_networks[ap_instance][band]["bssid"] = bssid
-        if network["security"] == hostapd_constants.ENT_STRING:
-            if "bssid" not in self.ent_networks[ap_instance][band]:
-                self.ent_networks[ap_instance][band]["bssid"] = bssid
-            else:
-                self.ent_networks_pwd[ap_instance][band]["bssid"] = bssid
-        if network["security"] == "none":
-            self.open_network[ap_instance][band]["bssid"] = bssid
-
-    def populate_bssid(self, ap_instance, ap, networks_5g, networks_2g):
-        """Get bssid for a given SSID and add it to the network dictionary.
-
-        Args:
-            ap_instance: Accesspoint index that was configured.
-            ap: Accesspoint object corresponding to ap_instance.
-            networks_5g: List of 5g networks configured on the APs.
-            networks_2g: List of 2g networks configured on the APs.
-
-        """
-
-        if not (networks_5g or networks_2g):
-            return
-
-        for network in networks_5g:
-            if "channel" in network:
-                continue
-            self.update_bssid(ap_instance, ap, network, hostapd_constants.BAND_5G)
-
-        for network in networks_2g:
-            if "channel" in network:
-                continue
-            self.update_bssid(ap_instance, ap, network, hostapd_constants.BAND_2G)
-
-    def configure_openwrt_ap_and_start(
-        self,
-        channel_5g=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-        channel_2g=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-        channel_5g_ap2=None,
-        channel_2g_ap2=None,
-        ssid_length_2g=hostapd_constants.AP_SSID_LENGTH_2G,
-        passphrase_length_2g=hostapd_constants.AP_PASSPHRASE_LENGTH_2G,
-        ssid_length_5g=hostapd_constants.AP_SSID_LENGTH_5G,
-        passphrase_length_5g=hostapd_constants.AP_PASSPHRASE_LENGTH_5G,
-        mirror_ap=False,
-        hidden=False,
-        same_ssid=False,
-        open_network=False,
-        wpa1_network=False,
-        wpa_network=False,
-        wep_network=False,
-        ent_network=False,
-        ent_network_pwd=False,
-        owe_network=False,
-        sae_network=False,
-        saemixed_network=False,
-        radius_conf_2g=None,
-        radius_conf_5g=None,
-        radius_conf_pwd=None,
-        ap_count=1,
-        ieee80211w=None,
-    ):
-        """Create, configure and start OpenWrt AP.
-
-        Args:
-            channel_5g: 5G channel to configure.
-            channel_2g: 2G channel to configure.
-            channel_5g_ap2: 5G channel to configure on AP2.
-            channel_2g_ap2: 2G channel to configure on AP2.
-            ssid_length_2g: Int, number of characters to use for 2G SSID.
-            passphrase_length_2g: Int, length of password for 2G network.
-            ssid_length_5g: Int, number of characters to use for 5G SSID.
-            passphrase_length_5g: Int, length of password for 5G network.
-            same_ssid: Boolean, determines if both bands on AP use the same SSID.
-            open_network: Boolean, to check if open network should be configured.
-            wpa_network: Boolean, to check if wpa network should be configured.
-            wep_network: Boolean, to check if wep network should be configured.
-            ent_network: Boolean, to check if ent network should be configured.
-            ent_network_pwd: Boolean, to check if ent pwd network should be configured.
-            owe_network: Boolean, to check if owe network should be configured.
-            sae_network: Boolean, to check if sae network should be configured.
-            saemixed_network: Boolean, to check if saemixed network should be configured.
-            radius_conf_2g: dictionary with enterprise radius server details.
-            radius_conf_5g: dictionary with enterprise radius server details.
-            radius_conf_pwd: dictionary with enterprise radiuse server details.
-            ap_count: APs to configure.
-            ieee80211w:PMF to configure
-        """
-        if mirror_ap and ap_count == 1:
-            raise ValueError("ap_count cannot be 1 if mirror_ap is True.")
-        if (channel_5g_ap2 or channel_2g_ap2) and ap_count == 1:
-            raise ValueError("ap_count cannot be 1 if channels of AP2 are provided.")
-        # we are creating a channel list for 2G and 5G bands. The list is of
-        # size 2 and this is based on the assumption that each testbed will have
-        # at most 2 APs.
-        if not channel_5g_ap2:
-            channel_5g_ap2 = channel_5g
-        if not channel_2g_ap2:
-            channel_2g_ap2 = channel_2g
-        channels_2g = [channel_2g, channel_2g_ap2]
-        channels_5g = [channel_5g, channel_5g_ap2]
-
-        self.reference_networks = []
-        self.wpa1_networks = []
-        self.wpa_networks = []
-        self.wep_networks = []
-        self.ent_networks = []
-        self.ent_networks_pwd = []
-        self.open_network = []
-        self.owe_networks = []
-        self.sae_networks = []
-        self.saemixed_networks = []
-        self.bssid_map = []
-        for i in range(ap_count):
-            network_list = []
-            if wpa1_network:
-                wpa1_dict = self.get_psk_network(
-                    mirror_ap,
-                    self.wpa1_networks,
-                    hidden,
-                    same_ssid,
-                    ssid_length_2g,
-                    ssid_length_5g,
-                    passphrase_length_2g,
-                    passphrase_length_5g,
-                )
-                wpa1_dict[hostapd_constants.BAND_2G]["security"] = "psk"
-                wpa1_dict[hostapd_constants.BAND_5G]["security"] = "psk"
-                wpa1_dict[hostapd_constants.BAND_2G]["ieee80211w"] = ieee80211w
-                wpa1_dict[hostapd_constants.BAND_5G]["ieee80211w"] = ieee80211w
-                self.wpa1_networks.append(wpa1_dict)
-                network_list.append(wpa1_dict)
-            if wpa_network:
-                wpa_dict = self.get_psk_network(
-                    mirror_ap,
-                    self.reference_networks,
-                    hidden,
-                    same_ssid,
-                    ssid_length_2g,
-                    ssid_length_5g,
-                    passphrase_length_2g,
-                    passphrase_length_5g,
-                )
-                wpa_dict[hostapd_constants.BAND_2G]["security"] = "psk2"
-                wpa_dict[hostapd_constants.BAND_5G]["security"] = "psk2"
-                wpa_dict[hostapd_constants.BAND_2G]["ieee80211w"] = ieee80211w
-                wpa_dict[hostapd_constants.BAND_5G]["ieee80211w"] = ieee80211w
-                self.wpa_networks.append(wpa_dict)
-                network_list.append(wpa_dict)
-            if wep_network:
-                wep_dict = self.get_wep_network(
-                    mirror_ap,
-                    self.wep_networks,
-                    hidden,
-                    same_ssid,
-                    ssid_length_2g,
-                    ssid_length_5g,
-                )
-                network_list.append(wep_dict)
-            if ent_network:
-                ent_dict = self.get_open_network(
-                    mirror_ap,
-                    self.ent_networks,
-                    hidden,
-                    same_ssid,
-                    ssid_length_2g,
-                    ssid_length_5g,
-                )
-                ent_dict["2g"]["security"] = "wpa2"
-                ent_dict["2g"].update(radius_conf_2g)
-                ent_dict["5g"]["security"] = "wpa2"
-                ent_dict["5g"].update(radius_conf_5g)
-                network_list.append(ent_dict)
-            if ent_network_pwd:
-                ent_pwd_dict = self.get_open_network(
-                    mirror_ap,
-                    self.ent_networks_pwd,
-                    hidden,
-                    same_ssid,
-                    ssid_length_2g,
-                    ssid_length_5g,
-                )
-                ent_pwd_dict["2g"]["security"] = "wpa2"
-                ent_pwd_dict["2g"].update(radius_conf_pwd)
-                ent_pwd_dict["5g"]["security"] = "wpa2"
-                ent_pwd_dict["5g"].update(radius_conf_pwd)
-                network_list.append(ent_pwd_dict)
-            if open_network:
-                open_dict = self.get_open_network(
-                    mirror_ap,
-                    self.open_network,
-                    hidden,
-                    same_ssid,
-                    ssid_length_2g,
-                    ssid_length_5g,
-                )
-                network_list.append(open_dict)
-            if owe_network:
-                owe_dict = self.get_open_network(
-                    mirror_ap,
-                    self.owe_networks,
-                    hidden,
-                    same_ssid,
-                    ssid_length_2g,
-                    ssid_length_5g,
-                    "OWE",
-                )
-                owe_dict[hostapd_constants.BAND_2G]["security"] = "owe"
-                owe_dict[hostapd_constants.BAND_5G]["security"] = "owe"
-                network_list.append(owe_dict)
-            if sae_network:
-                sae_dict = self.get_psk_network(
-                    mirror_ap,
-                    self.sae_networks,
-                    hidden,
-                    same_ssid,
-                    hostapd_constants.SAE_KEY_MGMT,
-                    ssid_length_2g,
-                    ssid_length_5g,
-                    passphrase_length_2g,
-                    passphrase_length_5g,
-                )
-                sae_dict[hostapd_constants.BAND_2G]["security"] = "sae"
-                sae_dict[hostapd_constants.BAND_5G]["security"] = "sae"
-                network_list.append(sae_dict)
-            if saemixed_network:
-                saemixed_dict = self.get_psk_network(
-                    mirror_ap,
-                    self.saemixed_networks,
-                    hidden,
-                    same_ssid,
-                    hostapd_constants.SAE_KEY_MGMT,
-                    ssid_length_2g,
-                    ssid_length_5g,
-                    passphrase_length_2g,
-                    passphrase_length_5g,
-                )
-                saemixed_dict[hostapd_constants.BAND_2G]["security"] = "sae-mixed"
-                saemixed_dict[hostapd_constants.BAND_5G]["security"] = "sae-mixed"
-                saemixed_dict[hostapd_constants.BAND_2G]["ieee80211w"] = ieee80211w
-                saemixed_dict[hostapd_constants.BAND_5G]["ieee80211w"] = ieee80211w
-                network_list.append(saemixed_dict)
-            self.access_points[i].configure_ap(
-                network_list, channels_2g[i], channels_5g[i]
-            )
-            self.access_points[i].start_ap()
-            self.bssid_map.append(self.access_points[i].get_bssids_for_wifi_networks())
-            if mirror_ap:
-                self.access_points[i + 1].configure_ap(
-                    network_list, channels_2g[i + 1], channels_5g[i + 1]
-                )
-                self.access_points[i + 1].start_ap()
-                self.bssid_map.append(
-                    self.access_points[i + 1].get_bssids_for_wifi_networks()
-                )
-                break
-
-    def legacy_configure_ap_and_start(
-        self,
-        channel_5g=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-        channel_2g=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-        max_2g_networks=hostapd_constants.AP_DEFAULT_MAX_SSIDS_2G,
-        max_5g_networks=hostapd_constants.AP_DEFAULT_MAX_SSIDS_5G,
-        ap_ssid_length_2g=hostapd_constants.AP_SSID_LENGTH_2G,
-        ap_passphrase_length_2g=hostapd_constants.AP_PASSPHRASE_LENGTH_2G,
-        ap_ssid_length_5g=hostapd_constants.AP_SSID_LENGTH_5G,
-        ap_passphrase_length_5g=hostapd_constants.AP_PASSPHRASE_LENGTH_5G,
-        hidden=False,
-        same_ssid=False,
-        mirror_ap=True,
-        wpa_network=False,
-        wep_network=False,
-        ent_network=False,
-        radius_conf_2g=None,
-        radius_conf_5g=None,
-        ent_network_pwd=False,
-        radius_conf_pwd=None,
-        ap_count=1,
-    ):
-        config_count = 1
-        count = 0
-
-        # For example, the NetworkSelector tests use 2 APs and require that
-        # both APs are not mirrored.
-        if not mirror_ap and ap_count == 1:
-            raise ValueError("ap_count cannot be 1 if mirror_ap is False.")
-
-        if not mirror_ap:
-            config_count = ap_count
-
-        self.user_params["reference_networks"] = []
-        self.user_params["open_network"] = []
-        if wpa_network:
-            self.user_params["wpa_networks"] = []
-        if wep_network:
-            self.user_params["wep_networks"] = []
-        if ent_network:
-            self.user_params["ent_networks"] = []
-        if ent_network_pwd:
-            self.user_params["ent_networks_pwd"] = []
-
-        # kill hostapd & dhcpd if the cleanup was not successful
-        for i in range(len(self.access_points)):
-            self.log.debug("Check ap state and cleanup")
-            self._cleanup_hostapd_and_dhcpd(i)
-
-        for count in range(config_count):
-            network_list_2g = []
-            network_list_5g = []
-
-            orig_network_list_2g = []
-            orig_network_list_5g = []
-
-            network_list_2g.append({"channel": channel_2g})
-            network_list_5g.append({"channel": channel_5g})
-
-            networks_dict = self.get_psk_network(
-                mirror_ap,
-                self.user_params["reference_networks"],
-                hidden=hidden,
-                same_ssid=same_ssid,
-            )
-            self.reference_networks = self.user_params["reference_networks"]
-
-            network_list_2g.append(networks_dict["2g"])
-            network_list_5g.append(networks_dict["5g"])
-
-            # When same_ssid is set, only configure one set of WPA networks.
-            # We cannot have more than one set because duplicate interface names
-            # are not allowed.
-            # TODO(bmahadev): Provide option to select the type of network,
-            # instead of defaulting to WPA.
-            if not same_ssid:
-                networks_dict = self.get_open_network(
-                    mirror_ap,
-                    self.user_params["open_network"],
-                    hidden=hidden,
-                    same_ssid=same_ssid,
-                )
-                self.open_network = self.user_params["open_network"]
-
-                network_list_2g.append(networks_dict["2g"])
-                network_list_5g.append(networks_dict["5g"])
-
-                if wpa_network:
-                    networks_dict = self.get_psk_network(
-                        mirror_ap,
-                        self.user_params["wpa_networks"],
-                        hidden=hidden,
-                        same_ssid=same_ssid,
-                        security_mode=hostapd_constants.WPA_STRING,
-                    )
-                    self.wpa_networks = self.user_params["wpa_networks"]
-
-                    network_list_2g.append(networks_dict["2g"])
-                    network_list_5g.append(networks_dict["5g"])
-
-                if wep_network:
-                    networks_dict = self.get_wep_network(
-                        mirror_ap,
-                        self.user_params["wep_networks"],
-                        hidden=hidden,
-                        same_ssid=same_ssid,
-                    )
-                    self.wep_networks = self.user_params["wep_networks"]
-
-                    network_list_2g.append(networks_dict["2g"])
-                    network_list_5g.append(networks_dict["5g"])
-
-                if ent_network:
-                    networks_dict = self.get_open_network(
-                        mirror_ap,
-                        self.user_params["ent_networks"],
-                        hidden=hidden,
-                        same_ssid=same_ssid,
-                    )
-                    networks_dict["2g"]["security"] = hostapd_constants.ENT_STRING
-                    networks_dict["2g"].update(radius_conf_2g)
-                    networks_dict["5g"]["security"] = hostapd_constants.ENT_STRING
-                    networks_dict["5g"].update(radius_conf_5g)
-                    self.ent_networks = self.user_params["ent_networks"]
-
-                    network_list_2g.append(networks_dict["2g"])
-                    network_list_5g.append(networks_dict["5g"])
-
-                if ent_network_pwd:
-                    networks_dict = self.get_open_network(
-                        mirror_ap,
-                        self.user_params["ent_networks_pwd"],
-                        hidden=hidden,
-                        same_ssid=same_ssid,
-                    )
-                    networks_dict["2g"]["security"] = hostapd_constants.ENT_STRING
-                    networks_dict["2g"].update(radius_conf_pwd)
-                    networks_dict["5g"]["security"] = hostapd_constants.ENT_STRING
-                    networks_dict["5g"].update(radius_conf_pwd)
-                    self.ent_networks_pwd = self.user_params["ent_networks_pwd"]
-
-                    network_list_2g.append(networks_dict["2g"])
-                    network_list_5g.append(networks_dict["5g"])
-
-            orig_network_list_5g = copy.copy(network_list_5g)
-            orig_network_list_2g = copy.copy(network_list_2g)
-
-            if len(network_list_5g) > 1:
-                self.config_5g = self._generate_legacy_ap_config(network_list_5g)
-            if len(network_list_2g) > 1:
-                self.config_2g = self._generate_legacy_ap_config(network_list_2g)
-
-            self.access_points[count].start_ap(self.config_2g)
-            self.access_points[count].start_ap(self.config_5g)
-            self.populate_bssid(
-                count,
-                self.access_points[count],
-                orig_network_list_5g,
-                orig_network_list_2g,
-            )
-
-        # Repeat configuration on the second router.
-        if mirror_ap and ap_count == 2:
-            self.access_points[AP_2].start_ap(self.config_2g)
-            self.access_points[AP_2].start_ap(self.config_5g)
-            self.populate_bssid(
-                AP_2,
-                self.access_points[AP_2],
-                orig_network_list_5g,
-                orig_network_list_2g,
-            )
-
-    def _kill_processes(self, ap, daemon):
-        """Kill hostapd and dhcpd daemons
-
-        Args:
-            ap: AP to cleanup
-            daemon: process to kill
-
-        Returns: True/False if killing process is successful
-        """
-        self.log.info("Killing %s" % daemon)
-        pids = ap.ssh.run("pidof %s" % daemon, ignore_status=True)
-        if pids.stdout:
-            ap.ssh.run("kill %s" % pids.stdout, ignore_status=True)
-        time.sleep(3)
-        pids = ap.ssh.run("pidof %s" % daemon, ignore_status=True)
-        if pids.stdout:
-            return False
-        return True
-
-    def _cleanup_hostapd_and_dhcpd(self, count):
-        """Check if AP was cleaned up properly
-
-        Kill hostapd and dhcpd processes if cleanup was not successful in the
-        last run
-
-        Args:
-            count: AP to check
-
-        Returns:
-            New AccessPoint object if AP required cleanup
-
-        Raises:
-            Error: if the AccessPoint timed out to setup
-        """
-        ap = self.access_points[count]
-        phy_ifaces = ap.interfaces.get_physical_interface()
-        kill_hostapd = False
-        for iface in phy_ifaces:
-            if "2g_" in iface or "5g_" in iface or "xg_" in iface:
-                kill_hostapd = True
-                break
-
-        if not kill_hostapd:
-            return
-
-        self.log.debug("Cleanup AP")
-        if not self._kill_processes(ap, "hostapd") or not self._kill_processes(
-            ap, "dhcpd"
-        ):
-            raise ("Failed to cleanup AP")
-
-        ap.__init__(self.user_params["AccessPoint"][count])
-
-    def _generate_legacy_ap_config(self, network_list):
-        bss_settings = []
-        wlan_2g = self.access_points[AP_1].wlan_2g
-        wlan_5g = self.access_points[AP_1].wlan_5g
-        ap_settings = network_list.pop(0)
-        # TODO:(bmahadev) This is a bug. We should not have to pop the first
-        # network in the list and treat it as a separate case. Instead,
-        # create_ap_preset() should be able to take NULL ssid and security and
-        # build config based on the bss_Settings alone.
-        hostapd_config_settings = network_list.pop(0)
-        for network in network_list:
-            if "password" in network:
-                bss_settings.append(
-                    hostapd_bss_settings.BssSettings(
-                        name=network["SSID"],
-                        ssid=network["SSID"],
-                        hidden=network["hiddenSSID"],
-                        security=hostapd_security.Security(
-                            security_mode=network["security"],
-                            password=network["password"],
-                        ),
-                    )
-                )
-            elif "wepKeys" in network:
-                bss_settings.append(
-                    hostapd_bss_settings.BssSettings(
-                        name=network["SSID"],
-                        ssid=network["SSID"],
-                        hidden=network["hiddenSSID"],
-                        security=hostapd_security.Security(
-                            security_mode=network["security"],
-                            password=network["wepKeys"][0],
-                        ),
-                    )
-                )
-            elif network["security"] == hostapd_constants.ENT_STRING:
-                bss_settings.append(
-                    hostapd_bss_settings.BssSettings(
-                        name=network["SSID"],
-                        ssid=network["SSID"],
-                        hidden=network["hiddenSSID"],
-                        security=hostapd_security.Security(
-                            security_mode=network["security"],
-                            radius_server_ip=network["radius_server_ip"],
-                            radius_server_port=network["radius_server_port"],
-                            radius_server_secret=network["radius_server_secret"],
-                        ),
-                    )
-                )
-            else:
-                bss_settings.append(
-                    hostapd_bss_settings.BssSettings(
-                        name=network["SSID"],
-                        ssid=network["SSID"],
-                        hidden=network["hiddenSSID"],
-                    )
-                )
-        if "password" in hostapd_config_settings:
-            config = hostapd_ap_preset.create_ap_preset(
-                iface_wlan_2g=wlan_2g,
-                iface_wlan_5g=wlan_5g,
-                channel=ap_settings["channel"],
-                ssid=hostapd_config_settings["SSID"],
-                hidden=hostapd_config_settings["hiddenSSID"],
-                security=hostapd_security.Security(
-                    security_mode=hostapd_config_settings["security"],
-                    password=hostapd_config_settings["password"],
-                ),
-                bss_settings=bss_settings,
-            )
-        elif "wepKeys" in hostapd_config_settings:
-            config = hostapd_ap_preset.create_ap_preset(
-                iface_wlan_2g=wlan_2g,
-                iface_wlan_5g=wlan_5g,
-                channel=ap_settings["channel"],
-                ssid=hostapd_config_settings["SSID"],
-                hidden=hostapd_config_settings["hiddenSSID"],
-                security=hostapd_security.Security(
-                    security_mode=hostapd_config_settings["security"],
-                    password=hostapd_config_settings["wepKeys"][0],
-                ),
-                bss_settings=bss_settings,
-            )
-        else:
-            config = hostapd_ap_preset.create_ap_preset(
-                iface_wlan_2g=wlan_2g,
-                iface_wlan_5g=wlan_5g,
-                channel=ap_settings["channel"],
-                ssid=hostapd_config_settings["SSID"],
-                hidden=hostapd_config_settings["hiddenSSID"],
-                bss_settings=bss_settings,
-            )
-        return config
-
-    def configure_packet_capture(
-        self,
-        channel_5g=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-        channel_2g=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-    ):
-        """Configure packet capture for 2G and 5G bands.
-
-        Args:
-            channel_5g: Channel to set the monitor mode to for 5G band.
-            channel_2g: Channel to set the monitor mode to for 2G band.
-        """
-        self.packet_capture = self.packet_capture[0]
-        result = self.packet_capture.configure_monitor_mode(
-            hostapd_constants.BAND_2G, channel_2g
-        )
-        if not result:
-            raise ValueError("Failed to configure channel for 2G band")
-
-        result = self.packet_capture.configure_monitor_mode(
-            hostapd_constants.BAND_5G, channel_5g
-        )
-        if not result:
-            raise ValueError("Failed to configure channel for 5G band.")
-
-    @staticmethod
-    def wifi_test_wrap(fn):
-        def _safe_wrap_test_case(self, *args, **kwargs):
-            test_id = "%s:%s:%s" % (
-                self.__class__.__name__,
-                self.test_name,
-                self.log_begin_time.replace(" ", "-"),
-            )
-            self.test_id = test_id
-            self.result_detail = ""
-            tries = int(self.user_params.get("wifi_auto_rerun", 3))
-            for ad in self.android_devices:
-                ad.log_path = self.log_path
-            for i in range(tries + 1):
-                result = True
-                if i > 0:
-                    log_string = "[Test Case] RETRY:%s %s" % (i, self.test_name)
-                    self.log.info(log_string)
-                    self._teardown_test(self.test_name)
-                    self._setup_test(self.test_name)
-                try:
-                    result = fn(self, *args, **kwargs)
-                except signals.TestFailure as e:
-                    self.log.warn("Error msg: %s" % e)
-                    if self.result_detail:
-                        signal.details = self.result_detail
-                    result = False
-                except signals.TestSignal:
-                    if self.result_detail:
-                        signal.details = self.result_detail
-                    raise
-                except Exception as e:
-                    self.log.exception(e)
-                    asserts.fail(self.result_detail)
-                if result is False:
-                    if i < tries:
-                        continue
-                else:
-                    break
-            if result is not False:
-                asserts.explicit_pass(self.result_detail)
-            else:
-                asserts.fail(self.result_detail)
-
-        return _safe_wrap_test_case
diff --git a/src/antlion/test_utils/wifi/wifi_performance_test_utils/__init__.py b/src/antlion/test_utils/wifi/wifi_performance_test_utils/__init__.py
deleted file mode 100644
index 567077e..0000000
--- a/src/antlion/test_utils/wifi/wifi_performance_test_utils/__init__.py
+++ /dev/null
@@ -1,770 +0,0 @@
-#!/usr/bin/env python3.4
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import collections
-import importlib
-import ipaddress
-import logging
-import numpy
-import re
-import time
-from concurrent.futures import ThreadPoolExecutor
-
-from antlion import utils
-from antlion.controllers.android_device import AndroidDevice
-from antlion.controllers.utils_lib import ssh
-from antlion.test_utils.wifi import wifi_test_utils as wutils
-from antlion.test_utils.wifi.wifi_performance_test_utils import ping_utils
-from antlion.test_utils.wifi.wifi_performance_test_utils import qcom_utils
-from antlion.test_utils.wifi.wifi_performance_test_utils import brcm_utils
-
-from mobly import asserts
-
-SHORT_SLEEP = 1
-MED_SLEEP = 6
-CHANNELS_6GHz = ["6g{}".format(4 * x + 1) for x in range(59)]
-BAND_TO_CHANNEL_MAP = {
-    "2.4GHz": list(range(1, 14)),
-    "UNII-1": [36, 40, 44, 48],
-    "UNII-2": [52, 56, 60, 64, 100, 104, 108, 112, 116, 120, 124, 128, 132, 140],
-    "UNII-3": [149, 153, 157, 161, 165],
-    "6GHz": CHANNELS_6GHz,
-}
-CHANNEL_TO_BAND_MAP = {
-    channel: band
-    for band, channels in BAND_TO_CHANNEL_MAP.items()
-    for channel in channels
-}
-
-
-# Decorators
-def nonblocking(f):
-    """Creates a decorator transforming function calls to non-blocking"""
-
-    def wrap(*args, **kwargs):
-        executor = ThreadPoolExecutor(max_workers=1)
-        thread_future = executor.submit(f, *args, **kwargs)
-        # Ensure resources are freed up when executor ruturns or raises
-        executor.shutdown(wait=False)
-        return thread_future
-
-    return wrap
-
-
-def detect_wifi_platform(dut):
-    if hasattr(dut, "wifi_platform"):
-        return dut.wifi_platform
-    qcom_check = len(dut.get_file_names("/vendor/firmware/wlan/qca_cld/"))
-    if qcom_check:
-        dut.wifi_platform = "qcom"
-    else:
-        dut.wifi_platform = "brcm"
-    return dut.wifi_platform
-
-
-def detect_wifi_decorator(f):
-    def wrap(*args, **kwargs):
-        if "dut" in kwargs:
-            dut = kwargs["dut"]
-        else:
-            dut = next(arg for arg in args if type(arg) == AndroidDevice)
-        dut_package = (
-            "acts_contrib.test_utils.wifi.wifi_performance_test_utils.{}_utils".format(
-                detect_wifi_platform(dut)
-            )
-        )
-        dut_package = importlib.import_module(dut_package)
-        f_decorated = getattr(dut_package, f.__name__, lambda: None)
-        return f_decorated(*args, **kwargs)
-
-    return wrap
-
-
-# JSON serializer
-def serialize_dict(input_dict):
-    """Function to serialize dicts to enable JSON output"""
-    output_dict = collections.OrderedDict()
-    for key, value in input_dict.items():
-        output_dict[_serialize_value(key)] = _serialize_value(value)
-    return output_dict
-
-
-def _serialize_value(value):
-    """Function to recursively serialize dict entries to enable JSON output"""
-    if isinstance(value, tuple):
-        return str(value)
-    if isinstance(value, numpy.int64):
-        return int(value)
-    if isinstance(value, numpy.float64):
-        return float(value)
-    if isinstance(value, list):
-        return [_serialize_value(x) for x in value]
-    if isinstance(value, numpy.ndarray):
-        return [_serialize_value(x) for x in value]
-    elif isinstance(value, dict):
-        return serialize_dict(value)
-    elif type(value) in (float, int, bool, str):
-        return value
-    else:
-        return "Non-serializable object"
-
-
-def extract_sub_dict(full_dict, fields):
-    sub_dict = collections.OrderedDict((field, full_dict[field]) for field in fields)
-    return sub_dict
-
-
-# Miscellaneous Wifi Utilities
-def check_skip_conditions(testcase_params, dut, access_point, ota_chamber=None):
-    """Checks if test should be skipped."""
-    # Check battery level before test
-    if not health_check(dut, 10):
-        asserts.skip("DUT battery level too low.")
-    if not access_point.band_lookup_by_channel(testcase_params["channel"]):
-        asserts.skip("AP does not support requested channel.")
-    if (
-        ota_chamber
-        and CHANNEL_TO_BAND_MAP[testcase_params["channel"]]
-        not in ota_chamber.SUPPORTED_BANDS
-    ):
-        asserts.skip("OTA chamber does not support requested channel.")
-    # Check if 6GHz is supported by checking capabilities in the US.
-    if not dut.droid.wifiCheckState():
-        wutils.wifi_toggle_state(dut, True)
-    iw_list = dut.adb.shell("iw list")
-    supports_6ghz = "6135 MHz" in iw_list
-    supports_160mhz = "Supported Channel Width: 160 MHz" in iw_list
-    if testcase_params.get("bandwidth", 20) == 160 and not supports_160mhz:
-        asserts.skip("DUT does not support 160 MHz networks.")
-    if testcase_params.get("channel", 6) in CHANNELS_6GHz and not supports_6ghz:
-        asserts.skip("DUT does not support 6 GHz band.")
-
-
-def validate_network(dut, ssid):
-    """Check that DUT has a valid internet connection through expected SSID
-
-    Args:
-        dut: android device of interest
-        ssid: expected ssid
-    """
-    try:
-        connected = wutils.validate_connection(dut, wait_time=3) is not None
-        current_network = dut.droid.wifiGetConnectionInfo()
-    except:
-        connected = False
-        current_network = None
-    if connected and current_network["SSID"] == ssid:
-        return True
-    else:
-        return False
-
-
-def get_server_address(ssh_connection, dut_ip, subnet_mask):
-    """Get server address on a specific subnet,
-
-    This function retrieves the LAN or WAN IP of a remote machine used in
-    testing. If subnet_mask is set to 'public' it returns a machines global ip,
-    else it returns the ip belonging to the dut local network given the dut's
-    ip and subnet mask.
-
-    Args:
-        ssh_connection: object representing server for which we want an ip
-        dut_ip: string in ip address format, i.e., xxx.xxx.xxx.xxx
-        subnet_mask: string representing subnet mask (public for global ip)
-    """
-    ifconfig_out = ssh_connection.run("ifconfig").stdout
-    ip_list = re.findall("inet (?:addr:)?(\d+.\d+.\d+.\d+)", ifconfig_out)
-    ip_list = [ipaddress.ip_address(ip) for ip in ip_list]
-
-    if subnet_mask == "public":
-        for ip in ip_list:
-            # is_global is not used to allow for CGNAT ips in 100.x.y.z range
-            if not ip.is_private:
-                return str(ip)
-    else:
-        dut_network = ipaddress.ip_network(
-            "{}/{}".format(dut_ip, subnet_mask), strict=False
-        )
-        for ip in ip_list:
-            if ip in dut_network:
-                return str(ip)
-    logging.error("No IP address found in requested subnet")
-
-
-# Ping utilities
-def get_ping_stats(src_device, dest_address, ping_duration, ping_interval, ping_size):
-    """Run ping to or from the DUT.
-
-    The function computes either pings the DUT or pings a remote ip from
-    DUT.
-
-    Args:
-        src_device: object representing device to ping from
-        dest_address: ip address to ping
-        ping_duration: timeout to set on the ping process (in seconds)
-        ping_interval: time between pings (in seconds)
-        ping_size: size of ping packet payload
-    Returns:
-        ping_result: dict containing ping results and other meta data
-    """
-    ping_count = int(ping_duration / ping_interval)
-    ping_deadline = int(ping_count * ping_interval) + 1
-    ping_cmd_linux = "ping -c {} -w {} -i {} -s {} -D".format(
-        ping_count,
-        ping_deadline,
-        ping_interval,
-        ping_size,
-    )
-
-    ping_cmd_macos = "ping -c {} -t {} -i {} -s {}".format(
-        ping_count,
-        ping_deadline,
-        ping_interval,
-        ping_size,
-    )
-
-    if isinstance(src_device, AndroidDevice):
-        ping_cmd = "{} {}".format(ping_cmd_linux, dest_address)
-        ping_output = src_device.adb.shell(
-            ping_cmd, timeout=ping_deadline + SHORT_SLEEP, ignore_status=True
-        )
-    elif isinstance(src_device, ssh.connection.SshConnection):
-        platform = src_device.run("uname").stdout
-        if "linux" in platform.lower():
-            ping_cmd = "sudo {} {}".format(ping_cmd_linux, dest_address)
-        elif "darwin" in platform.lower():
-            ping_cmd = "sudo {} {}| while IFS= read -r line; do printf '[%s] %s\n' \"$(gdate '+%s.%N')\" \"$line\"; done".format(
-                ping_cmd_macos, dest_address
-            )
-        ping_output = src_device.run(
-            ping_cmd, timeout=ping_deadline + SHORT_SLEEP, ignore_status=True
-        ).stdout
-    else:
-        raise TypeError(
-            "Unable to ping using src_device of type %s." % type(src_device)
-        )
-    return ping_utils.PingResult(ping_output.splitlines())
-
-
-@nonblocking
-def get_ping_stats_nb(
-    src_device, dest_address, ping_duration, ping_interval, ping_size
-):
-    return get_ping_stats(
-        src_device, dest_address, ping_duration, ping_interval, ping_size
-    )
-
-
-# Iperf utilities
-@nonblocking
-def start_iperf_client_nb(iperf_client, iperf_server_address, iperf_args, tag, timeout):
-    return iperf_client.start(iperf_server_address, iperf_args, tag, timeout)
-
-
-def get_iperf_arg_string(
-    duration,
-    reverse_direction,
-    interval=1,
-    traffic_type="TCP",
-    socket_size=None,
-    num_processes=1,
-    udp_throughput="1000M",
-    ipv6=False,
-):
-    """Function to format iperf client arguments.
-
-    This function takes in iperf client parameters and returns a properly
-    formatter iperf arg string to be used in throughput tests.
-
-    Args:
-        duration: iperf duration in seconds
-        reverse_direction: boolean controlling the -R flag for iperf clients
-        interval: iperf print interval
-        traffic_type: string specifying TCP or UDP traffic
-        socket_size: string specifying TCP window or socket buffer, e.g., 2M
-        num_processes: int specifying number of iperf processes
-        udp_throughput: string specifying TX throughput in UDP tests, e.g. 100M
-        ipv6: boolean controlling the use of IP V6
-    Returns:
-        iperf_args: string of formatted iperf args
-    """
-    iperf_args = "-i {} -t {} -J ".format(interval, duration)
-    if ipv6:
-        iperf_args = iperf_args + "-6 "
-    if traffic_type.upper() == "UDP":
-        iperf_args = iperf_args + "-u -b {} -l 1470 -P {} ".format(
-            udp_throughput, num_processes
-        )
-    elif traffic_type.upper() == "TCP":
-        iperf_args = iperf_args + "-P {} ".format(num_processes)
-    if socket_size:
-        iperf_args = iperf_args + "-w {} ".format(socket_size)
-    if reverse_direction:
-        iperf_args = iperf_args + " -R"
-    return iperf_args
-
-
-# Attenuator Utilities
-def atten_by_label(atten_list, path_label, atten_level):
-    """Attenuate signals according to their path label.
-
-    Args:
-        atten_list: list of attenuators to iterate over
-        path_label: path label on which to set desired attenuation
-        atten_level: attenuation desired on path
-    """
-    for atten in atten_list:
-        if path_label in atten.path:
-            atten.set_atten(atten_level, retry=True)
-
-
-def get_atten_for_target_rssi(target_rssi, attenuators, dut, ping_server):
-    """Function to estimate attenuation to hit a target RSSI.
-
-    This function estimates a constant attenuation setting on all atennuation
-    ports to hit a target RSSI. The estimate is not meant to be exact or
-    guaranteed.
-
-    Args:
-        target_rssi: rssi of interest
-        attenuators: list of attenuator ports
-        dut: android device object assumed connected to a wifi network.
-        ping_server: ssh connection object to ping server
-    Returns:
-        target_atten: attenuation setting to achieve target_rssi
-    """
-    logging.info("Searching attenuation for RSSI = {}dB".format(target_rssi))
-    # Set attenuator to 0 dB
-    for atten in attenuators:
-        atten.set_atten(0, strict=False, retry=True)
-    # Start ping traffic
-    dut_ip = dut.droid.connectivityGetIPv4Addresses("wlan0")[0]
-    # Measure starting RSSI
-    ping_future = get_ping_stats_nb(
-        src_device=ping_server,
-        dest_address=dut_ip,
-        ping_duration=1.5,
-        ping_interval=0.02,
-        ping_size=64,
-    )
-    current_rssi = get_connected_rssi(
-        dut,
-        num_measurements=4,
-        polling_frequency=0.25,
-        first_measurement_delay=0.5,
-        disconnect_warning=1,
-        ignore_samples=1,
-    )
-    current_rssi = current_rssi["signal_poll_rssi"]["mean"]
-    ping_future.result()
-    target_atten = 0
-    logging.debug(
-        "RSSI @ {0:.2f}dB attenuation = {1:.2f}".format(target_atten, current_rssi)
-    )
-    within_range = 0
-    for idx in range(20):
-        atten_delta = max(min(current_rssi - target_rssi, 20), -20)
-        target_atten = int((target_atten + atten_delta) * 4) / 4
-        if target_atten < 0:
-            return 0
-        if target_atten > attenuators[0].get_max_atten():
-            return attenuators[0].get_max_atten()
-        for atten in attenuators:
-            atten.set_atten(target_atten, strict=False, retry=True)
-        ping_future = get_ping_stats_nb(
-            src_device=ping_server,
-            dest_address=dut_ip,
-            ping_duration=1.5,
-            ping_interval=0.02,
-            ping_size=64,
-        )
-        current_rssi = get_connected_rssi(
-            dut,
-            num_measurements=4,
-            polling_frequency=0.25,
-            first_measurement_delay=0.5,
-            disconnect_warning=1,
-            ignore_samples=1,
-        )
-        current_rssi = current_rssi["signal_poll_rssi"]["mean"]
-        ping_future.result()
-        logging.info(
-            "RSSI @ {0:.2f}dB attenuation = {1:.2f}".format(target_atten, current_rssi)
-        )
-        if abs(current_rssi - target_rssi) < 1:
-            if within_range:
-                logging.info(
-                    "Reached RSSI: {0:.2f}. Target RSSI: {1:.2f}."
-                    "Attenuation: {2:.2f}, Iterations = {3:.2f}".format(
-                        current_rssi, target_rssi, target_atten, idx
-                    )
-                )
-                return target_atten
-            else:
-                within_range = True
-        else:
-            within_range = False
-    return target_atten
-
-
-def get_current_atten_dut_chain_map(attenuators, dut, ping_server, ping_from_dut=False):
-    """Function to detect mapping between attenuator ports and DUT chains.
-
-    This function detects the mapping between attenuator ports and DUT chains
-    in cases where DUT chains are connected to only one attenuator port. The
-    function assumes the DUT is already connected to a wifi network. The
-    function starts by measuring per chain RSSI at 0 attenuation, then
-    attenuates one port at a time looking for the chain that reports a lower
-    RSSI.
-
-    Args:
-        attenuators: list of attenuator ports
-        dut: android device object assumed connected to a wifi network.
-        ping_server: ssh connection object to ping server
-        ping_from_dut: boolean controlling whether to ping from or to dut
-    Returns:
-        chain_map: list of dut chains, one entry per attenuator port
-    """
-    # Set attenuator to 0 dB
-    for atten in attenuators:
-        atten.set_atten(0, strict=False, retry=True)
-    # Start ping traffic
-    dut_ip = dut.droid.connectivityGetIPv4Addresses("wlan0")[0]
-    if ping_from_dut:
-        ping_future = get_ping_stats_nb(
-            dut, ping_server._settings.hostname, 11, 0.02, 64
-        )
-    else:
-        ping_future = get_ping_stats_nb(ping_server, dut_ip, 11, 0.02, 64)
-    # Measure starting RSSI
-    base_rssi = get_connected_rssi(dut, 4, 0.25, 1)
-    chain0_base_rssi = base_rssi["chain_0_rssi"]["mean"]
-    chain1_base_rssi = base_rssi["chain_1_rssi"]["mean"]
-    if chain0_base_rssi < -70 or chain1_base_rssi < -70:
-        logging.warning("RSSI might be too low to get reliable chain map.")
-    # Compile chain map by attenuating one path at a time and seeing which
-    # chain's RSSI degrades
-    chain_map = []
-    for test_atten in attenuators:
-        # Set one attenuator to 30 dB down
-        test_atten.set_atten(30, strict=False, retry=True)
-        # Get new RSSI
-        test_rssi = get_connected_rssi(dut, 4, 0.25, 1)
-        # Assign attenuator to path that has lower RSSI
-        if (
-            chain0_base_rssi > -70
-            and chain0_base_rssi - test_rssi["chain_0_rssi"]["mean"] > 10
-        ):
-            chain_map.append("DUT-Chain-0")
-        elif (
-            chain1_base_rssi > -70
-            and chain1_base_rssi - test_rssi["chain_1_rssi"]["mean"] > 10
-        ):
-            chain_map.append("DUT-Chain-1")
-        else:
-            chain_map.append(None)
-        # Reset attenuator to 0
-        test_atten.set_atten(0, strict=False, retry=True)
-    ping_future.result()
-    logging.debug("Chain Map: {}".format(chain_map))
-    return chain_map
-
-
-def get_full_rf_connection_map(
-    attenuators, dut, ping_server, networks, ping_from_dut=False
-):
-    """Function to detect per-network connections between attenuator and DUT.
-
-    This function detects the mapping between attenuator ports and DUT chains
-    on all networks in its arguments. The function connects the DUT to each
-    network then calls get_current_atten_dut_chain_map to get the connection
-    map on the current network. The function outputs the results in two formats
-    to enable easy access when users are interested in indexing by network or
-    attenuator port.
-
-    Args:
-        attenuators: list of attenuator ports
-        dut: android device object assumed connected to a wifi network.
-        ping_server: ssh connection object to ping server
-        networks: dict of network IDs and configs
-    Returns:
-        rf_map_by_network: dict of RF connections indexed by network.
-        rf_map_by_atten: list of RF connections indexed by attenuator
-    """
-    for atten in attenuators:
-        atten.set_atten(0, strict=False, retry=True)
-
-    rf_map_by_network = collections.OrderedDict()
-    rf_map_by_atten = [[] for atten in attenuators]
-    for net_id, net_config in networks.items():
-        wutils.reset_wifi(dut)
-        wutils.wifi_connect(
-            dut,
-            net_config,
-            num_of_tries=1,
-            assert_on_fail=False,
-            check_connectivity=False,
-        )
-        rf_map_by_network[net_id] = get_current_atten_dut_chain_map(
-            attenuators, dut, ping_server, ping_from_dut
-        )
-        for idx, chain in enumerate(rf_map_by_network[net_id]):
-            if chain:
-                rf_map_by_atten[idx].append({"network": net_id, "dut_chain": chain})
-    logging.debug("RF Map (by Network): {}".format(rf_map_by_network))
-    logging.debug("RF Map (by Atten): {}".format(rf_map_by_atten))
-
-    return rf_map_by_network, rf_map_by_atten
-
-
-# Generic device utils
-def get_dut_temperature(dut):
-    """Function to get dut temperature.
-
-    The function fetches and returns the reading from the temperature sensor
-    used for skin temperature and thermal throttling.
-
-    Args:
-        dut: AndroidDevice of interest
-    Returns:
-        temperature: device temperature. 0 if temperature could not be read
-    """
-    candidate_zones = [
-        "/sys/devices/virtual/thermal/tz-by-name/skin-therm/temp",
-        "/sys/devices/virtual/thermal/tz-by-name/sdm-therm-monitor/temp",
-        "/sys/devices/virtual/thermal/tz-by-name/sdm-therm-adc/temp",
-        "/sys/devices/virtual/thermal/tz-by-name/back_therm/temp",
-        "/dev/thermal/tz-by-name/quiet_therm/temp",
-    ]
-    for zone in candidate_zones:
-        try:
-            temperature = int(dut.adb.shell("cat {}".format(zone)))
-            break
-        except:
-            temperature = 0
-    if temperature == 0:
-        logging.debug("Could not check DUT temperature.")
-    elif temperature > 100:
-        temperature = temperature / 1000
-    return temperature
-
-
-def wait_for_dut_cooldown(dut, target_temp=50, timeout=300):
-    """Function to wait for a DUT to cool down.
-
-    Args:
-        dut: AndroidDevice of interest
-        target_temp: target cooldown temperature
-        timeout: maxt time to wait for cooldown
-    """
-    start_time = time.time()
-    while time.time() - start_time < timeout:
-        temperature = get_dut_temperature(dut)
-        if temperature < target_temp:
-            break
-        time.sleep(SHORT_SLEEP)
-    elapsed_time = time.time() - start_time
-    logging.debug(
-        "DUT Final Temperature: {}C. Cooldown duration: {}".format(
-            temperature, elapsed_time
-        )
-    )
-
-
-def health_check(dut, batt_thresh=5, temp_threshold=53, cooldown=1):
-    """Function to check health status of a DUT.
-
-    The function checks both battery levels and temperature to avoid DUT
-    powering off during the test.
-
-    Args:
-        dut: AndroidDevice of interest
-        batt_thresh: battery level threshold
-        temp_threshold: temperature threshold
-        cooldown: flag to wait for DUT to cool down when overheating
-    Returns:
-        health_check: boolean confirming device is healthy
-    """
-    health_check = True
-    battery_level = utils.get_battery_level(dut)
-    if battery_level < batt_thresh:
-        logging.warning("Battery level low ({}%)".format(battery_level))
-        health_check = False
-    else:
-        logging.debug("Battery level = {}%".format(battery_level))
-
-    temperature = get_dut_temperature(dut)
-    if temperature > temp_threshold:
-        if cooldown:
-            logging.warning("Waiting for DUT to cooldown. ({} C)".format(temperature))
-            wait_for_dut_cooldown(dut, target_temp=temp_threshold - 5)
-        else:
-            logging.warning("DUT Overheating ({} C)".format(temperature))
-            health_check = False
-    else:
-        logging.debug("DUT Temperature = {} C".format(temperature))
-    return health_check
-
-
-# Wifi Device Utils
-def empty_rssi_result():
-    return collections.OrderedDict(
-        [("data", []), ("mean", float("nan")), ("stdev", float("nan"))]
-    )
-
-
-@nonblocking
-def get_connected_rssi_nb(
-    dut,
-    num_measurements=1,
-    polling_frequency=SHORT_SLEEP,
-    first_measurement_delay=0,
-    disconnect_warning=True,
-    ignore_samples=0,
-    interface="wlan0",
-):
-    return get_connected_rssi(
-        dut,
-        num_measurements,
-        polling_frequency,
-        first_measurement_delay,
-        disconnect_warning,
-        ignore_samples,
-        interface,
-    )
-
-
-@detect_wifi_decorator
-def get_connected_rssi(
-    dut,
-    num_measurements=1,
-    polling_frequency=SHORT_SLEEP,
-    first_measurement_delay=0,
-    disconnect_warning=True,
-    ignore_samples=0,
-    interface="wlan0",
-):
-    """Gets all RSSI values reported for the connected access point/BSSID.
-
-    Args:
-        dut: android device object from which to get RSSI
-        num_measurements: number of scans done, and RSSIs collected
-        polling_frequency: time to wait between RSSI measurements
-        disconnect_warning: boolean controlling disconnection logging messages
-        ignore_samples: number of leading samples to ignore
-    Returns:
-        connected_rssi: dict containing the measurements results for
-        all reported RSSI values (signal_poll, per chain, etc.) and their
-        statistics
-    """
-
-
-@nonblocking
-def get_scan_rssi_nb(dut, tracked_bssids, num_measurements=1):
-    return get_scan_rssi(dut, tracked_bssids, num_measurements)
-
-
-@detect_wifi_decorator
-def get_scan_rssi(dut, tracked_bssids, num_measurements=1):
-    """Gets scan RSSI for specified BSSIDs.
-
-    Args:
-        dut: android device object from which to get RSSI
-        tracked_bssids: array of BSSIDs to gather RSSI data for
-        num_measurements: number of scans done, and RSSIs collected
-    Returns:
-        scan_rssi: dict containing the measurement results as well as the
-        statistics of the scan RSSI for all BSSIDs in tracked_bssids
-    """
-
-
-@detect_wifi_decorator
-def get_sw_signature(dut):
-    """Function that checks the signature for wifi firmware and config files.
-
-    Returns:
-        bdf_signature: signature consisting of last three digits of bdf cksums
-        fw_signature: floating point firmware version, i.e., major.minor
-    """
-
-
-@detect_wifi_decorator
-def get_country_code(dut):
-    """Function that returns the current wifi country code."""
-
-
-@detect_wifi_decorator
-def push_config(dut, config_file):
-    """Function to push Wifi BDF files
-
-    This function checks for existing wifi bdf files and over writes them all,
-    for simplicity, with the bdf file provided in the arguments. The dut is
-    rebooted for the bdf file to take effect
-
-    Args:
-        dut: dut to push bdf file to
-        config_file: path to bdf_file to push
-    """
-
-
-@detect_wifi_decorator
-def start_wifi_logging(dut):
-    """Function to start collecting wifi-related logs"""
-
-
-@detect_wifi_decorator
-def stop_wifi_logging(dut):
-    """Function to start collecting wifi-related logs"""
-
-
-@detect_wifi_decorator
-def push_firmware(dut, firmware_files):
-    """Function to push Wifi firmware files
-
-    Args:
-        dut: dut to push bdf file to
-        firmware_files: path to wlanmdsp.mbn file
-        datamsc_file: path to Data.msc file
-    """
-
-
-@detect_wifi_decorator
-def disable_beamforming(dut):
-    """Function to disable beamforming."""
-
-
-@detect_wifi_decorator
-def set_nss_capability(dut, nss):
-    """Function to set number of spatial streams supported."""
-
-
-@detect_wifi_decorator
-def set_chain_mask(dut, chain_mask):
-    """Function to set DUT chain mask.
-
-    Args:
-        dut: android device
-        chain_mask: desired chain mask in [0, 1, '2x2']
-    """
-
-
-# Link layer stats utilities
-class LinkLayerStats:
-    def __new__(self, dut, llstats_enabled=True):
-        if detect_wifi_platform(dut) == "qcom":
-            return qcom_utils.LinkLayerStats(dut, llstats_enabled)
-        else:
-            return brcm_utils.LinkLayerStats(dut, llstats_enabled)
diff --git a/src/antlion/test_utils/wifi/wifi_performance_test_utils/bokeh_figure.py b/src/antlion/test_utils/wifi/wifi_performance_test_utils/bokeh_figure.py
deleted file mode 100644
index 0c9aec3..0000000
--- a/src/antlion/test_utils/wifi/wifi_performance_test_utils/bokeh_figure.py
+++ /dev/null
@@ -1,389 +0,0 @@
-#!/usr/bin/env python3.4
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import bokeh, bokeh.plotting, bokeh.io
-import collections
-import itertools
-import json
-import math
-
-
-# Plotting Utilities
-class BokehFigure:
-    """Class enabling  simplified Bokeh plotting."""
-
-    COLORS = [
-        "black",
-        "blue",
-        "blueviolet",
-        "brown",
-        "burlywood",
-        "cadetblue",
-        "cornflowerblue",
-        "crimson",
-        "cyan",
-        "darkblue",
-        "darkgreen",
-        "darkmagenta",
-        "darkorange",
-        "darkred",
-        "deepskyblue",
-        "goldenrod",
-        "green",
-        "grey",
-        "indigo",
-        "navy",
-        "olive",
-        "orange",
-        "red",
-        "salmon",
-        "teal",
-        "yellow",
-    ]
-    MARKERS = [
-        "asterisk",
-        "circle",
-        "circle_cross",
-        "circle_x",
-        "cross",
-        "diamond",
-        "diamond_cross",
-        "hex",
-        "inverted_triangle",
-        "square",
-        "square_x",
-        "square_cross",
-        "triangle",
-        "x",
-    ]
-
-    TOOLS = "box_zoom,box_select,pan,crosshair,redo,undo,reset,hover,save"
-
-    def __init__(
-        self,
-        title=None,
-        x_label=None,
-        primary_y_label=None,
-        secondary_y_label=None,
-        height=700,
-        width=1100,
-        title_size="15pt",
-        axis_label_size="12pt",
-        legend_label_size="12pt",
-        axis_tick_label_size="12pt",
-        x_axis_type="auto",
-        sizing_mode="scale_both",
-        json_file=None,
-    ):
-        if json_file:
-            self.load_from_json(json_file)
-        else:
-            self.figure_data = []
-            self.fig_property = {
-                "title": title,
-                "x_label": x_label,
-                "primary_y_label": primary_y_label,
-                "secondary_y_label": secondary_y_label,
-                "num_lines": 0,
-                "height": height,
-                "width": width,
-                "title_size": title_size,
-                "axis_label_size": axis_label_size,
-                "legend_label_size": legend_label_size,
-                "axis_tick_label_size": axis_tick_label_size,
-                "x_axis_type": x_axis_type,
-                "sizing_mode": sizing_mode,
-            }
-
-    def init_plot(self):
-        self.plot = bokeh.plotting.figure(
-            sizing_mode=self.fig_property["sizing_mode"],
-            plot_width=self.fig_property["width"],
-            plot_height=self.fig_property["height"],
-            title=self.fig_property["title"],
-            tools=self.TOOLS,
-            x_axis_type=self.fig_property["x_axis_type"],
-            output_backend="webgl",
-        )
-        tooltips = [
-            ("index", "$index"),
-            ("(x,y)", "($x, $y)"),
-        ]
-        hover_set = []
-        for line in self.figure_data:
-            hover_set.extend(line["hover_text"].keys())
-        hover_set = set(hover_set)
-        for item in hover_set:
-            tooltips.append((item, "@{}".format(item)))
-        self.plot.hover.tooltips = tooltips
-        self.plot.add_tools(bokeh.models.tools.WheelZoomTool(dimensions="width"))
-        self.plot.add_tools(bokeh.models.tools.WheelZoomTool(dimensions="height"))
-
-    def _filter_line(self, x_data, y_data, hover_text=None):
-        """Function to remove NaN points from bokeh plots."""
-        x_data_filtered = []
-        y_data_filtered = []
-        hover_text_filtered = {}
-        for idx, xy in enumerate(
-            itertools.zip_longest(x_data, y_data, fillvalue=float("nan"))
-        ):
-            if not math.isnan(xy[1]):
-                x_data_filtered.append(xy[0])
-                y_data_filtered.append(xy[1])
-                if hover_text:
-                    for key, value in hover_text.items():
-                        hover_text_filtered.setdefault(key, [])
-                        hover_text_filtered[key].append(
-                            value[idx] if len(value) > idx else ""
-                        )
-        return x_data_filtered, y_data_filtered, hover_text_filtered
-
-    def add_line(
-        self,
-        x_data,
-        y_data,
-        legend,
-        hover_text=None,
-        color=None,
-        width=3,
-        style="solid",
-        marker=None,
-        marker_size=10,
-        shaded_region=None,
-        y_axis="default",
-    ):
-        """Function to add line to existing BokehFigure.
-
-        Args:
-            x_data: list containing x-axis values for line
-            y_data: list containing y_axis values for line
-            legend: string containing line title
-            hover_text: text to display when hovering over lines
-            color: string describing line color
-            width: integer line width
-            style: string describing line style, e.g, solid or dashed
-            marker: string specifying line marker, e.g., cross
-            shaded region: data describing shaded region to plot
-            y_axis: identifier for y-axis to plot line against
-        """
-        if y_axis not in ["default", "secondary"]:
-            raise ValueError("y_axis must be default or secondary")
-        if color == None:
-            color = self.COLORS[self.fig_property["num_lines"] % len(self.COLORS)]
-        if style == "dashed":
-            style = [5, 5]
-        if isinstance(hover_text, list):
-            hover_text = {"info": hover_text}
-        x_data_filter, y_data_filter, hover_text_filter = self._filter_line(
-            x_data, y_data, hover_text
-        )
-        self.figure_data.append(
-            {
-                "x_data": x_data_filter,
-                "y_data": y_data_filter,
-                "legend": legend,
-                "hover_text": hover_text_filter,
-                "color": color,
-                "width": width,
-                "style": style,
-                "marker": marker,
-                "marker_size": marker_size,
-                "shaded_region": shaded_region,
-                "y_axis": y_axis,
-            }
-        )
-        self.fig_property["num_lines"] += 1
-
-    def add_scatter(
-        self,
-        x_data,
-        y_data,
-        legend,
-        hover_text=None,
-        color=None,
-        marker=None,
-        marker_size=10,
-        y_axis="default",
-    ):
-        """Function to add line to existing BokehFigure.
-
-        Args:
-            x_data: list containing x-axis values for line
-            y_data: list containing y_axis values for line
-            legend: string containing line title
-            hover_text: text to display when hovering over lines
-            color: string describing line color
-            marker: string specifying marker, e.g., cross
-            y_axis: identifier for y-axis to plot line against
-        """
-        if y_axis not in ["default", "secondary"]:
-            raise ValueError("y_axis must be default or secondary")
-        if color == None:
-            color = self.COLORS[self.fig_property["num_lines"] % len(self.COLORS)]
-        if marker == None:
-            marker = self.MARKERS[self.fig_property["num_lines"] % len(self.MARKERS)]
-        self.figure_data.append(
-            {
-                "x_data": x_data,
-                "y_data": y_data,
-                "legend": legend,
-                "hover_text": hover_text,
-                "color": color,
-                "width": 0,
-                "style": "solid",
-                "marker": marker,
-                "marker_size": marker_size,
-                "shaded_region": None,
-                "y_axis": y_axis,
-            }
-        )
-        self.fig_property["num_lines"] += 1
-
-    def generate_figure(self, output_file=None, save_json=True):
-        """Function to generate and save BokehFigure.
-
-        Args:
-            output_file: string specifying output file path
-        """
-        self.init_plot()
-        two_axes = False
-        for line in self.figure_data:
-            data_dict = {"x": line["x_data"], "y": line["y_data"]}
-            for key, value in line["hover_text"].items():
-                data_dict[key] = value
-            source = bokeh.models.ColumnDataSource(data=data_dict)
-            if line["width"] > 0:
-                self.plot.line(
-                    x="x",
-                    y="y",
-                    legend_label=line["legend"],
-                    line_width=line["width"],
-                    color=line["color"],
-                    line_dash=line["style"],
-                    name=line["y_axis"],
-                    y_range_name=line["y_axis"],
-                    source=source,
-                )
-            if line["shaded_region"]:
-                band_x = line["shaded_region"]["x_vector"]
-                band_x.extend(line["shaded_region"]["x_vector"][::-1])
-                band_y = line["shaded_region"]["lower_limit"]
-                band_y.extend(line["shaded_region"]["upper_limit"][::-1])
-                self.plot.patch(
-                    band_x, band_y, color="#7570B3", line_alpha=0.1, fill_alpha=0.1
-                )
-            if line["marker"] in self.MARKERS:
-                marker_func = getattr(self.plot, line["marker"])
-                marker_func(
-                    x="x",
-                    y="y",
-                    size=line["marker_size"],
-                    legend_label=line["legend"],
-                    line_color=line["color"],
-                    fill_color=line["color"],
-                    name=line["y_axis"],
-                    y_range_name=line["y_axis"],
-                    source=source,
-                )
-            if line["y_axis"] == "secondary":
-                two_axes = True
-
-        # x-axis formatting
-        self.plot.xaxis.axis_label = self.fig_property["x_label"]
-        self.plot.x_range.range_padding = 0
-        self.plot.xaxis[0].axis_label_text_font_size = self.fig_property[
-            "axis_label_size"
-        ]
-        self.plot.xaxis.major_label_text_font_size = self.fig_property[
-            "axis_tick_label_size"
-        ]
-        # y-axis formatting
-        self.plot.yaxis[0].axis_label = self.fig_property["primary_y_label"]
-        self.plot.yaxis[0].axis_label_text_font_size = self.fig_property[
-            "axis_label_size"
-        ]
-        self.plot.yaxis.major_label_text_font_size = self.fig_property[
-            "axis_tick_label_size"
-        ]
-        self.plot.y_range = bokeh.models.DataRange1d(names=["default"])
-        if two_axes and "secondary" not in self.plot.extra_y_ranges:
-            self.plot.extra_y_ranges = {
-                "secondary": bokeh.models.DataRange1d(names=["secondary"])
-            }
-            self.plot.add_layout(
-                bokeh.models.LinearAxis(
-                    y_range_name="secondary",
-                    axis_label=self.fig_property["secondary_y_label"],
-                    axis_label_text_font_size=self.fig_property["axis_label_size"],
-                ),
-                "right",
-            )
-        # plot formatting
-        self.plot.legend.location = "top_right"
-        self.plot.legend.click_policy = "hide"
-        self.plot.title.text_font_size = self.fig_property["title_size"]
-        self.plot.legend.label_text_font_size = self.fig_property["legend_label_size"]
-
-        if output_file is not None:
-            self.save_figure(output_file, save_json)
-        return self.plot
-
-    def load_from_json(self, file_path):
-        with open(file_path, "r") as json_file:
-            fig_dict = json.load(json_file)
-        self.fig_property = fig_dict["fig_property"]
-        self.figure_data = fig_dict["figure_data"]
-
-    def _save_figure_json(self, output_file):
-        """Function to save a json format of a figure"""
-        figure_dict = collections.OrderedDict(
-            fig_property=self.fig_property, figure_data=self.figure_data
-        )
-        output_file = output_file.replace(".html", "_plot_data.json")
-        with open(output_file, "w") as outfile:
-            json.dump(figure_dict, outfile, indent=4)
-
-    def save_figure(self, output_file, save_json=True):
-        """Function to save BokehFigure.
-
-        Args:
-            output_file: string specifying output file path
-            save_json: flag controlling json outputs
-        """
-        if save_json:
-            self._save_figure_json(output_file)
-        bokeh.io.output_file(output_file)
-        bokeh.io.save(self.plot)
-
-    @staticmethod
-    def save_figures(figure_array, output_file_path, save_json=True):
-        """Function to save list of BokehFigures in one file.
-
-        Args:
-            figure_array: list of BokehFigure object to be plotted
-            output_file: string specifying output file path
-        """
-        for idx, figure in enumerate(figure_array):
-            figure.generate_figure()
-            if save_json:
-                json_file_path = output_file_path.replace(
-                    ".html", "{}-plot_data.json".format(idx)
-                )
-                figure._save_figure_json(json_file_path)
-        plot_array = [figure.plot for figure in figure_array]
-        all_plots = bokeh.layouts.column(children=plot_array, sizing_mode="scale_width")
-        bokeh.plotting.output_file(output_file_path)
-        bokeh.plotting.save(all_plots)
diff --git a/src/antlion/test_utils/wifi/wifi_performance_test_utils/brcm_utils.py b/src/antlion/test_utils/wifi/wifi_performance_test_utils/brcm_utils.py
deleted file mode 100644
index 2c0dc4c..0000000
--- a/src/antlion/test_utils/wifi/wifi_performance_test_utils/brcm_utils.py
+++ /dev/null
@@ -1,734 +0,0 @@
-#!/usr/bin/env python3.4
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import collections
-import hashlib
-import itertools
-import logging
-import math
-import numpy
-import re
-import statistics
-import time
-
-VERY_SHORT_SLEEP = 0.5
-SHORT_SLEEP = 1
-MED_SLEEP = 6
-DISCONNECTION_MESSAGE_BRCM = "driver adapter not found"
-RSSI_ERROR_VAL = float("nan")
-RATE_TABLE = {
-    "HT": {
-        1: {
-            20: [7.2, 14.4, 21.7, 28.9, 43.4, 57.8, 65.0, 72.2],
-            40: [15.0, 30.0, 45.0, 60.0, 90.0, 120.0, 135.0, 150.0],
-        },
-        2: {
-            20: [
-                0,
-                0,
-                0,
-                0,
-                0,
-                0,
-                0,
-                0,
-                14.4,
-                28.8,
-                43.4,
-                57.8,
-                86.8,
-                115.6,
-                130,
-                144.4,
-            ],
-            40: [0, 0, 0, 0, 0, 0, 0, 0, 30, 60, 90, 120, 180, 240, 270, 300],
-        },
-    },
-    "VHT": {
-        1: {
-            20: [
-                7.2,
-                14.4,
-                21.7,
-                28.9,
-                43.4,
-                57.8,
-                65.0,
-                72.2,
-                86.7,
-                96.2,
-                129.0,
-                143.4,
-            ],
-            40: [
-                15.0,
-                30.0,
-                45.0,
-                60.0,
-                90.0,
-                120.0,
-                135.0,
-                150.0,
-                180.0,
-                200.0,
-                258,
-                286.8,
-            ],
-            80: [
-                32.5,
-                65.0,
-                97.5,
-                130.0,
-                195.0,
-                260.0,
-                292.5,
-                325.0,
-                390.0,
-                433.3,
-                540.4,
-                600.4,
-            ],
-            160: [
-                65.0,
-                130.0,
-                195.0,
-                260.0,
-                390.0,
-                520.0,
-                585.0,
-                650.0,
-                780.0,
-                1080.8,
-                1200.8,
-            ],
-        },
-        2: {
-            20: [
-                14.4,
-                28.8,
-                43.4,
-                57.8,
-                86.8,
-                115.6,
-                130,
-                144.4,
-                173.4,
-                192.4,
-                258,
-                286.8,
-            ],
-            40: [30, 60, 90, 120, 180, 240, 270, 300, 360, 400, 516, 573.6],
-            80: [65, 130, 195, 260, 390, 520, 585, 650, 780, 866.6, 1080.8, 1200.8],
-            160: [130, 260, 390, 520, 780, 1040, 1170, 1300, 1560, 2161.6, 2401.6],
-        },
-    },
-    "HE": {
-        1: {
-            20: [
-                8.6,
-                17.2,
-                25.8,
-                34.4,
-                51.6,
-                68.8,
-                77.4,
-                86.0,
-                103.2,
-                114.7,
-                129.0,
-                143.4,
-            ],
-            40: [
-                17.2,
-                34.4,
-                51.6,
-                68.8,
-                103.2,
-                137.6,
-                154.8,
-                172,
-                206.4,
-                229.4,
-                258,
-                286.8,
-            ],
-            80: [
-                36.0,
-                72.1,
-                108.1,
-                144.1,
-                216.2,
-                288.2,
-                324.3,
-                360.3,
-                432.4,
-                480.4,
-                540.4,
-                600.4,
-            ],
-            160: [
-                72,
-                144.2,
-                216.2,
-                288.2,
-                432.4,
-                576.4,
-                648.6,
-                720.6,
-                864.8,
-                960.8,
-                1080.8,
-                1200.8,
-            ],
-        },
-        2: {
-            20: [
-                17.2,
-                34.4,
-                51.6,
-                68.8,
-                103.2,
-                137.6,
-                154.8,
-                172,
-                206.4,
-                229.4,
-                258,
-                286.8,
-            ],
-            40: [
-                34.4,
-                68.8,
-                103.2,
-                137.6,
-                206.4,
-                275.2,
-                309.6,
-                344,
-                412.8,
-                458.8,
-                516,
-                573.6,
-            ],
-            80: [
-                72,
-                144.2,
-                216.2,
-                288.2,
-                432.4,
-                576.4,
-                648.6,
-                720.6,
-                864.8,
-                960.8,
-                1080.8,
-                1200.8,
-            ],
-            160: [
-                144,
-                288.4,
-                432.4,
-                576.4,
-                864.8,
-                1152.8,
-                1297.2,
-                1441.2,
-                1729.6,
-                1921.6,
-                2161.6,
-                2401.6,
-            ],
-        },
-    },
-}
-
-
-# Rssi Utilities
-def empty_rssi_result():
-    return collections.OrderedDict([("data", []), ("mean", None), ("stdev", None)])
-
-
-def get_connected_rssi(
-    dut,
-    num_measurements=1,
-    polling_frequency=SHORT_SLEEP,
-    first_measurement_delay=0,
-    disconnect_warning=True,
-    ignore_samples=0,
-    interface="wlan0",
-):
-    # yapf: disable
-    connected_rssi = collections.OrderedDict(
-        [('time_stamp', []),
-         ('bssid', []), ('ssid', []), ('frequency', []),
-         ('signal_poll_rssi', empty_rssi_result()),
-         ('signal_poll_avg_rssi', empty_rssi_result()),
-         ('chain_0_rssi', empty_rssi_result()),
-         ('chain_1_rssi', empty_rssi_result())])
-
-    # yapf: enable
-    previous_bssid = "disconnected"
-    t0 = time.time()
-    time.sleep(first_measurement_delay)
-    for idx in range(num_measurements):
-        measurement_start_time = time.time()
-        connected_rssi["time_stamp"].append(measurement_start_time - t0)
-        # Get signal poll RSSI
-        try:
-            status_output = dut.adb.shell("wpa_cli -i {} status".format(interface))
-        except:
-            status_output = ""
-        match = re.search("bssid=.*", status_output)
-        if match:
-            current_bssid = match.group(0).split("=")[1]
-            connected_rssi["bssid"].append(current_bssid)
-        else:
-            current_bssid = "disconnected"
-            connected_rssi["bssid"].append(current_bssid)
-            if disconnect_warning and previous_bssid != "disconnected":
-                logging.warning("WIFI DISCONNECT DETECTED!")
-
-        previous_bssid = current_bssid
-        match = re.search("\s+ssid=.*", status_output)
-        if match:
-            ssid = match.group(0).split("=")[1]
-            connected_rssi["ssid"].append(ssid)
-        else:
-            connected_rssi["ssid"].append("disconnected")
-
-        # TODO: SEARCH MAP ; PICK CENTER CHANNEL
-        match = re.search("\s+freq=.*", status_output)
-        if match:
-            frequency = int(match.group(0).split("=")[1])
-            connected_rssi["frequency"].append(frequency)
-        else:
-            connected_rssi["frequency"].append(RSSI_ERROR_VAL)
-
-        if interface == "wlan0":
-            try:
-                per_chain_rssi = dut.adb.shell("wl phy_rssi_ant")
-                chain_0_rssi = re.search(
-                    r"rssi\[0\]\s(?P<chain_0_rssi>[0-9\-]*)", per_chain_rssi
-                )
-                if chain_0_rssi:
-                    chain_0_rssi = int(chain_0_rssi.group("chain_0_rssi"))
-                else:
-                    chain_0_rssi = -float("inf")
-                chain_1_rssi = re.search(
-                    r"rssi\[1\]\s(?P<chain_1_rssi>[0-9\-]*)", per_chain_rssi
-                )
-                if chain_1_rssi:
-                    chain_1_rssi = int(chain_1_rssi.group("chain_1_rssi"))
-                else:
-                    chain_1_rssi = -float("inf")
-            except:
-                chain_0_rssi = RSSI_ERROR_VAL
-                chain_1_rssi = RSSI_ERROR_VAL
-            connected_rssi["chain_0_rssi"]["data"].append(chain_0_rssi)
-            connected_rssi["chain_1_rssi"]["data"].append(chain_1_rssi)
-            combined_rssi = math.pow(10, chain_0_rssi / 10) + math.pow(
-                10, chain_1_rssi / 10
-            )
-            combined_rssi = 10 * math.log10(combined_rssi)
-            connected_rssi["signal_poll_rssi"]["data"].append(combined_rssi)
-            connected_rssi["signal_poll_avg_rssi"]["data"].append(combined_rssi)
-        else:
-            try:
-                signal_poll_output = dut.adb.shell(
-                    "wpa_cli -i {} signal_poll".format(interface)
-                )
-            except:
-                signal_poll_output = ""
-            match = re.search("RSSI=.*", signal_poll_output)
-            if match:
-                temp_rssi = int(match.group(0).split("=")[1])
-                if temp_rssi == -9999 or temp_rssi == 0:
-                    connected_rssi["signal_poll_rssi"]["data"].append(RSSI_ERROR_VAL)
-                else:
-                    connected_rssi["signal_poll_rssi"]["data"].append(temp_rssi)
-            else:
-                connected_rssi["signal_poll_rssi"]["data"].append(RSSI_ERROR_VAL)
-            connected_rssi["chain_0_rssi"]["data"].append(RSSI_ERROR_VAL)
-            connected_rssi["chain_1_rssi"]["data"].append(RSSI_ERROR_VAL)
-        measurement_elapsed_time = time.time() - measurement_start_time
-        time.sleep(max(0, polling_frequency - measurement_elapsed_time))
-
-    # Statistics, Statistics
-    for key, val in connected_rssi.copy().items():
-        if "data" not in val:
-            continue
-        filtered_rssi_values = [x for x in val["data"] if not math.isnan(x)]
-        if len(filtered_rssi_values) > ignore_samples:
-            filtered_rssi_values = filtered_rssi_values[ignore_samples:]
-        if filtered_rssi_values:
-            connected_rssi[key]["mean"] = statistics.mean(filtered_rssi_values)
-            if len(filtered_rssi_values) > 1:
-                connected_rssi[key]["stdev"] = statistics.stdev(filtered_rssi_values)
-            else:
-                connected_rssi[key]["stdev"] = 0
-        else:
-            connected_rssi[key]["mean"] = RSSI_ERROR_VAL
-            connected_rssi[key]["stdev"] = RSSI_ERROR_VAL
-
-    return connected_rssi
-
-
-def get_scan_rssi(dut, tracked_bssids, num_measurements=1):
-    scan_rssi = collections.OrderedDict()
-    for bssid in tracked_bssids:
-        scan_rssi[bssid] = empty_rssi_result()
-    for idx in range(num_measurements):
-        scan_output = dut.adb.shell("cmd wifi start-scan")
-        time.sleep(MED_SLEEP)
-        scan_output = dut.adb.shell("cmd wifi list-scan-results")
-        for bssid in tracked_bssids:
-            bssid_result = re.search(bssid + ".*", scan_output, flags=re.IGNORECASE)
-            if bssid_result:
-                bssid_result = bssid_result.group(0).split()
-                scan_rssi[bssid]["data"].append(int(bssid_result[2]))
-            else:
-                scan_rssi[bssid]["data"].append(RSSI_ERROR_VAL)
-    # Compute mean RSSIs. Only average valid readings.
-    # Output RSSI_ERROR_VAL if no readings found.
-    for key, val in scan_rssi.items():
-        filtered_rssi_values = [x for x in val["data"] if not math.isnan(x)]
-        if filtered_rssi_values:
-            scan_rssi[key]["mean"] = statistics.mean(filtered_rssi_values)
-            if len(filtered_rssi_values) > 1:
-                scan_rssi[key]["stdev"] = statistics.stdev(filtered_rssi_values)
-            else:
-                scan_rssi[key]["stdev"] = 0
-        else:
-            scan_rssi[key]["mean"] = RSSI_ERROR_VAL
-            scan_rssi[key]["stdev"] = RSSI_ERROR_VAL
-    return scan_rssi
-
-
-def get_sw_signature(dut):
-    bdf_output = dut.adb.shell("cksum /vendor/firmware/bcmdhd*")
-    logging.debug("BDF Checksum output: {}".format(bdf_output))
-    bdf_signature = (
-        sum([int(line.split(" ")[0]) for line in bdf_output.splitlines()]) % 1000
-    )
-
-    fw_version = dut.adb.shell("getprop vendor.wlan.firmware.version")
-    driver_version = dut.adb.shell("getprop vendor.wlan.driver.version")
-    logging.debug(
-        "Firmware version : {}. Driver version: {}".format(fw_version, driver_version)
-    )
-    fw_signature = "{}+{}".format(fw_version, driver_version)
-    fw_signature = int(hashlib.md5(fw_signature.encode()).hexdigest(), 16) % 1000
-    serial_hash = int(hashlib.md5(dut.serial.encode()).hexdigest(), 16) % 1000
-    return {
-        "config_signature": bdf_signature,
-        "fw_signature": fw_signature,
-        "serial_hash": serial_hash,
-    }
-
-
-def get_country_code(dut):
-    try:
-        country_code = dut.adb.shell("wl country").split(" ")[0]
-    except:
-        country_code = "XZ"
-    if country_code == "XZ":
-        country_code = "WW"
-    logging.debug("Country code: {}".format(country_code))
-    return country_code
-
-
-def push_config(dut, config_file):
-    config_files_list = dut.adb.shell("ls /vendor/etc/*.cal").splitlines()
-    for dst_file in config_files_list:
-        dut.push_system_file(config_file, dst_file)
-    dut.reboot()
-
-
-def start_wifi_logging(dut):
-    pass
-
-
-def stop_wifi_logging(dut):
-    pass
-
-
-def push_firmware(dut, firmware_files):
-    """Function to push Wifi firmware files
-
-    Args:
-        dut: dut to push bdf file to
-        firmware_files: path to wlanmdsp.mbn file
-        datamsc_file: path to Data.msc file
-    """
-    for file in firmware_files:
-        dut.push_system_file(file, "/vendor/firmware/")
-    dut.reboot()
-
-
-def disable_beamforming(dut):
-    dut.adb.shell("wl txbf 0")
-
-
-def set_nss_capability(dut, nss):
-    dut.adb.shell("wl he omi -r {} -t {}".format(nss, nss))
-
-
-def set_chain_mask(dut, chain):
-    if chain == "2x2":
-        chain = 3
-    else:
-        chain = chain + 1
-    # Get current chain mask
-    try:
-        curr_tx_chain = int(dut.adb.shell("wl txchain"))
-        curr_rx_chain = int(dut.adb.shell("wl rxchain"))
-    except:
-        curr_tx_chain = -1
-        curr_rx_chain = -1
-    if curr_tx_chain == chain and curr_rx_chain == chain:
-        return
-    # Set chain mask if needed
-    dut.adb.shell("wl down")
-    time.sleep(VERY_SHORT_SLEEP)
-    dut.adb.shell("wl txchain 0x{}".format(chain))
-    dut.adb.shell("wl rxchain 0x{}".format(chain))
-    dut.adb.shell("wl up")
-
-
-class LinkLayerStats:
-    LLSTATS_CMD = "wl dump ampdu; wl counters;"
-    LL_STATS_CLEAR_CMD = "wl dump_clear ampdu; wl reset_cnts;"
-    BW_REGEX = re.compile(r"Chanspec:.+ (?P<bandwidth>[0-9]+)MHz")
-    MCS_REGEX = re.compile(r"(?P<count>[0-9]+)\((?P<percent>[0-9]+)%\)")
-    RX_REGEX = re.compile(
-        r"RX (?P<mode>\S+)\s+:\s*(?P<nss1>[0-9, ,(,),%]*)"
-        "\n\s*:?\s*(?P<nss2>[0-9, ,(,),%]*)"
-    )
-    TX_REGEX = re.compile(
-        r"TX (?P<mode>\S+)\s+:\s*(?P<nss1>[0-9, ,(,),%]*)"
-        "\n\s*:?\s*(?P<nss2>[0-9, ,(,),%]*)"
-    )
-    TX_PER_REGEX = re.compile(
-        r"(?P<mode>\S+) PER\s+:\s*(?P<nss1>[0-9, ,(,),%]*)"
-        "\n\s*:?\s*(?P<nss2>[0-9, ,(,),%]*)"
-    )
-    RX_FCS_REGEX = re.compile(
-        r"rxbadfcs (?P<rx_bad_fcs>[0-9]*).+\n.+goodfcs (?P<rx_good_fcs>[0-9]*)"
-    )
-    RX_AGG_REGEX = re.compile(r"rxmpduperampdu (?P<aggregation>[0-9]*)")
-    TX_AGG_REGEX = re.compile(r" mpduperampdu (?P<aggregation>[0-9]*)")
-    TX_AGG_STOP_REGEX = re.compile(
-        r"agg stop reason: tot_agg_tried (?P<agg_tried>[0-9]+) agg_txcancel (?P<agg_canceled>[0-9]+) (?P<agg_stop_reason>.+)"
-    )
-    TX_AGG_STOP_REASON_REGEX = re.compile(
-        r"(?P<reason>\w+) [0-9]+ \((?P<value>[0-9]+%)\)"
-    )
-    MCS_ID = collections.namedtuple(
-        "mcs_id", ["mode", "num_streams", "bandwidth", "mcs", "gi"]
-    )
-    MODE_MAP = {"0": "11a/g", "1": "11b", "2": "11n", "3": "11ac"}
-    BW_MAP = {"0": 20, "1": 40, "2": 80}
-
-    def __init__(self, dut, llstats_enabled=True):
-        self.dut = dut
-        self.llstats_enabled = llstats_enabled
-        self.llstats_cumulative = self._empty_llstats()
-        self.llstats_incremental = self._empty_llstats()
-
-    def update_stats(self):
-        if self.llstats_enabled:
-            try:
-                llstats_output = self.dut.adb.shell(self.LLSTATS_CMD, timeout=1)
-                self.dut.adb.shell_nb(self.LL_STATS_CLEAR_CMD)
-
-                wl_join = self.dut.adb.shell("wl status")
-                self.bandwidth = int(
-                    re.search(self.BW_REGEX, wl_join).group("bandwidth")
-                )
-            except:
-                llstats_output = ""
-        else:
-            llstats_output = ""
-        self._update_stats(llstats_output)
-
-    def reset_stats(self):
-        self.llstats_cumulative = self._empty_llstats()
-        self.llstats_incremental = self._empty_llstats()
-
-    def _empty_llstats(self):
-        return collections.OrderedDict(
-            mcs_stats=collections.OrderedDict(),
-            mpdu_stats=collections.OrderedDict(),
-            summary=collections.OrderedDict(),
-        )
-
-    def _empty_mcs_stat(self):
-        return collections.OrderedDict(
-            txmpdu=0, rxmpdu=0, mpdu_lost=0, retries=0, retries_short=0, retries_long=0
-        )
-
-    def _mcs_id_to_string(self, mcs_id):
-        mcs_string = "{} Nss{} MCS{} GI{}".format(
-            mcs_id.mode, mcs_id.num_streams, mcs_id.mcs, mcs_id.gi
-        )
-        return mcs_string
-
-    def _parse_mcs_stats(self, llstats_output):
-        llstats_dict = {}
-        # Look for per-peer stats
-        match = re.search(self.RX_REGEX, llstats_output)
-        if not match:
-            self.reset_stats()
-            return collections.OrderedDict()
-        # Find and process all matches for per stream stats
-        rx_match_iter = re.finditer(self.RX_REGEX, llstats_output)
-        tx_match_iter = re.finditer(self.TX_REGEX, llstats_output)
-        tx_per_match_iter = re.finditer(self.TX_PER_REGEX, llstats_output)
-        for rx_match, tx_match, tx_per_match in zip(
-            rx_match_iter, tx_match_iter, tx_per_match_iter
-        ):
-            mode = rx_match.group("mode")
-            mode = "HT" if mode == "MCS" else mode
-            for nss in [1, 2]:
-                rx_mcs_iter = re.finditer(self.MCS_REGEX, rx_match.group(nss + 1))
-                tx_mcs_iter = re.finditer(self.MCS_REGEX, tx_match.group(nss + 1))
-                tx_per_iter = re.finditer(self.MCS_REGEX, tx_per_match.group(nss + 1))
-                for mcs, (rx_mcs_stats, tx_mcs_stats, tx_per_mcs_stats) in enumerate(
-                    itertools.zip_longest(rx_mcs_iter, tx_mcs_iter, tx_per_iter)
-                ):
-                    current_mcs = self.MCS_ID(
-                        mode,
-                        nss,
-                        self.bandwidth,
-                        mcs + int(8 * (mode == "HT") * (nss - 1)),
-                        0,
-                    )
-                    current_stats = collections.OrderedDict(
-                        txmpdu=int(tx_mcs_stats.group("count")) if tx_mcs_stats else 0,
-                        rxmpdu=int(rx_mcs_stats.group("count")) if rx_mcs_stats else 0,
-                        mpdu_lost=0,
-                        retries=tx_per_mcs_stats.group("count")
-                        if tx_per_mcs_stats
-                        else 0,
-                        retries_short=0,
-                        retries_long=0,
-                        mcs_id=current_mcs,
-                    )
-                    llstats_dict[self._mcs_id_to_string(current_mcs)] = current_stats
-        return llstats_dict
-
-    def _parse_mpdu_stats(self, llstats_output):
-        rx_agg_match = re.search(self.RX_AGG_REGEX, llstats_output)
-        tx_agg_match = re.search(self.TX_AGG_REGEX, llstats_output)
-        tx_agg_stop_match = re.search(self.TX_AGG_STOP_REGEX, llstats_output)
-        rx_fcs_match = re.search(self.RX_FCS_REGEX, llstats_output)
-
-        if rx_agg_match and tx_agg_match and tx_agg_stop_match and rx_fcs_match:
-            agg_stop_dict = collections.OrderedDict(
-                rx_aggregation=int(rx_agg_match.group("aggregation")),
-                tx_aggregation=int(tx_agg_match.group("aggregation")),
-                tx_agg_tried=int(tx_agg_stop_match.group("agg_tried")),
-                tx_agg_canceled=int(tx_agg_stop_match.group("agg_canceled")),
-                rx_good_fcs=int(rx_fcs_match.group("rx_good_fcs")),
-                rx_bad_fcs=int(rx_fcs_match.group("rx_bad_fcs")),
-                agg_stop_reason=collections.OrderedDict(),
-            )
-            agg_reason_match = re.finditer(
-                self.TX_AGG_STOP_REASON_REGEX,
-                tx_agg_stop_match.group("agg_stop_reason"),
-            )
-            for reason_match in agg_reason_match:
-                agg_stop_dict["agg_stop_reason"][
-                    reason_match.group("reason")
-                ] = reason_match.group("value")
-
-        else:
-            agg_stop_dict = collections.OrderedDict(
-                rx_aggregation=0,
-                tx_aggregation=0,
-                tx_agg_tried=0,
-                tx_agg_canceled=0,
-                rx_good_fcs=0,
-                rx_bad_fcs=0,
-                agg_stop_reason=None,
-            )
-        return agg_stop_dict
-
-    def _generate_stats_summary(self, llstats_dict):
-        llstats_summary = collections.OrderedDict(
-            common_tx_mcs=None,
-            common_tx_mcs_count=0,
-            common_tx_mcs_freq=0,
-            common_rx_mcs=None,
-            common_rx_mcs_count=0,
-            common_rx_mcs_freq=0,
-            rx_per=float("nan"),
-        )
-        mcs_ids = []
-        tx_mpdu = []
-        rx_mpdu = []
-        phy_rates = []
-        for mcs_str, mcs_stats in llstats_dict["mcs_stats"].items():
-            mcs_id = mcs_stats["mcs_id"]
-            mcs_ids.append(mcs_str)
-            tx_mpdu.append(mcs_stats["txmpdu"])
-            rx_mpdu.append(mcs_stats["rxmpdu"])
-            phy_rates.append(
-                RATE_TABLE[mcs_id.mode][mcs_id.num_streams][mcs_id.bandwidth][
-                    mcs_id.mcs
-                ]
-            )
-        if len(tx_mpdu) == 0 or len(rx_mpdu) == 0:
-            return llstats_summary
-        llstats_summary["common_tx_mcs"] = mcs_ids[numpy.argmax(tx_mpdu)]
-        llstats_summary["common_tx_mcs_count"] = numpy.max(tx_mpdu)
-        llstats_summary["common_rx_mcs"] = mcs_ids[numpy.argmax(rx_mpdu)]
-        llstats_summary["common_rx_mcs_count"] = numpy.max(rx_mpdu)
-        if sum(tx_mpdu) and sum(rx_mpdu):
-            llstats_summary["mean_tx_phy_rate"] = numpy.average(
-                phy_rates, weights=tx_mpdu
-            )
-            llstats_summary["mean_rx_phy_rate"] = numpy.average(
-                phy_rates, weights=rx_mpdu
-            )
-            llstats_summary["common_tx_mcs_freq"] = llstats_summary[
-                "common_tx_mcs_count"
-            ] / sum(tx_mpdu)
-            llstats_summary["common_rx_mcs_freq"] = llstats_summary[
-                "common_rx_mcs_count"
-            ] / sum(rx_mpdu)
-            total_rx_frames = (
-                llstats_dict["mpdu_stats"]["rx_good_fcs"]
-                + llstats_dict["mpdu_stats"]["rx_bad_fcs"]
-            )
-            if total_rx_frames:
-                llstats_summary["rx_per"] = (
-                    llstats_dict["mpdu_stats"]["rx_bad_fcs"] / (total_rx_frames)
-                ) * 100
-        return llstats_summary
-
-    def _update_stats(self, llstats_output):
-        self.llstats_cumulative = self._empty_llstats()
-        self.llstats_incremental = self._empty_llstats()
-        self.llstats_incremental["raw_output"] = llstats_output
-        self.llstats_incremental["mcs_stats"] = self._parse_mcs_stats(llstats_output)
-        self.llstats_incremental["mpdu_stats"] = self._parse_mpdu_stats(llstats_output)
-        self.llstats_incremental["summary"] = self._generate_stats_summary(
-            self.llstats_incremental
-        )
-        self.llstats_cumulative["summary"] = self._generate_stats_summary(
-            self.llstats_cumulative
-        )
diff --git a/src/antlion/test_utils/wifi/wifi_performance_test_utils/ping_utils.py b/src/antlion/test_utils/wifi/wifi_performance_test_utils/ping_utils.py
deleted file mode 100644
index 8d0dff5..0000000
--- a/src/antlion/test_utils/wifi/wifi_performance_test_utils/ping_utils.py
+++ /dev/null
@@ -1,132 +0,0 @@
-#!/usr/bin/env python3.4
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import re
-
-RTT_REGEX = re.compile(r"^\[(?P<timestamp>\S+)\] .*? time=(?P<rtt>\S+)")
-LOSS_REGEX = re.compile(r"(?P<loss>\S+)% packet loss")
-
-
-class PingResult(object):
-    """An object that contains the results of running ping command.
-
-    Attributes:
-        connected: True if a connection was made. False otherwise.
-        packet_loss_percentage: The total percentage of packets lost.
-        transmission_times: The list of PingTransmissionTimes containing the
-            timestamps gathered for transmitted packets.
-        rtts: An list-like object enumerating all round-trip-times of
-            transmitted packets.
-        timestamps: A list-like object enumerating the beginning timestamps of
-            each packet transmission.
-        ping_interarrivals: A list-like object enumerating the amount of time
-            between the beginning of each subsequent transmission.
-    """
-
-    def __init__(self, ping_output):
-        self.packet_loss_percentage = 100
-        self.transmission_times = []
-
-        self.rtts = _ListWrap(self.transmission_times, lambda entry: entry.rtt)
-        self.timestamps = _ListWrap(
-            self.transmission_times, lambda entry: entry.timestamp
-        )
-        self.ping_interarrivals = _PingInterarrivals(self.transmission_times)
-
-        self.start_time = 0
-        for line in ping_output:
-            if "loss" in line:
-                match = re.search(LOSS_REGEX, line)
-                self.packet_loss_percentage = float(match.group("loss"))
-            if "time=" in line:
-                match = re.search(RTT_REGEX, line)
-                if self.start_time == 0:
-                    self.start_time = float(match.group("timestamp"))
-                self.transmission_times.append(
-                    PingTransmissionTimes(
-                        float(match.group("timestamp")) - self.start_time,
-                        float(match.group("rtt")),
-                    )
-                )
-        self.connected = len(ping_output) > 1 and self.packet_loss_percentage < 100
-
-    def __getitem__(self, item):
-        if item == "rtt":
-            return self.rtts
-        if item == "connected":
-            return self.connected
-        if item == "packet_loss_percentage":
-            return self.packet_loss_percentage
-        raise ValueError("Invalid key. Please use an attribute instead.")
-
-    def as_dict(self):
-        return {
-            "connected": 1 if self.connected else 0,
-            "rtt": list(self.rtts),
-            "time_stamp": list(self.timestamps),
-            "ping_interarrivals": list(self.ping_interarrivals),
-            "packet_loss_percentage": self.packet_loss_percentage,
-        }
-
-
-class PingTransmissionTimes(object):
-    """A class that holds the timestamps for a packet sent via the ping command.
-
-    Attributes:
-        rtt: The round trip time for the packet sent.
-        timestamp: The timestamp the packet started its trip.
-    """
-
-    def __init__(self, timestamp, rtt):
-        self.rtt = rtt
-        self.timestamp = timestamp
-
-
-class _ListWrap(object):
-    """A convenient helper class for treating list iterators as native lists."""
-
-    def __init__(self, wrapped_list, func):
-        self.__wrapped_list = wrapped_list
-        self.__func = func
-
-    def __getitem__(self, key):
-        return self.__func(self.__wrapped_list[key])
-
-    def __iter__(self):
-        for item in self.__wrapped_list:
-            yield self.__func(item)
-
-    def __len__(self):
-        return len(self.__wrapped_list)
-
-
-class _PingInterarrivals(object):
-    """A helper class for treating ping interarrivals as a native list."""
-
-    def __init__(self, ping_entries):
-        self.__ping_entries = ping_entries
-
-    def __getitem__(self, key):
-        return (
-            self.__ping_entries[key + 1].timestamp - self.__ping_entries[key].timestamp
-        )
-
-    def __iter__(self):
-        for index in range(len(self.__ping_entries) - 1):
-            yield self[index]
-
-    def __len__(self):
-        return max(0, len(self.__ping_entries) - 1)
diff --git a/src/antlion/test_utils/wifi/wifi_performance_test_utils/qcom_utils.py b/src/antlion/test_utils/wifi/wifi_performance_test_utils/qcom_utils.py
deleted file mode 100644
index 2f50cf1..0000000
--- a/src/antlion/test_utils/wifi/wifi_performance_test_utils/qcom_utils.py
+++ /dev/null
@@ -1,470 +0,0 @@
-#!/usr/bin/env python3.4
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import collections
-import hashlib
-import logging
-import math
-import os
-import re
-import statistics
-import time
-
-from mobly import asserts
-
-SHORT_SLEEP = 1
-MED_SLEEP = 6
-STATION_DUMP = "iw {} station dump"
-SCAN = "wpa_cli scan"
-SCAN_RESULTS = "wpa_cli scan_results"
-SIGNAL_POLL = "wpa_cli signal_poll"
-WPA_CLI_STATUS = "wpa_cli status"
-RSSI_ERROR_VAL = float("nan")
-FW_REGEX = re.compile(r"FW:(?P<firmware>\S+) HW:")
-
-
-# Rssi Utilities
-def empty_rssi_result():
-    return collections.OrderedDict([("data", []), ("mean", None), ("stdev", None)])
-
-
-def get_connected_rssi(
-    dut,
-    num_measurements=1,
-    polling_frequency=SHORT_SLEEP,
-    first_measurement_delay=0,
-    disconnect_warning=True,
-    ignore_samples=0,
-    interface="wlan0",
-):
-    # yapf: disable
-    connected_rssi = collections.OrderedDict(
-        [('time_stamp', []),
-         ('bssid', []), ('ssid', []), ('frequency', []),
-         ('signal_poll_rssi', empty_rssi_result()),
-         ('signal_poll_avg_rssi', empty_rssi_result()),
-         ('chain_0_rssi', empty_rssi_result()),
-         ('chain_1_rssi', empty_rssi_result())])
-    # yapf: enable
-    previous_bssid = "disconnected"
-    t0 = time.time()
-    time.sleep(first_measurement_delay)
-    for idx in range(num_measurements):
-        measurement_start_time = time.time()
-        connected_rssi["time_stamp"].append(measurement_start_time - t0)
-        # Get signal poll RSSI
-        try:
-            status_output = dut.adb.shell("wpa_cli -i {} status".format(interface))
-        except:
-            status_output = ""
-        match = re.search("bssid=.*", status_output)
-        if match:
-            current_bssid = match.group(0).split("=")[1]
-            connected_rssi["bssid"].append(current_bssid)
-        else:
-            current_bssid = "disconnected"
-            connected_rssi["bssid"].append(current_bssid)
-            if disconnect_warning and previous_bssid != "disconnected":
-                logging.warning("WIFI DISCONNECT DETECTED!")
-        previous_bssid = current_bssid
-        match = re.search("\s+ssid=.*", status_output)
-        if match:
-            ssid = match.group(0).split("=")[1]
-            connected_rssi["ssid"].append(ssid)
-        else:
-            connected_rssi["ssid"].append("disconnected")
-        try:
-            signal_poll_output = dut.adb.shell(
-                "wpa_cli -i {} signal_poll".format(interface)
-            )
-        except:
-            signal_poll_output = ""
-        match = re.search("FREQUENCY=.*", signal_poll_output)
-        if match:
-            frequency = int(match.group(0).split("=")[1])
-            connected_rssi["frequency"].append(frequency)
-        else:
-            connected_rssi["frequency"].append(RSSI_ERROR_VAL)
-        match = re.search("RSSI=.*", signal_poll_output)
-        if match:
-            temp_rssi = int(match.group(0).split("=")[1])
-            if temp_rssi == -9999 or temp_rssi == 0:
-                connected_rssi["signal_poll_rssi"]["data"].append(RSSI_ERROR_VAL)
-            else:
-                connected_rssi["signal_poll_rssi"]["data"].append(temp_rssi)
-        else:
-            connected_rssi["signal_poll_rssi"]["data"].append(RSSI_ERROR_VAL)
-        match = re.search("AVG_RSSI=.*", signal_poll_output)
-        if match:
-            connected_rssi["signal_poll_avg_rssi"]["data"].append(
-                int(match.group(0).split("=")[1])
-            )
-        else:
-            connected_rssi["signal_poll_avg_rssi"]["data"].append(RSSI_ERROR_VAL)
-
-        # Get per chain RSSI
-        try:
-            per_chain_rssi = dut.adb.shell(STATION_DUMP.format(interface))
-        except:
-            per_chain_rssi = ""
-        match = re.search(".*signal avg:.*", per_chain_rssi)
-        if match:
-            per_chain_rssi = per_chain_rssi[
-                per_chain_rssi.find("[") + 1 : per_chain_rssi.find("]")
-            ]
-            per_chain_rssi = per_chain_rssi.split(", ")
-            connected_rssi["chain_0_rssi"]["data"].append(int(per_chain_rssi[0]))
-            connected_rssi["chain_1_rssi"]["data"].append(int(per_chain_rssi[1]))
-        else:
-            connected_rssi["chain_0_rssi"]["data"].append(RSSI_ERROR_VAL)
-            connected_rssi["chain_1_rssi"]["data"].append(RSSI_ERROR_VAL)
-        measurement_elapsed_time = time.time() - measurement_start_time
-        time.sleep(max(0, polling_frequency - measurement_elapsed_time))
-
-    # Compute mean RSSIs. Only average valid readings.
-    # Output RSSI_ERROR_VAL if no valid connected readings found.
-    for key, val in connected_rssi.copy().items():
-        if "data" not in val:
-            continue
-        filtered_rssi_values = [x for x in val["data"] if not math.isnan(x)]
-        if len(filtered_rssi_values) > ignore_samples:
-            filtered_rssi_values = filtered_rssi_values[ignore_samples:]
-        if filtered_rssi_values:
-            connected_rssi[key]["mean"] = statistics.mean(filtered_rssi_values)
-            if len(filtered_rssi_values) > 1:
-                connected_rssi[key]["stdev"] = statistics.stdev(filtered_rssi_values)
-            else:
-                connected_rssi[key]["stdev"] = 0
-        else:
-            connected_rssi[key]["mean"] = RSSI_ERROR_VAL
-            connected_rssi[key]["stdev"] = RSSI_ERROR_VAL
-    return connected_rssi
-
-
-def get_scan_rssi(dut, tracked_bssids, num_measurements=1):
-    scan_rssi = collections.OrderedDict()
-    for bssid in tracked_bssids:
-        scan_rssi[bssid] = empty_rssi_result()
-    for idx in range(num_measurements):
-        scan_output = dut.adb.shell(SCAN)
-        time.sleep(MED_SLEEP)
-        scan_output = dut.adb.shell(SCAN_RESULTS)
-        for bssid in tracked_bssids:
-            bssid_result = re.search(bssid + ".*", scan_output, flags=re.IGNORECASE)
-            if bssid_result:
-                bssid_result = bssid_result.group(0).split("\t")
-                scan_rssi[bssid]["data"].append(int(bssid_result[2]))
-            else:
-                scan_rssi[bssid]["data"].append(RSSI_ERROR_VAL)
-    # Compute mean RSSIs. Only average valid readings.
-    # Output RSSI_ERROR_VAL if no readings found.
-    for key, val in scan_rssi.items():
-        filtered_rssi_values = [x for x in val["data"] if not math.isnan(x)]
-        if filtered_rssi_values:
-            scan_rssi[key]["mean"] = statistics.mean(filtered_rssi_values)
-            if len(filtered_rssi_values) > 1:
-                scan_rssi[key]["stdev"] = statistics.stdev(filtered_rssi_values)
-            else:
-                scan_rssi[key]["stdev"] = 0
-        else:
-            scan_rssi[key]["mean"] = RSSI_ERROR_VAL
-            scan_rssi[key]["stdev"] = RSSI_ERROR_VAL
-    return scan_rssi
-
-
-def get_sw_signature(dut):
-    bdf_output = dut.adb.shell("cksum /vendor/firmware/bdwlan*")
-    logging.debug("BDF Checksum output: {}".format(bdf_output))
-    bdf_signature = (
-        sum([int(line.split(" ")[0]) for line in bdf_output.splitlines()]) % 1000
-    )
-
-    fw_output = dut.adb.shell("halutil -logger -get fw")
-    logging.debug("Firmware version output: {}".format(fw_output))
-    fw_version = re.search(FW_REGEX, fw_output).group("firmware")
-    fw_signature = fw_version.split(".")[-3:-1]
-    fw_signature = float(".".join(fw_signature))
-    serial_hash = int(hashlib.md5(dut.serial.encode()).hexdigest(), 16) % 1000
-    return {
-        "config_signature": bdf_signature,
-        "fw_signature": fw_signature,
-        "serial_hash": serial_hash,
-    }
-
-
-def get_country_code(dut):
-    country_code = dut.adb.shell("iw reg get | grep country | head -1")
-    country_code = country_code.split(":")[0].split(" ")[1]
-    if country_code == "00":
-        country_code = "WW"
-    return country_code
-
-
-def push_config(dut, config_file):
-    config_files_list = dut.adb.shell("ls /vendor/firmware/bdwlan*").splitlines()
-    for dst_file in config_files_list:
-        dut.push_system_file(config_file, dst_file)
-    dut.reboot()
-
-
-def start_wifi_logging(dut):
-    dut.droid.wifiEnableVerboseLogging(1)
-    msg = "Failed to enable WiFi verbose logging."
-    asserts.assert_equal(dut.droid.wifiGetVerboseLoggingLevel(), 1, msg)
-    logging.info("Starting CNSS logs")
-    dut.adb.shell(
-        "find /data/vendor/wifi/wlan_logs/ -type f -delete", ignore_status=True
-    )
-    dut.adb.shell_nb("cnss_diag -f -s")
-
-
-def stop_wifi_logging(dut):
-    logging.info("Stopping CNSS logs")
-    dut.adb.shell("killall cnss_diag")
-    logs = dut.get_file_names("/data/vendor/wifi/wlan_logs/")
-    if logs:
-        dut.log.info("Pulling cnss_diag logs %s", logs)
-        log_path = os.path.join(dut.device_log_path, "CNSS_DIAG_%s" % dut.serial)
-        os.makedirs(log_path, exist_ok=True)
-        dut.pull_files(logs, log_path)
-
-
-def push_firmware(dut, firmware_files):
-    """Function to push Wifi firmware files
-
-    Args:
-        dut: dut to push bdf file to
-        firmware_files: path to wlanmdsp.mbn file
-        datamsc_file: path to Data.msc file
-    """
-    for file in firmware_files:
-        dut.push_system_file(file, "/vendor/firmware/")
-    dut.reboot()
-
-
-def _set_ini_fields(ini_file_path, ini_field_dict):
-    template_regex = r"^{}=[0-9,.x-]+"
-    with open(ini_file_path, "r") as f:
-        ini_lines = f.read().splitlines()
-        for idx, line in enumerate(ini_lines):
-            for field_name, field_value in ini_field_dict.items():
-                line_regex = re.compile(template_regex.format(field_name))
-                if re.match(line_regex, line):
-                    ini_lines[idx] = "{}={}".format(field_name, field_value)
-                    print(ini_lines[idx])
-    with open(ini_file_path, "w") as f:
-        f.write("\n".join(ini_lines) + "\n")
-
-
-def _edit_dut_ini(dut, ini_fields):
-    """Function to edit Wifi ini files."""
-    dut_ini_path = "/vendor/firmware/wlan/qca_cld/WCNSS_qcom_cfg.ini"
-    local_ini_path = os.path.expanduser("~/WCNSS_qcom_cfg.ini")
-    dut.pull_files(dut_ini_path, local_ini_path)
-
-    _set_ini_fields(local_ini_path, ini_fields)
-
-    dut.push_system_file(local_ini_path, dut_ini_path)
-    dut.reboot()
-
-
-def set_chain_mask(dut, chain_mask):
-    curr_mask = getattr(dut, "chain_mask", "2x2")
-    if curr_mask == chain_mask:
-        return
-    dut.chain_mask = chain_mask
-    if chain_mask == "2x2":
-        ini_fields = {
-            "gEnable2x2": 2,
-            "gSetTxChainmask1x1": 1,
-            "gSetRxChainmask1x1": 1,
-            "gDualMacFeatureDisable": 6,
-            "gDot11Mode": 0,
-        }
-    else:
-        ini_fields = {
-            "gEnable2x2": 0,
-            "gSetTxChainmask1x1": chain_mask + 1,
-            "gSetRxChainmask1x1": chain_mask + 1,
-            "gDualMacFeatureDisable": 1,
-            "gDot11Mode": 0,
-        }
-    _edit_dut_ini(dut, ini_fields)
-
-
-def set_wifi_mode(dut, mode):
-    TX_MODE_DICT = {
-        "Auto": 0,
-        "11n": 4,
-        "11ac": 9,
-        "11abg": 1,
-        "11b": 2,
-        "11": 3,
-        "11g only": 5,
-        "11n only": 6,
-        "11b only": 7,
-        "11ac only": 8,
-    }
-
-    ini_fields = {
-        "gEnable2x2": 2,
-        "gSetTxChainmask1x1": 1,
-        "gSetRxChainmask1x1": 1,
-        "gDualMacFeatureDisable": 6,
-        "gDot11Mode": TX_MODE_DICT[mode],
-    }
-    _edit_dut_ini(dut, ini_fields)
-
-
-class LinkLayerStats:
-    LLSTATS_CMD = "cat /d/wlan0/ll_stats"
-    PEER_REGEX = "LL_STATS_PEER_ALL"
-    MCS_REGEX = re.compile(
-        r"preamble: (?P<mode>\S+), nss: (?P<num_streams>\S+), bw: (?P<bw>\S+), "
-        "mcs: (?P<mcs>\S+), bitrate: (?P<rate>\S+), txmpdu: (?P<txmpdu>\S+), "
-        "rxmpdu: (?P<rxmpdu>\S+), mpdu_lost: (?P<mpdu_lost>\S+), "
-        "retries: (?P<retries>\S+), retries_short: (?P<retries_short>\S+), "
-        "retries_long: (?P<retries_long>\S+)"
-    )
-    MCS_ID = collections.namedtuple(
-        "mcs_id", ["mode", "num_streams", "bandwidth", "mcs", "rate"]
-    )
-    MODE_MAP = {"0": "11a/g", "1": "11b", "2": "11n", "3": "11ac"}
-    BW_MAP = {"0": 20, "1": 40, "2": 80}
-
-    def __init__(self, dut, llstats_enabled=True):
-        self.dut = dut
-        self.llstats_enabled = llstats_enabled
-        self.llstats_cumulative = self._empty_llstats()
-        self.llstats_incremental = self._empty_llstats()
-
-    def update_stats(self):
-        if self.llstats_enabled:
-            try:
-                llstats_output = self.dut.adb.shell(self.LLSTATS_CMD, timeout=0.1)
-            except:
-                llstats_output = ""
-        else:
-            llstats_output = ""
-        self._update_stats(llstats_output)
-
-    def reset_stats(self):
-        self.llstats_cumulative = self._empty_llstats()
-        self.llstats_incremental = self._empty_llstats()
-
-    def _empty_llstats(self):
-        return collections.OrderedDict(
-            mcs_stats=collections.OrderedDict(), summary=collections.OrderedDict()
-        )
-
-    def _empty_mcs_stat(self):
-        return collections.OrderedDict(
-            txmpdu=0, rxmpdu=0, mpdu_lost=0, retries=0, retries_short=0, retries_long=0
-        )
-
-    def _mcs_id_to_string(self, mcs_id):
-        mcs_string = "{} {}MHz Nss{} MCS{} {}Mbps".format(
-            mcs_id.mode, mcs_id.bandwidth, mcs_id.num_streams, mcs_id.mcs, mcs_id.rate
-        )
-        return mcs_string
-
-    def _parse_mcs_stats(self, llstats_output):
-        llstats_dict = {}
-        # Look for per-peer stats
-        match = re.search(self.PEER_REGEX, llstats_output)
-        if not match:
-            self.reset_stats()
-            return collections.OrderedDict()
-        # Find and process all matches for per stream stats
-        match_iter = re.finditer(self.MCS_REGEX, llstats_output)
-        for match in match_iter:
-            current_mcs = self.MCS_ID(
-                self.MODE_MAP[match.group("mode")],
-                int(match.group("num_streams")) + 1,
-                self.BW_MAP[match.group("bw")],
-                int(match.group("mcs")),
-                int(match.group("rate"), 16) / 1000,
-            )
-            current_stats = collections.OrderedDict(
-                txmpdu=int(match.group("txmpdu")),
-                rxmpdu=int(match.group("rxmpdu")),
-                mpdu_lost=int(match.group("mpdu_lost")),
-                retries=int(match.group("retries")),
-                retries_short=int(match.group("retries_short")),
-                retries_long=int(match.group("retries_long")),
-            )
-            llstats_dict[self._mcs_id_to_string(current_mcs)] = current_stats
-        return llstats_dict
-
-    def _diff_mcs_stats(self, new_stats, old_stats):
-        stats_diff = collections.OrderedDict()
-        for stat_key in new_stats.keys():
-            stats_diff[stat_key] = new_stats[stat_key] - old_stats[stat_key]
-        return stats_diff
-
-    def _generate_stats_summary(self, llstats_dict):
-        llstats_summary = collections.OrderedDict(
-            common_tx_mcs=None,
-            common_tx_mcs_count=0,
-            common_tx_mcs_freq=0,
-            common_rx_mcs=None,
-            common_rx_mcs_count=0,
-            common_rx_mcs_freq=0,
-            rx_per=float("nan"),
-        )
-
-        txmpdu_count = 0
-        rxmpdu_count = 0
-        for mcs_id, mcs_stats in llstats_dict["mcs_stats"].items():
-            if mcs_stats["txmpdu"] > llstats_summary["common_tx_mcs_count"]:
-                llstats_summary["common_tx_mcs"] = mcs_id
-                llstats_summary["common_tx_mcs_count"] = mcs_stats["txmpdu"]
-            if mcs_stats["rxmpdu"] > llstats_summary["common_rx_mcs_count"]:
-                llstats_summary["common_rx_mcs"] = mcs_id
-                llstats_summary["common_rx_mcs_count"] = mcs_stats["rxmpdu"]
-            txmpdu_count += mcs_stats["txmpdu"]
-            rxmpdu_count += mcs_stats["rxmpdu"]
-        if txmpdu_count:
-            llstats_summary["common_tx_mcs_freq"] = (
-                llstats_summary["common_tx_mcs_count"] / txmpdu_count
-            )
-        if rxmpdu_count:
-            llstats_summary["common_rx_mcs_freq"] = (
-                llstats_summary["common_rx_mcs_count"] / rxmpdu_count
-            )
-        return llstats_summary
-
-    def _update_stats(self, llstats_output):
-        # Parse stats
-        new_llstats = self._empty_llstats()
-        new_llstats["mcs_stats"] = self._parse_mcs_stats(llstats_output)
-        # Save old stats and set new cumulative stats
-        old_llstats = self.llstats_cumulative.copy()
-        self.llstats_cumulative = new_llstats.copy()
-        # Compute difference between new and old stats
-        self.llstats_incremental = self._empty_llstats()
-        for mcs_id, new_mcs_stats in new_llstats["mcs_stats"].items():
-            old_mcs_stats = old_llstats["mcs_stats"].get(mcs_id, self._empty_mcs_stat())
-            self.llstats_incremental["mcs_stats"][mcs_id] = self._diff_mcs_stats(
-                new_mcs_stats, old_mcs_stats
-            )
-        # Generate llstats summary
-        self.llstats_incremental["summary"] = self._generate_stats_summary(
-            self.llstats_incremental
-        )
-        self.llstats_cumulative["summary"] = self._generate_stats_summary(
-            self.llstats_cumulative
-        )
diff --git a/src/antlion/test_utils/wifi/wifi_power_test_utils.py b/src/antlion/test_utils/wifi/wifi_power_test_utils.py
deleted file mode 100644
index dba8461..0000000
--- a/src/antlion/test_utils/wifi/wifi_power_test_utils.py
+++ /dev/null
@@ -1,416 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import logging
-import time
-from antlion import utils
-from antlion.libs.proc import job
-from antlion.controllers.ap_lib import bridge_interface as bi
-from antlion.test_utils.wifi import wifi_test_utils as wutils
-from antlion.controllers.adb_lib.error import AdbCommandError
-from antlion.controllers.ap_lib import hostapd_security
-from antlion.controllers.ap_lib import hostapd_ap_preset
-
-# http://www.secdev.org/projects/scapy/
-# On ubuntu, sudo pip3 install scapy
-import scapy.all as scapy
-
-GET_FROM_PHONE = "get_from_dut"
-GET_FROM_AP = "get_from_ap"
-ENABLED_MODULATED_DTIM = "gEnableModulatedDTIM="
-MAX_MODULATED_DTIM = "gMaxLIModulatedDTIM="
-
-
-def change_dtim(ad, gEnableModulatedDTIM, gMaxLIModulatedDTIM=10):
-    """Function to change the DTIM setting in the phone.
-
-    Args:
-        ad: the target android device, AndroidDevice object
-        gEnableModulatedDTIM: Modulated DTIM, int
-        gMaxLIModulatedDTIM: Maximum modulated DTIM, int
-    """
-    ad.log.info("Sets dtim to {}".format(gEnableModulatedDTIM))
-
-    # In P21 the dtim setting method changed and an AdbCommandError will take
-    # place to get ini_file_phone. Thus add try/except block for the old method.
-    # If error occurs, use change_dtim_adb method later. Otherwise, first trying
-    # to find the ini file with DTIM settings
-    try:
-        ini_file_phone = ad.adb.shell("ls /vendor/firmware/wlan/*/*.ini")
-
-    except AdbCommandError as e:
-        # Gets AdbCommandError, change dtim later with change_dtim_adb merthod.
-        # change_dtim_adb requires that wifi connection is on.
-        ad.log.info("Gets AdbCommandError, change dtim with change_dtim_adb.")
-        change_dtim_adb(ad, gEnableModulatedDTIM)
-        return 0
-
-    ini_file_local = ini_file_phone.split("/")[-1]
-
-    # Pull the file and change the DTIM to desired value
-    ad.adb.pull("{} {}".format(ini_file_phone, ini_file_local))
-
-    with open(ini_file_local, "r") as fin:
-        for line in fin:
-            if ENABLED_MODULATED_DTIM in line:
-                gE_old = line.strip("\n")
-                gEDTIM_old = line.strip(ENABLED_MODULATED_DTIM).strip("\n")
-            if MAX_MODULATED_DTIM in line:
-                gM_old = line.strip("\n")
-                gMDTIM_old = line.strip(MAX_MODULATED_DTIM).strip("\n")
-    fin.close()
-    if (
-        int(gEDTIM_old) == gEnableModulatedDTIM
-        and int(gMDTIM_old) == gMaxLIModulatedDTIM
-    ):
-        ad.log.info("Current DTIM is already the desired value," "no need to reset it")
-        return 0
-
-    gE_new = ENABLED_MODULATED_DTIM + str(gEnableModulatedDTIM)
-    gM_new = MAX_MODULATED_DTIM + str(gMaxLIModulatedDTIM)
-
-    sed_gE = "sed -i 's/{}/{}/g' {}".format(gE_old, gE_new, ini_file_local)
-    sed_gM = "sed -i 's/{}/{}/g' {}".format(gM_old, gM_new, ini_file_local)
-    job.run(sed_gE)
-    job.run(sed_gM)
-
-    # Push the file to the phone
-    push_file_to_phone(ad, ini_file_local, ini_file_phone)
-    ad.log.info("DTIM changes checked in and rebooting...")
-    ad.reboot()
-    # Wait for auto-wifi feature to start
-    time.sleep(20)
-    ad.adb.shell("dumpsys battery set level 100")
-    ad.log.info("DTIM updated and device back from reboot")
-    return 1
-
-
-def change_dtim_adb(ad, gEnableModulatedDTIM):
-    """Function to change the DTIM setting in the P21 phone.
-
-        This method should be run after connecting wifi.
-
-    Args:
-        ad: the target android device, AndroidDevice object
-        gEnableModulatedDTIM: Modulated DTIM, int
-    """
-    ad.log.info("Changes DTIM to {} with adb".format(gEnableModulatedDTIM))
-    ad.adb.root()
-    screen_status = ad.adb.shell("dumpsys nfc | grep Screen")
-    screen_is_on = "ON_UNLOCKED" in screen_status
-
-    # To read the dtim with 'adb shell wl bcn_li_dtim', the screen should be off
-    if screen_is_on:
-        ad.log.info("The screen is on. Set it to off before change dtim")
-        ad.droid.goToSleepNow()
-        time_limit_seconds = 60
-        _wait_screen_off(ad, time_limit_seconds)
-
-    old_dtim = _read_dtim_adb(ad)
-    ad.log.info("The dtim before change is {}".format(old_dtim))
-    try:
-        if int(old_dtim) == gEnableModulatedDTIM:
-            ad.log.info(
-                "Current DTIM is already the desired value," "no need to reset it"
-            )
-            if screen_is_on:
-                ad.log.info("Changes the screen to the original on status")
-                ad.droid.wakeUpNow()
-            return
-    except Exception as e:
-        ad.log.info("old_dtim is not available from adb")
-
-    current_dtim = _set_dtim(ad, gEnableModulatedDTIM)
-    ad.log.info("Old DTIM is {}, current DTIM is {}".format(old_dtim, current_dtim))
-    if screen_is_on:
-        ad.log.info("Changes the screen to the original on status")
-        ad.droid.wakeUpNow()
-
-
-def _set_dtim(ad, gEnableModulatedDTIM):
-    out = ad.adb.shell("halutil -dtim_config {}".format(gEnableModulatedDTIM))
-    ad.log.info("set dtim to {}, stdout: {}".format(gEnableModulatedDTIM, out))
-    return _read_dtim_adb(ad)
-
-
-def _read_dtim_adb(ad):
-    try:
-        old_dtim = ad.adb.shell("wl bcn_li_dtim")
-        return old_dtim
-    except Exception as e:
-        ad.log.info("When reading dtim get error {}".format(e))
-        return "The dtim value is not available from adb"
-
-
-def _wait_screen_off(ad, time_limit_seconds):
-    while time_limit_seconds > 0:
-        screen_status = ad.adb.shell("dumpsys nfc | grep Screen")
-        if "OFF_UNLOCKED" in screen_status:
-            ad.log.info("The screen status is {}".format(screen_status))
-            return
-        time.sleep(1)
-        time_limit_seconds -= 1
-    raise TimeoutError(
-        "Timed out while waiting the screen off after {} "
-        "seconds.".format(time_limit_seconds)
-    )
-
-
-def push_file_to_phone(ad, file_local, file_phone):
-    """Function to push local file to android phone.
-
-    Args:
-        ad: the target android device
-        file_local: the locla file to push
-        file_phone: the file/directory on the phone to be pushed
-    """
-    ad.adb.root()
-    cmd_out = ad.adb.remount()
-    if "Permission denied" in cmd_out:
-        ad.log.info("Need to disable verity first and reboot")
-        ad.adb.disable_verity()
-        time.sleep(1)
-        ad.reboot()
-        ad.log.info("Verity disabled and device back from reboot")
-        ad.adb.root()
-        ad.adb.remount()
-    time.sleep(1)
-    ad.adb.push("{} {}".format(file_local, file_phone))
-
-
-def ap_setup(ap, network, bandwidth=80, dtim_period=None):
-    """Set up the whirlwind AP with provided network info.
-
-    Args:
-        ap: access_point object of the AP
-        network: dict with information of the network, including ssid, password
-                 bssid, channel etc.
-        bandwidth: the operation bandwidth for the AP, default 80MHz
-        dtim_period: the dtim period of access point
-    Returns:
-        brconfigs: the bridge interface configs
-    """
-    log = logging.getLogger()
-    bss_settings = []
-    ssid = network[wutils.WifiEnums.SSID_KEY]
-    if "password" in network.keys():
-        password = network["password"]
-        security = hostapd_security.Security(security_mode="wpa", password=password)
-    else:
-        security = hostapd_security.Security(security_mode=None, password=None)
-    channel = network["channel"]
-    config = hostapd_ap_preset.create_ap_preset(
-        channel=channel,
-        ssid=ssid,
-        dtim_period=dtim_period,
-        security=security,
-        bss_settings=bss_settings,
-        vht_bandwidth=bandwidth,
-        profile_name="whirlwind",
-        iface_wlan_2g=ap.wlan_2g,
-        iface_wlan_5g=ap.wlan_5g,
-    )
-    config_bridge = ap.generate_bridge_configs(channel)
-    brconfigs = bi.BridgeInterfaceConfigs(
-        config_bridge[0], config_bridge[1], config_bridge[2]
-    )
-    ap.bridge.startup(brconfigs)
-    ap.start_ap(config)
-    log.info("AP started on channel {} with SSID {}".format(channel, ssid))
-    return brconfigs
-
-
-def run_iperf_client_nonblocking(ad, server_host, extra_args=""):
-    """Start iperf client on the device with nohup.
-
-    Return status as true if iperf client start successfully.
-    And data flow information as results.
-
-    Args:
-        ad: the android device under test
-        server_host: Address of the iperf server.
-        extra_args: A string representing extra arguments for iperf client,
-            e.g. "-i 1 -t 30".
-
-    """
-    log = logging.getLogger()
-    ad.adb.shell_nb(
-        "nohup >/dev/null 2>&1 sh -c 'iperf3 -c {} {} &'".format(
-            server_host, extra_args
-        )
-    )
-    log.info("IPerf client started")
-
-
-def get_wifi_rssi(ad):
-    """Get the RSSI of the device.
-
-    Args:
-        ad: the android device under test
-    Returns:
-        RSSI: the rssi level of the device
-    """
-    RSSI = ad.droid.wifiGetConnectionInfo()["rssi"]
-    return RSSI
-
-
-def get_phone_ip(ad):
-    """Get the WiFi IP address of the phone.
-
-    Args:
-        ad: the android device under test
-    Returns:
-        IP: IP address of the phone for WiFi, as a string
-    """
-    IP = ad.droid.connectivityGetIPv4Addresses("wlan0")[0]
-
-    return IP
-
-
-def get_phone_mac(ad):
-    """Get the WiFi MAC address of the phone.
-
-    Args:
-        ad: the android device under test
-    Returns:
-        mac: MAC address of the phone for WiFi, as a string
-    """
-    mac = ad.droid.wifiGetConnectionInfo()["mac_address"]
-
-    return mac
-
-
-def get_phone_ipv6(ad):
-    """Get the WiFi IPV6 address of the phone.
-
-    Args:
-        ad: the android device under test
-    Returns:
-        IPv6: IPv6 address of the phone for WiFi, as a string
-    """
-    IPv6 = ad.droid.connectivityGetLinkLocalIpv6Address("wlan0")[:-6]
-
-    return IPv6
-
-
-def wait_for_dhcp(interface_name):
-    """Wait the DHCP address assigned to desired interface.
-
-    Getting DHCP address takes time and the wait time isn't constant. Utilizing
-    utils.timeout to keep trying until success
-
-    Args:
-        interface_name: desired interface name
-    Returns:
-        ip: ip address of the desired interface name
-    Raise:
-        TimeoutError: After timeout, if no DHCP assigned, raise
-    """
-    log = logging.getLogger()
-    reset_host_interface(interface_name)
-    start_time = time.time()
-    time_limit_seconds = 60
-    ip = "0.0.0.0"
-    while start_time + time_limit_seconds > time.time():
-        ip = scapy.get_if_addr(interface_name)
-        if ip == "0.0.0.0":
-            time.sleep(1)
-        else:
-            log.info("DHCP address assigned to %s as %s" % (interface_name, ip))
-            return ip
-    raise TimeoutError(
-        "Timed out while getting if_addr after %s seconds." % time_limit_seconds
-    )
-
-
-def reset_host_interface(intferface_name):
-    """Reset the host interface.
-
-    Args:
-        intferface_name: the desired interface to reset
-    """
-    log = logging.getLogger()
-    intf_down_cmd = "ifconfig %s down" % intferface_name
-    intf_up_cmd = "ifconfig %s up" % intferface_name
-    try:
-        job.run(intf_down_cmd)
-        time.sleep(10)
-        job.run(intf_up_cmd)
-        log.info("{} has been reset".format(intferface_name))
-    except job.Error:
-        raise Exception("No such interface")
-
-
-def bringdown_host_interface(intferface_name):
-    """Reset the host interface.
-
-    Args:
-        intferface_name: the desired interface to reset
-    """
-    log = logging.getLogger()
-    intf_down_cmd = "ifconfig %s down" % intferface_name
-    try:
-        job.run(intf_down_cmd)
-        time.sleep(2)
-        log.info("{} has been brought down".format(intferface_name))
-    except job.Error:
-        raise Exception("No such interface")
-
-
-def create_pkt_config(test_class):
-    """Creates the config for generating multicast packets
-
-    Args:
-        test_class: object with all networking paramters
-
-    Returns:
-        Dictionary with the multicast packet config
-    """
-    addr_type = (
-        scapy.IPV6_ADDR_LINKLOCAL
-        if test_class.ipv6_src_type == "LINK_LOCAL"
-        else scapy.IPV6_ADDR_GLOBAL
-    )
-
-    mac_dst = test_class.mac_dst
-    if GET_FROM_PHONE in test_class.mac_dst:
-        mac_dst = get_phone_mac(test_class.dut)
-
-    ipv4_dst = test_class.ipv4_dst
-    if GET_FROM_PHONE in test_class.ipv4_dst:
-        ipv4_dst = get_phone_ip(test_class.dut)
-
-    ipv6_dst = test_class.ipv6_dst
-    if GET_FROM_PHONE in test_class.ipv6_dst:
-        ipv6_dst = get_phone_ipv6(test_class.dut)
-
-    ipv4_gw = test_class.ipv4_gwt
-    if GET_FROM_AP in test_class.ipv4_gwt:
-        ipv4_gw = test_class.access_point.ssh_settings.hostname
-
-    pkt_gen_config = {
-        "interf": test_class.pkt_sender.interface,
-        "subnet_mask": test_class.sub_mask,
-        "src_mac": test_class.mac_src,
-        "dst_mac": mac_dst,
-        "src_ipv4": test_class.ipv4_src,
-        "dst_ipv4": ipv4_dst,
-        "src_ipv6": test_class.ipv6_src,
-        "src_ipv6_type": addr_type,
-        "dst_ipv6": ipv6_dst,
-        "gw_ipv4": ipv4_gw,
-    }
-    return pkt_gen_config
diff --git a/src/antlion/test_utils/wifi/wifi_test_utils.py b/src/antlion/test_utils/wifi/wifi_test_utils.py
deleted file mode 100755
index 9c04b59..0000000
--- a/src/antlion/test_utils/wifi/wifi_test_utils.py
+++ /dev/null
@@ -1,2999 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import logging
-import os
-import re
-import shutil
-import time
-
-from collections import namedtuple
-from enum import IntEnum
-from queue import Empty
-
-from tenacity import retry, stop_after_attempt, wait_fixed
-
-from antlion import context
-from antlion import signals
-from antlion import utils
-from antlion.controllers import attenuator
-from antlion.controllers.ap_lib import hostapd_security
-from antlion.controllers.ap_lib import hostapd_ap_preset
-from antlion.controllers.ap_lib.hostapd_constants import BAND_2G
-from antlion.controllers.ap_lib.hostapd_constants import BAND_5G
-from antlion.test_utils.wifi import wifi_constants
-
-from mobly import asserts
-
-# Default timeout used for reboot, toggle WiFi and Airplane mode,
-# for the system to settle down after the operation.
-DEFAULT_TIMEOUT = 10
-# Number of seconds to wait for events that are supposed to happen quickly.
-# Like onSuccess for start background scan and confirmation on wifi state
-# change.
-SHORT_TIMEOUT = 30
-ROAMING_TIMEOUT = 30
-WIFI_CONNECTION_TIMEOUT_DEFAULT = 30
-DEFAULT_SCAN_TRIES = 3
-DEFAULT_CONNECT_TRIES = 3
-# Speed of light in m/s.
-SPEED_OF_LIGHT = 299792458
-
-DEFAULT_PING_ADDR = "https://www.google.com/robots.txt"
-
-CNSS_DIAG_CONFIG_PATH = "/data/vendor/wifi/cnss_diag/"
-CNSS_DIAG_CONFIG_FILE = "cnss_diag.conf"
-
-ROAMING_ATTN = {
-    "AP1_on_AP2_off": [0, 0, 95, 95],
-    "AP1_off_AP2_on": [95, 95, 0, 0],
-    "default": [0, 0, 0, 0],
-}
-
-
-class WifiEnums:
-    SSID_KEY = "SSID"  # Used for Wifi & SoftAp
-    SSID_PATTERN_KEY = "ssidPattern"
-    NETID_KEY = "network_id"
-    BSSID_KEY = "BSSID"  # Used for Wifi & SoftAp
-    BSSID_PATTERN_KEY = "bssidPattern"
-    PWD_KEY = "password"  # Used for Wifi & SoftAp
-    frequency_key = "frequency"
-    HIDDEN_KEY = "hiddenSSID"  # Used for Wifi & SoftAp
-    IS_APP_INTERACTION_REQUIRED = "isAppInteractionRequired"
-    IS_USER_INTERACTION_REQUIRED = "isUserInteractionRequired"
-    IS_SUGGESTION_METERED = "isMetered"
-    PRIORITY = "priority"
-    SECURITY = "security"  # Used for Wifi & SoftAp
-
-    # Used for SoftAp
-    AP_BAND_KEY = "apBand"
-    AP_CHANNEL_KEY = "apChannel"
-    AP_BANDS_KEY = "apBands"
-    AP_CHANNEL_FREQUENCYS_KEY = "apChannelFrequencies"
-    AP_MAC_RANDOMIZATION_SETTING_KEY = "MacRandomizationSetting"
-    AP_BRIDGED_OPPORTUNISTIC_SHUTDOWN_ENABLE_KEY = (
-        "BridgedModeOpportunisticShutdownEnabled"
-    )
-    AP_IEEE80211AX_ENABLED_KEY = "Ieee80211axEnabled"
-    AP_MAXCLIENTS_KEY = "MaxNumberOfClients"
-    AP_SHUTDOWNTIMEOUT_KEY = "ShutdownTimeoutMillis"
-    AP_SHUTDOWNTIMEOUTENABLE_KEY = "AutoShutdownEnabled"
-    AP_CLIENTCONTROL_KEY = "ClientControlByUserEnabled"
-    AP_ALLOWEDLIST_KEY = "AllowedClientList"
-    AP_BLOCKEDLIST_KEY = "BlockedClientList"
-
-    WIFI_CONFIG_SOFTAP_BAND_2G = 1
-    WIFI_CONFIG_SOFTAP_BAND_5G = 2
-    WIFI_CONFIG_SOFTAP_BAND_2G_5G = 3
-    WIFI_CONFIG_SOFTAP_BAND_6G = 4
-    WIFI_CONFIG_SOFTAP_BAND_2G_6G = 5
-    WIFI_CONFIG_SOFTAP_BAND_5G_6G = 6
-    WIFI_CONFIG_SOFTAP_BAND_ANY = 7
-
-    # DO NOT USE IT for new test case! Replaced by WIFI_CONFIG_SOFTAP_BAND_
-    WIFI_CONFIG_APBAND_2G = WIFI_CONFIG_SOFTAP_BAND_2G
-    WIFI_CONFIG_APBAND_5G = WIFI_CONFIG_SOFTAP_BAND_5G
-    WIFI_CONFIG_APBAND_AUTO = WIFI_CONFIG_SOFTAP_BAND_2G_5G
-
-    WIFI_CONFIG_APBAND_2G_OLD = 0
-    WIFI_CONFIG_APBAND_5G_OLD = 1
-    WIFI_CONFIG_APBAND_AUTO_OLD = -1
-
-    WIFI_WPS_INFO_PBC = 0
-    WIFI_WPS_INFO_DISPLAY = 1
-    WIFI_WPS_INFO_KEYPAD = 2
-    WIFI_WPS_INFO_LABEL = 3
-    WIFI_WPS_INFO_INVALID = 4
-
-    class SoftApSecurityType:
-        OPEN = "NONE"
-        WPA2 = "WPA2_PSK"
-        WPA3_SAE_TRANSITION = "WPA3_SAE_TRANSITION"
-        WPA3_SAE = "WPA3_SAE"
-
-    class CountryCode:
-        AUSTRALIA = "AU"
-        CHINA = "CN"
-        GERMANY = "DE"
-        JAPAN = "JP"
-        UK = "GB"
-        US = "US"
-        UNKNOWN = "UNKNOWN"
-
-    # Start of Macros for EAP
-    # EAP types
-    class Eap(IntEnum):
-        NONE = -1
-        PEAP = 0
-        TLS = 1
-        TTLS = 2
-        PWD = 3
-        SIM = 4
-        AKA = 5
-        AKA_PRIME = 6
-        UNAUTH_TLS = 7
-
-    # EAP Phase2 types
-    class EapPhase2(IntEnum):
-        NONE = 0
-        PAP = 1
-        MSCHAP = 2
-        MSCHAPV2 = 3
-        GTC = 4
-
-    class Enterprise:
-        # Enterprise Config Macros
-        EMPTY_VALUE = "NULL"
-        EAP = "eap"
-        PHASE2 = "phase2"
-        IDENTITY = "identity"
-        ANON_IDENTITY = "anonymous_identity"
-        PASSWORD = "password"
-        SUBJECT_MATCH = "subject_match"
-        ALTSUBJECT_MATCH = "altsubject_match"
-        DOM_SUFFIX_MATCH = "domain_suffix_match"
-        CLIENT_CERT = "client_cert"
-        CA_CERT = "ca_cert"
-        ENGINE = "engine"
-        ENGINE_ID = "engine_id"
-        PRIVATE_KEY_ID = "key_id"
-        REALM = "realm"
-        PLMN = "plmn"
-        FQDN = "FQDN"
-        FRIENDLY_NAME = "providerFriendlyName"
-        ROAMING_IDS = "roamingConsortiumIds"
-        OCSP = "ocsp"
-
-    # End of Macros for EAP
-
-    class ScanResult:
-        CHANNEL_WIDTH_20MHZ = 0
-        CHANNEL_WIDTH_40MHZ = 1
-        CHANNEL_WIDTH_80MHZ = 2
-        CHANNEL_WIDTH_160MHZ = 3
-        CHANNEL_WIDTH_80MHZ_PLUS_MHZ = 4
-
-    # Macros for wifi rtt.
-    class RttType(IntEnum):
-        TYPE_ONE_SIDED = 1
-        TYPE_TWO_SIDED = 2
-
-    class RttPeerType(IntEnum):
-        PEER_TYPE_AP = 1
-        PEER_TYPE_STA = 2  # Requires NAN.
-        PEER_P2P_GO = 3
-        PEER_P2P_CLIENT = 4
-        PEER_NAN = 5
-
-    class RttPreamble(IntEnum):
-        PREAMBLE_LEGACY = 0x01
-        PREAMBLE_HT = 0x02
-        PREAMBLE_VHT = 0x04
-
-    class RttBW(IntEnum):
-        BW_5_SUPPORT = 0x01
-        BW_10_SUPPORT = 0x02
-        BW_20_SUPPORT = 0x04
-        BW_40_SUPPORT = 0x08
-        BW_80_SUPPORT = 0x10
-        BW_160_SUPPORT = 0x20
-
-    class Rtt(IntEnum):
-        STATUS_SUCCESS = 0
-        STATUS_FAILURE = 1
-        STATUS_FAIL_NO_RSP = 2
-        STATUS_FAIL_REJECTED = 3
-        STATUS_FAIL_NOT_SCHEDULED_YET = 4
-        STATUS_FAIL_TM_TIMEOUT = 5
-        STATUS_FAIL_AP_ON_DIFF_CHANNEL = 6
-        STATUS_FAIL_NO_CAPABILITY = 7
-        STATUS_ABORTED = 8
-        STATUS_FAIL_INVALID_TS = 9
-        STATUS_FAIL_PROTOCOL = 10
-        STATUS_FAIL_SCHEDULE = 11
-        STATUS_FAIL_BUSY_TRY_LATER = 12
-        STATUS_INVALID_REQ = 13
-        STATUS_NO_WIFI = 14
-        STATUS_FAIL_FTM_PARAM_OVERRIDE = 15
-
-        REASON_UNSPECIFIED = -1
-        REASON_NOT_AVAILABLE = -2
-        REASON_INVALID_LISTENER = -3
-        REASON_INVALID_REQUEST = -4
-
-    class RttParam:
-        device_type = "deviceType"
-        request_type = "requestType"
-        BSSID = "bssid"
-        channel_width = "channelWidth"
-        frequency = "frequency"
-        center_freq0 = "centerFreq0"
-        center_freq1 = "centerFreq1"
-        number_burst = "numberBurst"
-        interval = "interval"
-        num_samples_per_burst = "numSamplesPerBurst"
-        num_retries_per_measurement_frame = "numRetriesPerMeasurementFrame"
-        num_retries_per_FTMR = "numRetriesPerFTMR"
-        lci_request = "LCIRequest"
-        lcr_request = "LCRRequest"
-        burst_timeout = "burstTimeout"
-        preamble = "preamble"
-        bandwidth = "bandwidth"
-        margin = "margin"
-
-    RTT_MARGIN_OF_ERROR = {
-        RttBW.BW_80_SUPPORT: 2,
-        RttBW.BW_40_SUPPORT: 5,
-        RttBW.BW_20_SUPPORT: 5,
-    }
-
-    # Macros as specified in the WifiScanner code.
-    WIFI_BAND_UNSPECIFIED = 0  # not specified
-    WIFI_BAND_24_GHZ = 1  # 2.4 GHz band
-    WIFI_BAND_5_GHZ = 2  # 5 GHz band without DFS channels
-    WIFI_BAND_5_GHZ_DFS_ONLY = 4  # 5 GHz band with DFS channels
-    WIFI_BAND_5_GHZ_WITH_DFS = 6  # 5 GHz band with DFS channels
-    WIFI_BAND_BOTH = 3  # both bands without DFS channels
-    WIFI_BAND_BOTH_WITH_DFS = 7  # both bands with DFS channels
-
-    REPORT_EVENT_AFTER_BUFFER_FULL = 0
-    REPORT_EVENT_AFTER_EACH_SCAN = 1
-    REPORT_EVENT_FULL_SCAN_RESULT = 2
-
-    SCAN_TYPE_LOW_LATENCY = 0
-    SCAN_TYPE_LOW_POWER = 1
-    SCAN_TYPE_HIGH_ACCURACY = 2
-
-    # US Wifi frequencies
-    ALL_2G_FREQUENCIES = [
-        2412,
-        2417,
-        2422,
-        2427,
-        2432,
-        2437,
-        2442,
-        2447,
-        2452,
-        2457,
-        2462,
-    ]
-    DFS_5G_FREQUENCIES = [
-        5260,
-        5280,
-        5300,
-        5320,
-        5500,
-        5520,
-        5540,
-        5560,
-        5580,
-        5600,
-        5620,
-        5640,
-        5660,
-        5680,
-        5700,
-        5720,
-    ]
-    NONE_DFS_5G_FREQUENCIES = [5180, 5200, 5220, 5240, 5745, 5765, 5785, 5805, 5825]
-    ALL_5G_FREQUENCIES = DFS_5G_FREQUENCIES + NONE_DFS_5G_FREQUENCIES
-
-    band_to_frequencies = {
-        WIFI_BAND_24_GHZ: ALL_2G_FREQUENCIES,
-        WIFI_BAND_5_GHZ: NONE_DFS_5G_FREQUENCIES,
-        WIFI_BAND_5_GHZ_DFS_ONLY: DFS_5G_FREQUENCIES,
-        WIFI_BAND_5_GHZ_WITH_DFS: ALL_5G_FREQUENCIES,
-        WIFI_BAND_BOTH: ALL_2G_FREQUENCIES + NONE_DFS_5G_FREQUENCIES,
-        WIFI_BAND_BOTH_WITH_DFS: ALL_5G_FREQUENCIES + ALL_2G_FREQUENCIES,
-    }
-
-    # TODO: add all of the band mapping.
-    softap_band_frequencies = {
-        WIFI_CONFIG_SOFTAP_BAND_2G: ALL_2G_FREQUENCIES,
-        WIFI_CONFIG_SOFTAP_BAND_5G: ALL_5G_FREQUENCIES,
-    }
-
-    # All Wifi frequencies to channels lookup.
-    freq_to_channel = {
-        2412: 1,
-        2417: 2,
-        2422: 3,
-        2427: 4,
-        2432: 5,
-        2437: 6,
-        2442: 7,
-        2447: 8,
-        2452: 9,
-        2457: 10,
-        2462: 11,
-        2467: 12,
-        2472: 13,
-        2484: 14,
-        4915: 183,
-        4920: 184,
-        4925: 185,
-        4935: 187,
-        4940: 188,
-        4945: 189,
-        4960: 192,
-        4980: 196,
-        5035: 7,
-        5040: 8,
-        5045: 9,
-        5055: 11,
-        5060: 12,
-        5080: 16,
-        5170: 34,
-        5180: 36,
-        5190: 38,
-        5200: 40,
-        5210: 42,
-        5220: 44,
-        5230: 46,
-        5240: 48,
-        5260: 52,
-        5280: 56,
-        5300: 60,
-        5320: 64,
-        5500: 100,
-        5520: 104,
-        5540: 108,
-        5560: 112,
-        5580: 116,
-        5600: 120,
-        5620: 124,
-        5640: 128,
-        5660: 132,
-        5680: 136,
-        5700: 140,
-        5745: 149,
-        5765: 153,
-        5785: 157,
-        5795: 159,
-        5805: 161,
-        5825: 165,
-    }
-
-    # All Wifi channels to frequencies lookup.
-    channel_2G_to_freq = {
-        1: 2412,
-        2: 2417,
-        3: 2422,
-        4: 2427,
-        5: 2432,
-        6: 2437,
-        7: 2442,
-        8: 2447,
-        9: 2452,
-        10: 2457,
-        11: 2462,
-        12: 2467,
-        13: 2472,
-        14: 2484,
-    }
-
-    channel_5G_to_freq = {
-        183: 4915,
-        184: 4920,
-        185: 4925,
-        187: 4935,
-        188: 4940,
-        189: 4945,
-        192: 4960,
-        196: 4980,
-        7: 5035,
-        8: 5040,
-        9: 5045,
-        11: 5055,
-        12: 5060,
-        16: 5080,
-        34: 5170,
-        36: 5180,
-        38: 5190,
-        40: 5200,
-        42: 5210,
-        44: 5220,
-        46: 5230,
-        48: 5240,
-        50: 5250,
-        52: 5260,
-        56: 5280,
-        60: 5300,
-        64: 5320,
-        100: 5500,
-        104: 5520,
-        108: 5540,
-        112: 5560,
-        116: 5580,
-        120: 5600,
-        124: 5620,
-        128: 5640,
-        132: 5660,
-        136: 5680,
-        140: 5700,
-        149: 5745,
-        151: 5755,
-        153: 5765,
-        155: 5775,
-        157: 5785,
-        159: 5795,
-        161: 5805,
-        165: 5825,
-    }
-
-    channel_6G_to_freq = {4 * x + 1: 5955 + 20 * x for x in range(59)}
-
-    channel_to_freq = {
-        "2G": channel_2G_to_freq,
-        "5G": channel_5G_to_freq,
-        "6G": channel_6G_to_freq,
-    }
-
-
-class WifiChannelBase:
-    ALL_2G_FREQUENCIES = []
-    DFS_5G_FREQUENCIES = []
-    NONE_DFS_5G_FREQUENCIES = []
-    ALL_5G_FREQUENCIES = DFS_5G_FREQUENCIES + NONE_DFS_5G_FREQUENCIES
-    MIX_CHANNEL_SCAN = []
-
-    def band_to_freq(self, band):
-        _band_to_frequencies = {
-            WifiEnums.WIFI_BAND_24_GHZ: self.ALL_2G_FREQUENCIES,
-            WifiEnums.WIFI_BAND_5_GHZ: self.NONE_DFS_5G_FREQUENCIES,
-            WifiEnums.WIFI_BAND_5_GHZ_DFS_ONLY: self.DFS_5G_FREQUENCIES,
-            WifiEnums.WIFI_BAND_5_GHZ_WITH_DFS: self.ALL_5G_FREQUENCIES,
-            WifiEnums.WIFI_BAND_BOTH: self.ALL_2G_FREQUENCIES
-            + self.NONE_DFS_5G_FREQUENCIES,
-            WifiEnums.WIFI_BAND_BOTH_WITH_DFS: self.ALL_5G_FREQUENCIES
-            + self.ALL_2G_FREQUENCIES,
-        }
-        return _band_to_frequencies[band]
-
-
-class WifiChannelUS(WifiChannelBase):
-    # US Wifi frequencies
-    ALL_2G_FREQUENCIES = [
-        2412,
-        2417,
-        2422,
-        2427,
-        2432,
-        2437,
-        2442,
-        2447,
-        2452,
-        2457,
-        2462,
-    ]
-    NONE_DFS_5G_FREQUENCIES = [5180, 5200, 5220, 5240, 5745, 5765, 5785, 5805, 5825]
-    MIX_CHANNEL_SCAN = [
-        2412,
-        2437,
-        2462,
-        5180,
-        5200,
-        5280,
-        5260,
-        5300,
-        5500,
-        5320,
-        5520,
-        5560,
-        5700,
-        5745,
-        5805,
-    ]
-
-    def __init__(self, model=None, support_addition_channel=[]):
-        if model in support_addition_channel:
-            self.ALL_2G_FREQUENCIES = [
-                2412,
-                2417,
-                2422,
-                2427,
-                2432,
-                2437,
-                2442,
-                2447,
-                2452,
-                2457,
-                2462,
-                2467,
-                2472,
-            ]
-        self.DFS_5G_FREQUENCIES = [
-            5260,
-            5280,
-            5300,
-            5320,
-            5500,
-            5520,
-            5540,
-            5560,
-            5580,
-            5600,
-            5620,
-            5640,
-            5660,
-            5680,
-            5700,
-            5720,
-        ]
-        self.ALL_5G_FREQUENCIES = self.DFS_5G_FREQUENCIES + self.NONE_DFS_5G_FREQUENCIES
-
-
-class WifiReferenceNetworks:
-    """Class to parse and return networks of different band and
-    auth type from reference_networks
-    """
-
-    def __init__(self, obj):
-        self.reference_networks = obj
-        self.WIFI_2G = "2g"
-        self.WIFI_5G = "5g"
-
-        self.secure_networks_2g = []
-        self.secure_networks_5g = []
-        self.open_networks_2g = []
-        self.open_networks_5g = []
-        self._parse_networks()
-
-    def _parse_networks(self):
-        for network in self.reference_networks:
-            for key in network:
-                if key == self.WIFI_2G:
-                    if "password" in network[key]:
-                        self.secure_networks_2g.append(network[key])
-                    else:
-                        self.open_networks_2g.append(network[key])
-                else:
-                    if "password" in network[key]:
-                        self.secure_networks_5g.append(network[key])
-                    else:
-                        self.open_networks_5g.append(network[key])
-
-    def return_2g_secure_networks(self):
-        return self.secure_networks_2g
-
-    def return_5g_secure_networks(self):
-        return self.secure_networks_5g
-
-    def return_2g_open_networks(self):
-        return self.open_networks_2g
-
-    def return_5g_open_networks(self):
-        return self.open_networks_5g
-
-    def return_secure_networks(self):
-        return self.secure_networks_2g + self.secure_networks_5g
-
-    def return_open_networks(self):
-        return self.open_networks_2g + self.open_networks_5g
-
-
-def _assert_on_fail_handler(func, assert_on_fail, *args, **kwargs):
-    """Wrapper function that handles the bahevior of assert_on_fail.
-
-    When assert_on_fail is True, let all test signals through, which can
-    terminate test cases directly. When assert_on_fail is False, the wrapper
-    raises no test signals and reports operation status by returning True or
-    False.
-
-    Args:
-        func: The function to wrap. This function reports operation status by
-              raising test signals.
-        assert_on_fail: A boolean that specifies if the output of the wrapper
-                        is test signal based or return value based.
-        args: Positional args for func.
-        kwargs: Name args for func.
-
-    Returns:
-        If assert_on_fail is True, returns True/False to signal operation
-        status, otherwise return nothing.
-    """
-    try:
-        func(*args, **kwargs)
-        if not assert_on_fail:
-            return True
-    except signals.TestSignal:
-        if assert_on_fail:
-            raise
-        return False
-
-
-def assert_network_in_list(target, network_list):
-    """Makes sure a specified target Wi-Fi network exists in a list of Wi-Fi
-    networks.
-
-    Args:
-        target: A dict representing a Wi-Fi network.
-                E.g. {WifiEnums.SSID_KEY: "SomeNetwork"}
-        network_list: A list of dicts, each representing a Wi-Fi network.
-    """
-    match_results = match_networks(target, network_list)
-    asserts.assert_true(
-        match_results,
-        "Target network %s, does not exist in network list %s" % (target, network_list),
-    )
-
-
-def match_networks(target_params, networks):
-    """Finds the WiFi networks that match a given set of parameters in a list
-    of WiFi networks.
-
-    To be considered a match, the network should contain every key-value pair
-    of target_params
-
-    Args:
-        target_params: A dict with 1 or more key-value pairs representing a Wi-Fi network.
-                       E.g { 'SSID': 'wh_ap1_5g', 'BSSID': '30:b5:c2:33:e4:47' }
-        networks: A list of dict objects representing WiFi networks.
-
-    Returns:
-        The networks that match the target parameters.
-    """
-    results = []
-    asserts.assert_true(
-        target_params, "Expected networks object 'target_params' is empty"
-    )
-    for n in networks:
-        add_network = 1
-        for k, v in target_params.items():
-            if k not in n:
-                add_network = 0
-                break
-            if n[k] != v:
-                add_network = 0
-                break
-        if add_network:
-            results.append(n)
-    return results
-
-
-def wait_for_wifi_state(ad, state, assert_on_fail=True):
-    """Waits for the device to transition to the specified wifi state
-
-    Args:
-        ad: An AndroidDevice object.
-        state: Wifi state to wait for.
-        assert_on_fail: If True, error checks in this function will raise test
-                        failure signals.
-
-    Returns:
-        If assert_on_fail is False, function returns True if the device transitions
-        to the specified state, False otherwise. If assert_on_fail is True, no return value.
-    """
-    return _assert_on_fail_handler(
-        _wait_for_wifi_state, assert_on_fail, ad, state=state
-    )
-
-
-def _wait_for_wifi_state(ad, state):
-    """Toggles the state of wifi.
-
-    TestFailure signals are raised when something goes wrong.
-
-    Args:
-        ad: An AndroidDevice object.
-        state: Wifi state to wait for.
-    """
-    if state == ad.droid.wifiCheckState():
-        # Check if the state is already achieved, so we don't wait for the
-        # state change event by mistake.
-        return
-    ad.droid.wifiStartTrackingStateChange()
-    fail_msg = "Device did not transition to Wi-Fi state to %s on %s." % (
-        state,
-        ad.serial,
-    )
-    try:
-        ad.ed.wait_for_event(
-            wifi_constants.WIFI_STATE_CHANGED,
-            lambda x: x["data"]["enabled"] == state,
-            SHORT_TIMEOUT,
-        )
-    except Empty:
-        asserts.assert_equal(state, ad.droid.wifiCheckState(), fail_msg)
-    finally:
-        ad.droid.wifiStopTrackingStateChange()
-
-
-def wifi_toggle_state(ad, new_state=None, assert_on_fail=True):
-    """Toggles the state of wifi.
-
-    Args:
-        ad: An AndroidDevice object.
-        new_state: Wifi state to set to. If None, opposite of the current state.
-        assert_on_fail: If True, error checks in this function will raise test
-                        failure signals.
-
-    Returns:
-        If assert_on_fail is False, function returns True if the toggle was
-        successful, False otherwise. If assert_on_fail is True, no return value.
-    """
-    return _assert_on_fail_handler(
-        _wifi_toggle_state, assert_on_fail, ad, new_state=new_state
-    )
-
-
-def _wifi_toggle_state(ad, new_state=None):
-    """Toggles the state of wifi.
-
-    TestFailure signals are raised when something goes wrong.
-
-    Args:
-        ad: An AndroidDevice object.
-        new_state: The state to set Wi-Fi to. If None, opposite of the current
-                   state will be set.
-    """
-    if new_state is None:
-        new_state = not ad.droid.wifiCheckState()
-    elif new_state == ad.droid.wifiCheckState():
-        # Check if the new_state is already achieved, so we don't wait for the
-        # state change event by mistake.
-        return
-    ad.droid.wifiStartTrackingStateChange()
-    ad.log.info("Setting Wi-Fi state to %s.", new_state)
-    ad.ed.clear_all_events()
-    # Setting wifi state.
-    ad.droid.wifiToggleState(new_state)
-    time.sleep(2)
-    fail_msg = "Failed to set Wi-Fi state to %s on %s." % (new_state, ad.serial)
-    try:
-        ad.ed.wait_for_event(
-            wifi_constants.WIFI_STATE_CHANGED,
-            lambda x: x["data"]["enabled"] == new_state,
-            SHORT_TIMEOUT,
-        )
-    except Empty:
-        asserts.assert_equal(new_state, ad.droid.wifiCheckState(), fail_msg)
-    finally:
-        ad.droid.wifiStopTrackingStateChange()
-
-
-def reset_wifi(ad):
-    """Clears all saved Wi-Fi networks on a device.
-
-    This will turn Wi-Fi on.
-
-    Args:
-        ad: An AndroidDevice object.
-
-    """
-    networks = ad.droid.wifiGetConfiguredNetworks()
-    if not networks:
-        return
-    removed = []
-    for n in networks:
-        if n["networkId"] not in removed:
-            ad.droid.wifiForgetNetwork(n["networkId"])
-            removed.append(n["networkId"])
-        else:
-            continue
-        try:
-            event = ad.ed.pop_event(
-                wifi_constants.WIFI_FORGET_NW_SUCCESS, SHORT_TIMEOUT
-            )
-        except Empty:
-            logging.warning("Could not confirm the removal of network %s.", n)
-    # Check again to see if there's any network left.
-    asserts.assert_true(
-        not ad.droid.wifiGetConfiguredNetworks(),
-        "Failed to remove these configured Wi-Fi networks: %s" % networks,
-    )
-
-
-def toggle_airplane_mode_on_and_off(ad):
-    """Turn ON and OFF Airplane mode.
-
-    ad: An AndroidDevice object.
-    Returns: Assert if turning on/off Airplane mode fails.
-
-    """
-    ad.log.debug("Toggling Airplane mode ON.")
-    asserts.assert_true(
-        utils.force_airplane_mode(ad, True),
-        "Can not turn on airplane mode on: %s" % ad.serial,
-    )
-    time.sleep(DEFAULT_TIMEOUT)
-    ad.log.debug("Toggling Airplane mode OFF.")
-    asserts.assert_true(
-        utils.force_airplane_mode(ad, False),
-        "Can not turn on airplane mode on: %s" % ad.serial,
-    )
-    time.sleep(DEFAULT_TIMEOUT)
-
-
-def toggle_wifi_off_and_on(ad):
-    """Turn OFF and ON WiFi.
-
-    ad: An AndroidDevice object.
-    Returns: Assert if turning off/on WiFi fails.
-
-    """
-    ad.log.debug("Toggling wifi OFF.")
-    wifi_toggle_state(ad, False)
-    time.sleep(DEFAULT_TIMEOUT)
-    ad.log.debug("Toggling wifi ON.")
-    wifi_toggle_state(ad, True)
-    time.sleep(DEFAULT_TIMEOUT)
-
-
-def wifi_forget_network(ad, net_ssid):
-    """Remove configured Wifi network on an android device.
-
-    Args:
-        ad: android_device object for forget network.
-        net_ssid: ssid of network to be forget
-
-    """
-    networks = ad.droid.wifiGetConfiguredNetworks()
-    if not networks:
-        return
-    removed = []
-    for n in networks:
-        if net_ssid in n[WifiEnums.SSID_KEY] and n["networkId"] not in removed:
-            ad.droid.wifiForgetNetwork(n["networkId"])
-            removed.append(n["networkId"])
-            try:
-                event = ad.ed.pop_event(
-                    wifi_constants.WIFI_FORGET_NW_SUCCESS, SHORT_TIMEOUT
-                )
-            except Empty:
-                asserts.fail("Failed to remove network %s." % n)
-            break
-
-
-def wifi_test_device_init(ad, country_code=WifiEnums.CountryCode.US):
-    """Initializes an android device for wifi testing.
-
-    0. Make sure SL4A connection is established on the android device.
-    1. Disable location service's WiFi scan.
-    2. Turn WiFi on.
-    3. Clear all saved networks.
-    4. Set country code to US.
-    5. Enable WiFi verbose logging.
-    6. Sync device time with computer time.
-    7. Turn off cellular data.
-    8. Turn off ambient display.
-    """
-    utils.require_sl4a((ad,))
-    ad.droid.wifiScannerToggleAlwaysAvailable(False)
-    msg = "Failed to turn off location service's scan."
-    asserts.assert_true(not ad.droid.wifiScannerIsAlwaysAvailable(), msg)
-    wifi_toggle_state(ad, True)
-    reset_wifi(ad)
-    ad.droid.wifiEnableVerboseLogging(1)
-    msg = "Failed to enable WiFi verbose logging."
-    asserts.assert_equal(ad.droid.wifiGetVerboseLoggingLevel(), 1, msg)
-    # We don't verify the following settings since they are not critical.
-    # Set wpa_supplicant log level to EXCESSIVE.
-    output = ad.adb.shell(
-        "wpa_cli -i wlan0 -p -g@android:wpa_wlan0 IFNAME=" "wlan0 log_level EXCESSIVE",
-        ignore_status=True,
-    )
-    ad.log.info("wpa_supplicant log change status: %s", output)
-    utils.sync_device_time(ad)
-    ad.droid.telephonyToggleDataConnection(False)
-    set_wifi_country_code(ad, country_code)
-    utils.set_ambient_display(ad, False)
-
-
-def set_wifi_country_code(ad, country_code):
-    """Sets the wifi country code on the device.
-
-    Args:
-        ad: An AndroidDevice object.
-        country_code: 2 letter ISO country code
-
-    Raises:
-        An RpcException if unable to set the country code.
-    """
-    try:
-        ad.adb.shell("cmd wifi force-country-code enabled %s" % country_code)
-    except Exception as e:
-        ad.droid.wifiSetCountryCode(WifiEnums.CountryCode.US)
-
-
-def start_wifi_connection_scan(ad):
-    """Starts a wifi connection scan and wait for results to become available.
-
-    Args:
-        ad: An AndroidDevice object.
-    """
-    ad.ed.clear_all_events()
-    ad.droid.wifiStartScan()
-    try:
-        ad.ed.pop_event("WifiManagerScanResultsAvailable", 60)
-    except Empty:
-        asserts.fail("Wi-Fi results did not become available within 60s.")
-
-
-def start_wifi_connection_scan_and_return_status(ad):
-    """
-    Starts a wifi connection scan and wait for results to become available
-    or a scan failure to be reported.
-
-    Args:
-        ad: An AndroidDevice object.
-    Returns:
-        True: if scan succeeded & results are available
-        False: if scan failed
-    """
-    ad.ed.clear_all_events()
-    ad.droid.wifiStartScan()
-    try:
-        events = ad.ed.pop_events("WifiManagerScan(ResultsAvailable|Failure)", 60)
-    except Empty:
-        asserts.fail("Wi-Fi scan results/failure did not become available within 60s.")
-    # If there are multiple matches, we check for atleast one success.
-    for event in events:
-        if event["name"] == "WifiManagerScanResultsAvailable":
-            return True
-        elif event["name"] == "WifiManagerScanFailure":
-            ad.log.debug("Scan failure received")
-    return False
-
-
-def start_wifi_connection_scan_and_check_for_network(ad, network_ssid, max_tries=3):
-    """
-    Start connectivity scans & checks if the |network_ssid| is seen in
-    scan results. The method performs a max of |max_tries| connectivity scans
-    to find the network.
-
-    Args:
-        ad: An AndroidDevice object.
-        network_ssid: SSID of the network we are looking for.
-        max_tries: Number of scans to try.
-    Returns:
-        True: if network_ssid is found in scan results.
-        False: if network_ssid is not found in scan results.
-    """
-    start_time = time.time()
-    for num_tries in range(max_tries):
-        if start_wifi_connection_scan_and_return_status(ad):
-            scan_results = ad.droid.wifiGetScanResults()
-            match_results = match_networks(
-                {WifiEnums.SSID_KEY: network_ssid}, scan_results
-            )
-            if len(match_results) > 0:
-                ad.log.debug(
-                    "Found network in %s seconds." % (time.time() - start_time)
-                )
-                return True
-    ad.log.debug("Did not find network in %s seconds." % (time.time() - start_time))
-    return False
-
-
-def start_wifi_connection_scan_and_ensure_network_found(ad, network_ssid, max_tries=3):
-    """
-    Start connectivity scans & ensure the |network_ssid| is seen in
-    scan results. The method performs a max of |max_tries| connectivity scans
-    to find the network.
-    This method asserts on failure!
-
-    Args:
-        ad: An AndroidDevice object.
-        network_ssid: SSID of the network we are looking for.
-        max_tries: Number of scans to try.
-    """
-    ad.log.info("Starting scans to ensure %s is present", network_ssid)
-    assert_msg = (
-        "Failed to find " + network_ssid + " in scan results"
-        " after " + str(max_tries) + " tries"
-    )
-    asserts.assert_true(
-        start_wifi_connection_scan_and_check_for_network(ad, network_ssid, max_tries),
-        assert_msg,
-    )
-
-
-def start_wifi_connection_scan_and_ensure_network_not_found(
-    ad, network_ssid, max_tries=3
-):
-    """
-    Start connectivity scans & ensure the |network_ssid| is not seen in
-    scan results. The method performs a max of |max_tries| connectivity scans
-    to find the network.
-    This method asserts on failure!
-
-    Args:
-        ad: An AndroidDevice object.
-        network_ssid: SSID of the network we are looking for.
-        max_tries: Number of scans to try.
-    """
-    ad.log.info("Starting scans to ensure %s is not present", network_ssid)
-    assert_msg = (
-        "Found " + network_ssid + " in scan results"
-        " after " + str(max_tries) + " tries"
-    )
-    asserts.assert_false(
-        start_wifi_connection_scan_and_check_for_network(ad, network_ssid, max_tries),
-        assert_msg,
-    )
-
-
-def start_wifi_background_scan(ad, scan_setting):
-    """Starts wifi background scan.
-
-    Args:
-        ad: android_device object to initiate connection on.
-        scan_setting: A dict representing the settings of the scan.
-
-    Returns:
-        If scan was started successfully, event data of success event is returned.
-    """
-    idx = ad.droid.wifiScannerStartBackgroundScan(scan_setting)
-    event = ad.ed.pop_event("WifiScannerScan{}onSuccess".format(idx), SHORT_TIMEOUT)
-    return event["data"]
-
-
-def save_wifi_soft_ap_config(
-    ad,
-    wifi_config,
-    band=None,
-    hidden=None,
-    security=None,
-    password=None,
-    channel=None,
-    max_clients=None,
-    shutdown_timeout_enable=None,
-    shutdown_timeout_millis=None,
-    client_control_enable=None,
-    allowedList=None,
-    blockedList=None,
-    bands=None,
-    channel_frequencys=None,
-    mac_randomization_setting=None,
-    bridged_opportunistic_shutdown_enabled=None,
-    ieee80211ax_enabled=None,
-):
-    """Save a soft ap configuration and verified
-    Args:
-        ad: android_device to set soft ap configuration.
-        wifi_config: a soft ap configuration object, at least include SSID.
-        band: specifies the band for the soft ap.
-        hidden: specifies the soft ap need to broadcast its SSID or not.
-        security: specifies the security type for the soft ap.
-        password: specifies the password for the soft ap.
-        channel: specifies the channel for the soft ap.
-        max_clients: specifies the maximum connected client number.
-        shutdown_timeout_enable: specifies the auto shut down enable or not.
-        shutdown_timeout_millis: specifies the shut down timeout value.
-        client_control_enable: specifies the client control enable or not.
-        allowedList: specifies allowed clients list.
-        blockedList: specifies blocked clients list.
-        bands: specifies the band list for the soft ap.
-        channel_frequencys: specifies the channel frequency list for soft ap.
-        mac_randomization_setting: specifies the mac randomization setting.
-        bridged_opportunistic_shutdown_enabled: specifies the opportunistic
-                shutdown enable or not.
-        ieee80211ax_enabled: specifies the ieee80211ax enable or not.
-    """
-    if security and password:
-        wifi_config[WifiEnums.SECURITY] = security
-        wifi_config[WifiEnums.PWD_KEY] = password
-    if hidden is not None:
-        wifi_config[WifiEnums.HIDDEN_KEY] = hidden
-    if max_clients is not None:
-        wifi_config[WifiEnums.AP_MAXCLIENTS_KEY] = max_clients
-    if shutdown_timeout_enable is not None:
-        wifi_config[WifiEnums.AP_SHUTDOWNTIMEOUTENABLE_KEY] = shutdown_timeout_enable
-    if shutdown_timeout_millis is not None:
-        wifi_config[WifiEnums.AP_SHUTDOWNTIMEOUT_KEY] = shutdown_timeout_millis
-    if client_control_enable is not None:
-        wifi_config[WifiEnums.AP_CLIENTCONTROL_KEY] = client_control_enable
-    if allowedList is not None:
-        wifi_config[WifiEnums.AP_ALLOWEDLIST_KEY] = allowedList
-    if blockedList is not None:
-        wifi_config[WifiEnums.AP_BLOCKEDLIST_KEY] = blockedList
-    if mac_randomization_setting is not None:
-        wifi_config[
-            WifiEnums.AP_MAC_RANDOMIZATION_SETTING_KEY
-        ] = mac_randomization_setting
-    if bridged_opportunistic_shutdown_enabled is not None:
-        wifi_config[
-            WifiEnums.AP_BRIDGED_OPPORTUNISTIC_SHUTDOWN_ENABLE_KEY
-        ] = bridged_opportunistic_shutdown_enabled
-    if ieee80211ax_enabled is not None:
-        wifi_config[WifiEnums.AP_IEEE80211AX_ENABLED_KEY] = ieee80211ax_enabled
-    if channel_frequencys is not None:
-        wifi_config[WifiEnums.AP_CHANNEL_FREQUENCYS_KEY] = channel_frequencys
-    elif bands is not None:
-        wifi_config[WifiEnums.AP_BANDS_KEY] = bands
-    elif band is not None:
-        if channel is not None:
-            wifi_config[WifiEnums.AP_BAND_KEY] = band
-            wifi_config[WifiEnums.AP_CHANNEL_KEY] = channel
-        else:
-            wifi_config[WifiEnums.AP_BAND_KEY] = band
-
-    if (
-        WifiEnums.AP_CHANNEL_KEY in wifi_config
-        and wifi_config[WifiEnums.AP_CHANNEL_KEY] == 0
-    ):
-        del wifi_config[WifiEnums.AP_CHANNEL_KEY]
-
-    if (
-        WifiEnums.SECURITY in wifi_config
-        and wifi_config[WifiEnums.SECURITY] == WifiEnums.SoftApSecurityType.OPEN
-    ):
-        del wifi_config[WifiEnums.SECURITY]
-        del wifi_config[WifiEnums.PWD_KEY]
-
-    asserts.assert_true(
-        ad.droid.wifiSetWifiApConfiguration(wifi_config),
-        "Failed to set WifiAp Configuration",
-    )
-
-    wifi_ap = ad.droid.wifiGetApConfiguration()
-    asserts.assert_true(
-        wifi_ap[WifiEnums.SSID_KEY] == wifi_config[WifiEnums.SSID_KEY],
-        "Hotspot SSID doesn't match",
-    )
-    if WifiEnums.SECURITY in wifi_config:
-        asserts.assert_true(
-            wifi_ap[WifiEnums.SECURITY] == wifi_config[WifiEnums.SECURITY],
-            "Hotspot Security doesn't match",
-        )
-    if WifiEnums.PWD_KEY in wifi_config:
-        asserts.assert_true(
-            wifi_ap[WifiEnums.PWD_KEY] == wifi_config[WifiEnums.PWD_KEY],
-            "Hotspot Password doesn't match",
-        )
-
-    if WifiEnums.HIDDEN_KEY in wifi_config:
-        asserts.assert_true(
-            wifi_ap[WifiEnums.HIDDEN_KEY] == wifi_config[WifiEnums.HIDDEN_KEY],
-            "Hotspot hidden setting doesn't match",
-        )
-
-    if WifiEnums.AP_CHANNEL_KEY in wifi_config:
-        asserts.assert_true(
-            wifi_ap[WifiEnums.AP_CHANNEL_KEY] == wifi_config[WifiEnums.AP_CHANNEL_KEY],
-            "Hotspot Channel doesn't match",
-        )
-    if WifiEnums.AP_MAXCLIENTS_KEY in wifi_config:
-        asserts.assert_true(
-            wifi_ap[WifiEnums.AP_MAXCLIENTS_KEY]
-            == wifi_config[WifiEnums.AP_MAXCLIENTS_KEY],
-            "Hotspot Max Clients doesn't match",
-        )
-    if WifiEnums.AP_SHUTDOWNTIMEOUTENABLE_KEY in wifi_config:
-        asserts.assert_true(
-            wifi_ap[WifiEnums.AP_SHUTDOWNTIMEOUTENABLE_KEY]
-            == wifi_config[WifiEnums.AP_SHUTDOWNTIMEOUTENABLE_KEY],
-            "Hotspot ShutDown feature flag doesn't match",
-        )
-    if WifiEnums.AP_SHUTDOWNTIMEOUT_KEY in wifi_config:
-        asserts.assert_true(
-            wifi_ap[WifiEnums.AP_SHUTDOWNTIMEOUT_KEY]
-            == wifi_config[WifiEnums.AP_SHUTDOWNTIMEOUT_KEY],
-            "Hotspot ShutDown timeout setting doesn't match",
-        )
-    if WifiEnums.AP_CLIENTCONTROL_KEY in wifi_config:
-        asserts.assert_true(
-            wifi_ap[WifiEnums.AP_CLIENTCONTROL_KEY]
-            == wifi_config[WifiEnums.AP_CLIENTCONTROL_KEY],
-            "Hotspot Client control flag doesn't match",
-        )
-    if WifiEnums.AP_ALLOWEDLIST_KEY in wifi_config:
-        asserts.assert_true(
-            wifi_ap[WifiEnums.AP_ALLOWEDLIST_KEY]
-            == wifi_config[WifiEnums.AP_ALLOWEDLIST_KEY],
-            "Hotspot Allowed List doesn't match",
-        )
-    if WifiEnums.AP_BLOCKEDLIST_KEY in wifi_config:
-        asserts.assert_true(
-            wifi_ap[WifiEnums.AP_BLOCKEDLIST_KEY]
-            == wifi_config[WifiEnums.AP_BLOCKEDLIST_KEY],
-            "Hotspot Blocked List doesn't match",
-        )
-
-    if WifiEnums.AP_MAC_RANDOMIZATION_SETTING_KEY in wifi_config:
-        asserts.assert_true(
-            wifi_ap[WifiEnums.AP_MAC_RANDOMIZATION_SETTING_KEY]
-            == wifi_config[WifiEnums.AP_MAC_RANDOMIZATION_SETTING_KEY],
-            "Hotspot Mac randomization setting doesn't match",
-        )
-
-    if WifiEnums.AP_BRIDGED_OPPORTUNISTIC_SHUTDOWN_ENABLE_KEY in wifi_config:
-        asserts.assert_true(
-            wifi_ap[WifiEnums.AP_BRIDGED_OPPORTUNISTIC_SHUTDOWN_ENABLE_KEY]
-            == wifi_config[WifiEnums.AP_BRIDGED_OPPORTUNISTIC_SHUTDOWN_ENABLE_KEY],
-            "Hotspot bridged shutdown enable setting doesn't match",
-        )
-
-    if WifiEnums.AP_IEEE80211AX_ENABLED_KEY in wifi_config:
-        asserts.assert_true(
-            wifi_ap[WifiEnums.AP_IEEE80211AX_ENABLED_KEY]
-            == wifi_config[WifiEnums.AP_IEEE80211AX_ENABLED_KEY],
-            "Hotspot 80211 AX enable setting doesn't match",
-        )
-
-    if WifiEnums.AP_CHANNEL_FREQUENCYS_KEY in wifi_config:
-        asserts.assert_true(
-            wifi_ap[WifiEnums.AP_CHANNEL_FREQUENCYS_KEY]
-            == wifi_config[WifiEnums.AP_CHANNEL_FREQUENCYS_KEY],
-            "Hotspot channels setting doesn't match",
-        )
-
-
-def toggle_wifi_and_wait_for_reconnection(
-    ad, network, num_of_tries=1, assert_on_fail=True
-):
-    """Toggle wifi state and then wait for Android device to reconnect to
-    the provided wifi network.
-
-    This expects the device to be already connected to the provided network.
-
-    Logic steps are
-     1. Ensure that we're connected to the network.
-     2. Turn wifi off.
-     3. Wait for 10 seconds.
-     4. Turn wifi on.
-     5. Wait for the "connected" event, then confirm the connected ssid is the
-        one requested.
-
-    Args:
-        ad: android_device object to initiate connection on.
-        network: A dictionary representing the network to await connection. The
-                 dictionary must have the key "SSID".
-        num_of_tries: An integer that is the number of times to try before
-                      delaring failure. Default is 1.
-        assert_on_fail: If True, error checks in this function will raise test
-                        failure signals.
-
-    Returns:
-        If assert_on_fail is False, function returns True if the toggle was
-        successful, False otherwise. If assert_on_fail is True, no return value.
-    """
-    return _assert_on_fail_handler(
-        _toggle_wifi_and_wait_for_reconnection,
-        assert_on_fail,
-        ad,
-        network,
-        num_of_tries=num_of_tries,
-    )
-
-
-def _toggle_wifi_and_wait_for_reconnection(ad, network, num_of_tries=3):
-    """Toggle wifi state and then wait for Android device to reconnect to
-    the provided wifi network.
-
-    This expects the device to be already connected to the provided network.
-
-    Logic steps are
-     1. Ensure that we're connected to the network.
-     2. Turn wifi off.
-     3. Wait for 10 seconds.
-     4. Turn wifi on.
-     5. Wait for the "connected" event, then confirm the connected ssid is the
-        one requested.
-
-    This will directly fail a test if anything goes wrong.
-
-    Args:
-        ad: android_device object to initiate connection on.
-        network: A dictionary representing the network to await connection. The
-                 dictionary must have the key "SSID".
-        num_of_tries: An integer that is the number of times to try before
-                      delaring failure. Default is 1.
-    """
-    expected_ssid = network[WifiEnums.SSID_KEY]
-    # First ensure that we're already connected to the provided network.
-    verify_con = {WifiEnums.SSID_KEY: expected_ssid}
-    verify_wifi_connection_info(ad, verify_con)
-    # Now toggle wifi state and wait for the connection event.
-    wifi_toggle_state(ad, False)
-    time.sleep(10)
-    wifi_toggle_state(ad, True)
-    ad.droid.wifiStartTrackingStateChange()
-    try:
-        connect_result = None
-        for i in range(num_of_tries):
-            try:
-                connect_result = ad.ed.pop_event(wifi_constants.WIFI_CONNECTED, 30)
-                break
-            except Empty:
-                pass
-        asserts.assert_true(
-            connect_result,
-            "Failed to connect to Wi-Fi network %s on %s" % (network, ad.serial),
-        )
-        logging.debug("Connection result on %s: %s.", ad.serial, connect_result)
-        actual_ssid = connect_result["data"][WifiEnums.SSID_KEY]
-        asserts.assert_equal(
-            actual_ssid,
-            expected_ssid,
-            "Connected to the wrong network on %s."
-            "Expected %s, but got %s." % (ad.serial, expected_ssid, actual_ssid),
-        )
-        logging.info("Connected to Wi-Fi network %s on %s", actual_ssid, ad.serial)
-    finally:
-        ad.droid.wifiStopTrackingStateChange()
-
-
-def wait_for_connect(
-    ad, expected_ssid=None, expected_id=None, tries=2, assert_on_fail=True
-):
-    """Wait for a connect event.
-
-    This will directly fail a test if anything goes wrong.
-
-    Args:
-        ad: An Android device object.
-        expected_ssid: SSID of the network to connect to.
-        expected_id: Network Id of the network to connect to.
-        tries: An integer that is the number of times to try before failing.
-        assert_on_fail: If True, error checks in this function will raise test
-                        failure signals.
-
-    Returns:
-        Returns a value only if assert_on_fail is false.
-        Returns True if the connection was successful, False otherwise.
-    """
-    return _assert_on_fail_handler(
-        _wait_for_connect, assert_on_fail, ad, expected_ssid, expected_id, tries
-    )
-
-
-def _wait_for_connect(ad, expected_ssid=None, expected_id=None, tries=2):
-    """Wait for a connect event.
-
-    Args:
-        ad: An Android device object.
-        expected_ssid: SSID of the network to connect to.
-        expected_id: Network Id of the network to connect to.
-        tries: An integer that is the number of times to try before failing.
-    """
-    ad.droid.wifiStartTrackingStateChange()
-    try:
-        connect_result = _wait_for_connect_event(
-            ad, ssid=expected_ssid, id=expected_id, tries=tries
-        )
-        asserts.assert_true(
-            connect_result, "Failed to connect to Wi-Fi network %s" % expected_ssid
-        )
-        ad.log.debug("Wi-Fi connection result: %s.", connect_result)
-        actual_ssid = connect_result["data"][WifiEnums.SSID_KEY]
-        if expected_ssid:
-            asserts.assert_equal(
-                actual_ssid, expected_ssid, "Connected to the wrong network"
-            )
-        actual_id = connect_result["data"][WifiEnums.NETID_KEY]
-        if expected_id:
-            asserts.assert_equal(
-                actual_id, expected_id, "Connected to the wrong network"
-            )
-        ad.log.info("Connected to Wi-Fi network %s.", actual_ssid)
-    except Empty:
-        asserts.fail("Failed to start connection process to %s" % expected_ssid)
-    except Exception as error:
-        ad.log.error("Failed to connect to %s with error %s", expected_ssid, error)
-        raise signals.TestFailure("Failed to connect to %s network" % expected_ssid)
-    finally:
-        ad.droid.wifiStopTrackingStateChange()
-
-
-def _wait_for_connect_event(ad, ssid=None, id=None, tries=1):
-    """Wait for a connect event on queue and pop when available.
-
-    Args:
-        ad: An Android device object.
-        ssid: SSID of the network to connect to.
-        id: Network Id of the network to connect to.
-        tries: An integer that is the number of times to try before failing.
-
-    Returns:
-        A dict with details of the connection data, which looks like this:
-        {
-         'time': 1485460337798,
-         'name': 'WifiNetworkConnected',
-         'data': {
-                  'rssi': -27,
-                  'is_24ghz': True,
-                  'mac_address': '02:00:00:00:00:00',
-                  'network_id': 1,
-                  'BSSID': '30:b5:c2:33:d3:fc',
-                  'ip_address': 117483712,
-                  'link_speed': 54,
-                  'supplicant_state': 'completed',
-                  'hidden_ssid': False,
-                  'SSID': 'wh_ap1_2g',
-                  'is_5ghz': False}
-        }
-
-    """
-    conn_result = None
-
-    # If ssid and network id is None, just wait for any connect event.
-    if id is None and ssid is None:
-        for i in range(tries):
-            try:
-                conn_result = ad.ed.pop_event(wifi_constants.WIFI_CONNECTED, 30)
-                break
-            except Empty:
-                pass
-    else:
-        # If ssid or network id is specified, wait for specific connect event.
-        for i in range(tries):
-            try:
-                conn_result = ad.ed.pop_event(wifi_constants.WIFI_CONNECTED, 30)
-                if id and conn_result["data"][WifiEnums.NETID_KEY] == id:
-                    break
-                elif ssid and conn_result["data"][WifiEnums.SSID_KEY] == ssid:
-                    break
-            except Empty:
-                pass
-
-    return conn_result
-
-
-def wait_for_disconnect(ad, timeout=10):
-    """Wait for a disconnect event within the specified timeout.
-
-    Args:
-        ad: Android device object.
-        timeout: Timeout in seconds.
-
-    """
-    try:
-        ad.droid.wifiStartTrackingStateChange()
-        event = ad.ed.pop_event("WifiNetworkDisconnected", timeout)
-    except Empty:
-        raise signals.TestFailure("Device did not disconnect from the network")
-    finally:
-        ad.droid.wifiStopTrackingStateChange()
-
-
-def ensure_no_disconnect(ad, duration=10):
-    """Ensure that there is no disconnect for the specified duration.
-
-    Args:
-        ad: Android device object.
-        duration: Duration in seconds.
-
-    """
-    try:
-        ad.droid.wifiStartTrackingStateChange()
-        event = ad.ed.pop_event("WifiNetworkDisconnected", duration)
-        raise signals.TestFailure("Device disconnected from the network")
-    except Empty:
-        pass
-    finally:
-        ad.droid.wifiStopTrackingStateChange()
-
-
-def connect_to_wifi_network(
-    ad,
-    network,
-    assert_on_fail=True,
-    check_connectivity=True,
-    hidden=False,
-    num_of_scan_tries=DEFAULT_SCAN_TRIES,
-    num_of_connect_tries=DEFAULT_CONNECT_TRIES,
-):
-    """Connection logic for open and psk wifi networks.
-
-    Args:
-        ad: AndroidDevice to use for connection
-        network: network info of the network to connect to
-        assert_on_fail: If true, errors from wifi_connect will raise
-                        test failure signals.
-        hidden: Is the Wifi network hidden.
-        num_of_scan_tries: The number of times to try scan
-                           interface before declaring failure.
-        num_of_connect_tries: The number of times to try
-                              connect wifi before declaring failure.
-    """
-    if hidden:
-        start_wifi_connection_scan_and_ensure_network_not_found(
-            ad, network[WifiEnums.SSID_KEY], max_tries=num_of_scan_tries
-        )
-    else:
-        start_wifi_connection_scan_and_ensure_network_found(
-            ad, network[WifiEnums.SSID_KEY], max_tries=num_of_scan_tries
-        )
-    wifi_connect(
-        ad,
-        network,
-        num_of_tries=num_of_connect_tries,
-        assert_on_fail=assert_on_fail,
-        check_connectivity=check_connectivity,
-    )
-
-
-def connect_to_wifi_network_with_id(ad, network_id, network_ssid):
-    """Connect to the given network using network id and verify SSID.
-
-    Args:
-        network_id: int Network Id of the network.
-        network_ssid: string SSID of the network.
-
-    Returns: True if connect using network id was successful;
-             False otherwise.
-
-    """
-    start_wifi_connection_scan_and_ensure_network_found(ad, network_ssid)
-    wifi_connect_by_id(ad, network_id)
-    connect_data = ad.droid.wifiGetConnectionInfo()
-    connect_ssid = connect_data[WifiEnums.SSID_KEY]
-    ad.log.debug(
-        "Expected SSID = %s Connected SSID = %s" % (network_ssid, connect_ssid)
-    )
-    if connect_ssid != network_ssid:
-        return False
-    return True
-
-
-def wifi_connect(
-    ad, network, num_of_tries=1, assert_on_fail=True, check_connectivity=True
-):
-    """Connect an Android device to a wifi network.
-
-    Initiate connection to a wifi network, wait for the "connected" event, then
-    confirm the connected ssid is the one requested.
-
-    This will directly fail a test if anything goes wrong.
-
-    Args:
-        ad: android_device object to initiate connection on.
-        network: A dictionary representing the network to connect to. The
-                 dictionary must have the key "SSID".
-        num_of_tries: An integer that is the number of times to try before
-                      delaring failure. Default is 1.
-        assert_on_fail: If True, error checks in this function will raise test
-                        failure signals.
-
-    Returns:
-        Returns a value only if assert_on_fail is false.
-        Returns True if the connection was successful, False otherwise.
-    """
-    return _assert_on_fail_handler(
-        _wifi_connect,
-        assert_on_fail,
-        ad,
-        network,
-        num_of_tries=num_of_tries,
-        check_connectivity=check_connectivity,
-    )
-
-
-def _wifi_connect(ad, network, num_of_tries=1, check_connectivity=True):
-    """Connect an Android device to a wifi network.
-
-    Initiate connection to a wifi network, wait for the "connected" event, then
-    confirm the connected ssid is the one requested.
-
-    This will directly fail a test if anything goes wrong.
-
-    Args:
-        ad: android_device object to initiate connection on.
-        network: A dictionary representing the network to connect to. The
-                 dictionary must have the key "SSID".
-        num_of_tries: An integer that is the number of times to try before
-                      delaring failure. Default is 1.
-    """
-    asserts.assert_true(
-        WifiEnums.SSID_KEY in network,
-        "Key '%s' must be present in network definition." % WifiEnums.SSID_KEY,
-    )
-    ad.droid.wifiStartTrackingStateChange()
-    expected_ssid = network[WifiEnums.SSID_KEY]
-    ad.droid.wifiConnectByConfig(network)
-    ad.log.info("Starting connection process to %s", expected_ssid)
-    try:
-        event = ad.ed.pop_event(wifi_constants.CONNECT_BY_CONFIG_SUCCESS, 30)
-        connect_result = _wait_for_connect_event(
-            ad, ssid=expected_ssid, tries=num_of_tries
-        )
-        asserts.assert_true(
-            connect_result,
-            "Failed to connect to Wi-Fi network %s on %s" % (network, ad.serial),
-        )
-        ad.log.debug("Wi-Fi connection result: %s.", connect_result)
-        actual_ssid = connect_result["data"][WifiEnums.SSID_KEY]
-        asserts.assert_equal(
-            actual_ssid,
-            expected_ssid,
-            "Connected to the wrong network on %s." % ad.serial,
-        )
-        ad.log.info("Connected to Wi-Fi network %s.", actual_ssid)
-
-        if check_connectivity:
-            internet = validate_connection(ad, DEFAULT_PING_ADDR)
-            if not internet:
-                raise signals.TestFailure(
-                    "Failed to connect to internet on %s" % expected_ssid
-                )
-    except Empty:
-        asserts.fail(
-            "Failed to start connection process to %s on %s" % (network, ad.serial)
-        )
-    except Exception as error:
-        ad.log.error("Failed to connect to %s with error %s", expected_ssid, error)
-        raise signals.TestFailure("Failed to connect to %s network" % network)
-
-    finally:
-        ad.droid.wifiStopTrackingStateChange()
-
-
-def wifi_connect_by_id(ad, network_id, num_of_tries=3, assert_on_fail=True):
-    """Connect an Android device to a wifi network using network Id.
-
-    Start connection to the wifi network, with the given network Id, wait for
-    the "connected" event, then verify the connected network is the one requested.
-
-    This will directly fail a test if anything goes wrong.
-
-    Args:
-        ad: android_device object to initiate connection on.
-        network_id: Integer specifying the network id of the network.
-        num_of_tries: An integer that is the number of times to try before
-                      delaring failure. Default is 1.
-        assert_on_fail: If True, error checks in this function will raise test
-                        failure signals.
-
-    Returns:
-        Returns a value only if assert_on_fail is false.
-        Returns True if the connection was successful, False otherwise.
-    """
-    _assert_on_fail_handler(
-        _wifi_connect_by_id, assert_on_fail, ad, network_id, num_of_tries
-    )
-
-
-def _wifi_connect_by_id(ad, network_id, num_of_tries=1):
-    """Connect an Android device to a wifi network using it's network id.
-
-    Start connection to the wifi network, with the given network id, wait for
-    the "connected" event, then verify the connected network is the one requested.
-
-    Args:
-        ad: android_device object to initiate connection on.
-        network_id: Integer specifying the network id of the network.
-        num_of_tries: An integer that is the number of times to try before
-                      delaring failure. Default is 1.
-    """
-    ad.droid.wifiStartTrackingStateChange()
-    # Clear all previous events.
-    ad.ed.clear_all_events()
-    ad.droid.wifiConnectByNetworkId(network_id)
-    ad.log.info("Starting connection to network with id %d", network_id)
-    try:
-        event = ad.ed.pop_event(wifi_constants.CONNECT_BY_NETID_SUCCESS, 60)
-        connect_result = _wait_for_connect_event(ad, id=network_id, tries=num_of_tries)
-        asserts.assert_true(
-            connect_result, "Failed to connect to Wi-Fi network using network id"
-        )
-        ad.log.debug("Wi-Fi connection result: %s", connect_result)
-        actual_id = connect_result["data"][WifiEnums.NETID_KEY]
-        asserts.assert_equal(
-            actual_id,
-            network_id,
-            "Connected to the wrong network on %s."
-            "Expected network id = %d, but got %d."
-            % (ad.serial, network_id, actual_id),
-        )
-        expected_ssid = connect_result["data"][WifiEnums.SSID_KEY]
-        ad.log.info(
-            "Connected to Wi-Fi network %s with %d network id.",
-            expected_ssid,
-            network_id,
-        )
-
-        internet = validate_connection(ad, DEFAULT_PING_ADDR)
-        if not internet:
-            raise signals.TestFailure(
-                "Failed to connect to internet on %s" % expected_ssid
-            )
-    except Empty:
-        asserts.fail(
-            "Failed to connect to network with id %d on %s" % (network_id, ad.serial)
-        )
-    except Exception as error:
-        ad.log.error(
-            "Failed to connect to network with id %d with error %s", network_id, error
-        )
-        raise signals.TestFailure(
-            "Failed to connect to network with network" " id %d" % network_id
-        )
-    finally:
-        ad.droid.wifiStopTrackingStateChange()
-
-
-def wifi_connect_using_network_request(ad, network, network_specifier, num_of_tries=3):
-    """Connect an Android device to a wifi network using network request.
-
-    Trigger a network request with the provided network specifier,
-    wait for the "onMatch" event, ensure that the scan results in "onMatch"
-    event contain the specified network, then simulate the user granting the
-    request with the specified network selected. Then wait for the "onAvailable"
-    network callback indicating successful connection to network.
-
-    Args:
-        ad: android_device object to initiate connection on.
-        network_specifier: A dictionary representing the network specifier to
-                           use.
-        network: A dictionary representing the network to connect to. The
-                 dictionary must have the key "SSID".
-        num_of_tries: An integer that is the number of times to try before
-                      delaring failure.
-    Returns:
-        key: Key corresponding to network request.
-    """
-    key = ad.droid.connectivityRequestWifiNetwork(network_specifier, 0)
-    ad.log.info("Sent network request %s with %s " % (key, network_specifier))
-    # Need a delay here because UI interaction should only start once wifi
-    # starts processing the request.
-    time.sleep(wifi_constants.NETWORK_REQUEST_CB_REGISTER_DELAY_SEC)
-    _wait_for_wifi_connect_after_network_request(ad, network, key, num_of_tries)
-    return key
-
-
-def wait_for_wifi_connect_after_network_request(
-    ad, network, key, num_of_tries=3, assert_on_fail=True
-):
-    """
-    Simulate and verify the connection flow after initiating the network
-    request.
-
-    Wait for the "onMatch" event, ensure that the scan results in "onMatch"
-    event contain the specified network, then simulate the user granting the
-    request with the specified network selected. Then wait for the "onAvailable"
-    network callback indicating successful connection to network.
-
-    Args:
-        ad: android_device object to initiate connection on.
-        network: A dictionary representing the network to connect to. The
-                 dictionary must have the key "SSID".
-        key: Key corresponding to network request.
-        num_of_tries: An integer that is the number of times to try before
-                      delaring failure.
-        assert_on_fail: If True, error checks in this function will raise test
-                        failure signals.
-
-    Returns:
-        Returns a value only if assert_on_fail is false.
-        Returns True if the connection was successful, False otherwise.
-    """
-    _assert_on_fail_handler(
-        _wait_for_wifi_connect_after_network_request,
-        assert_on_fail,
-        ad,
-        network,
-        key,
-        num_of_tries,
-    )
-
-
-def _wait_for_wifi_connect_after_network_request(ad, network, key, num_of_tries=3):
-    """
-    Simulate and verify the connection flow after initiating the network
-    request.
-
-    Wait for the "onMatch" event, ensure that the scan results in "onMatch"
-    event contain the specified network, then simulate the user granting the
-    request with the specified network selected. Then wait for the "onAvailable"
-    network callback indicating successful connection to network.
-
-    Args:
-        ad: android_device object to initiate connection on.
-        network: A dictionary representing the network to connect to. The
-                 dictionary must have the key "SSID".
-        key: Key corresponding to network request.
-        num_of_tries: An integer that is the number of times to try before
-                      delaring failure.
-    """
-    asserts.assert_true(
-        WifiEnums.SSID_KEY in network,
-        "Key '%s' must be present in network definition." % WifiEnums.SSID_KEY,
-    )
-    ad.droid.wifiStartTrackingStateChange()
-    expected_ssid = network[WifiEnums.SSID_KEY]
-    ad.droid.wifiRegisterNetworkRequestMatchCallback()
-    # Wait for the platform to scan and return a list of networks
-    # matching the request
-    try:
-        matched_network = None
-        for _ in [0, num_of_tries]:
-            on_match_event = ad.ed.pop_event(
-                wifi_constants.WIFI_NETWORK_REQUEST_MATCH_CB_ON_MATCH, 60
-            )
-            asserts.assert_true(
-                on_match_event, "Network request on match not received."
-            )
-            matched_scan_results = on_match_event["data"]
-            ad.log.debug("Network request on match results %s", matched_scan_results)
-            matched_network = match_networks(
-                {WifiEnums.SSID_KEY: network[WifiEnums.SSID_KEY]}, matched_scan_results
-            )
-            ad.log.debug("Network request on match %s", matched_network)
-            if matched_network:
-                break
-
-        asserts.assert_true(matched_network, "Target network %s not found" % network)
-
-        ad.droid.wifiSendUserSelectionForNetworkRequestMatch(network)
-        ad.log.info("Sent user selection for network request %s", expected_ssid)
-
-        # Wait for the platform to connect to the network.
-        connected_network = None
-        # WifiInfo is attached to TransportInfo only in S.
-        if not ad.droid.isSdkAtLeastS():
-            connected_network = ad.droid.wifiGetConnectionInfo()
-        ad.log.info("Connected to network %s", connected_network)
-        asserts.assert_equal(
-            connected_network[WifiEnums.SSID_KEY],
-            expected_ssid,
-            "Connected to the wrong network."
-            "Expected %s, but got %s." % (network, connected_network),
-        )
-    except Empty:
-        asserts.fail("Failed to connect to %s" % expected_ssid)
-    except Exception as error:
-        ad.log.error("Failed to connect to %s with error %s" % (expected_ssid, error))
-        raise signals.TestFailure("Failed to connect to %s network" % network)
-    finally:
-        ad.droid.wifiStopTrackingStateChange()
-
-
-def wifi_passpoint_connect(ad, passpoint_network, num_of_tries=1, assert_on_fail=True):
-    """Connect an Android device to a wifi network.
-
-    Initiate connection to a wifi network, wait for the "connected" event, then
-    confirm the connected ssid is the one requested.
-
-    This will directly fail a test if anything goes wrong.
-
-    Args:
-        ad: android_device object to initiate connection on.
-        passpoint_network: SSID of the Passpoint network to connect to.
-        num_of_tries: An integer that is the number of times to try before
-                      delaring failure. Default is 1.
-        assert_on_fail: If True, error checks in this function will raise test
-                        failure signals.
-
-    Returns:
-        If assert_on_fail is False, function returns network id, if the connect was
-        successful, False otherwise. If assert_on_fail is True, no return value.
-    """
-    _assert_on_fail_handler(
-        _wifi_passpoint_connect,
-        assert_on_fail,
-        ad,
-        passpoint_network,
-        num_of_tries=num_of_tries,
-    )
-
-
-def _wifi_passpoint_connect(ad, passpoint_network, num_of_tries=1):
-    """Connect an Android device to a wifi network.
-
-    Initiate connection to a wifi network, wait for the "connected" event, then
-    confirm the connected ssid is the one requested.
-
-    This will directly fail a test if anything goes wrong.
-
-    Args:
-        ad: android_device object to initiate connection on.
-        passpoint_network: SSID of the Passpoint network to connect to.
-        num_of_tries: An integer that is the number of times to try before
-                      delaring failure. Default is 1.
-    """
-    ad.droid.wifiStartTrackingStateChange()
-    expected_ssid = passpoint_network
-    ad.log.info("Starting connection process to passpoint %s", expected_ssid)
-
-    try:
-        connect_result = _wait_for_connect_event(ad, expected_ssid, num_of_tries)
-        asserts.assert_true(
-            connect_result,
-            "Failed to connect to WiFi passpoint network %s on"
-            " %s" % (expected_ssid, ad.serial),
-        )
-        ad.log.info("Wi-Fi connection result: %s.", connect_result)
-        actual_ssid = connect_result["data"][WifiEnums.SSID_KEY]
-        asserts.assert_equal(
-            actual_ssid,
-            expected_ssid,
-            "Connected to the wrong network on %s." % ad.serial,
-        )
-        ad.log.info("Connected to Wi-Fi passpoint network %s.", actual_ssid)
-
-        internet = validate_connection(ad, DEFAULT_PING_ADDR)
-        if not internet:
-            raise signals.TestFailure(
-                "Failed to connect to internet on %s" % expected_ssid
-            )
-    except Exception as error:
-        ad.log.error(
-            "Failed to connect to passpoint network %s with error %s",
-            expected_ssid,
-            error,
-        )
-        raise signals.TestFailure(
-            "Failed to connect to %s passpoint network" % expected_ssid
-        )
-
-    finally:
-        ad.droid.wifiStopTrackingStateChange()
-
-
-def delete_passpoint(ad, fqdn):
-    """Delete a required Passpoint configuration."""
-    try:
-        ad.droid.removePasspointConfig(fqdn)
-        return True
-    except Exception as error:
-        ad.log.error(
-            "Failed to remove passpoint configuration with FQDN=%s " "and error=%s",
-            fqdn,
-            error,
-        )
-        return False
-
-
-def start_wifi_single_scan(ad, scan_setting):
-    """Starts wifi single shot scan.
-
-    Args:
-        ad: android_device object to initiate connection on.
-        scan_setting: A dict representing the settings of the scan.
-
-    Returns:
-        If scan was started successfully, event data of success event is returned.
-    """
-    idx = ad.droid.wifiScannerStartScan(scan_setting)
-    event = ad.ed.pop_event("WifiScannerScan%sonSuccess" % idx, SHORT_TIMEOUT)
-    ad.log.debug("Got event %s", event)
-    return event["data"]
-
-
-def track_connection(ad, network_ssid, check_connection_count):
-    """Track wifi connection to network changes for given number of counts
-
-    Args:
-        ad: android_device object for forget network.
-        network_ssid: network ssid to which connection would be tracked
-        check_connection_count: Integer for maximum number network connection
-                                check.
-    Returns:
-        True if connection to given network happen, else return False.
-    """
-    ad.droid.wifiStartTrackingStateChange()
-    while check_connection_count > 0:
-        connect_network = ad.ed.pop_event("WifiNetworkConnected", 120)
-        ad.log.info("Connected to network %s", connect_network)
-        if (
-            WifiEnums.SSID_KEY in connect_network["data"]
-            and connect_network["data"][WifiEnums.SSID_KEY] == network_ssid
-        ):
-            return True
-        check_connection_count -= 1
-    ad.droid.wifiStopTrackingStateChange()
-    return False
-
-
-def get_scan_time_and_channels(wifi_chs, scan_setting, stime_channel):
-    """Calculate the scan time required based on the band or channels in scan
-    setting
-
-    Args:
-        wifi_chs: Object of channels supported
-        scan_setting: scan setting used for start scan
-        stime_channel: scan time per channel
-
-    Returns:
-        scan_time: time required for completing a scan
-        scan_channels: channel used for scanning
-    """
-    scan_time = 0
-    scan_channels = []
-    if "band" in scan_setting and "channels" not in scan_setting:
-        scan_channels = wifi_chs.band_to_freq(scan_setting["band"])
-    elif "channels" in scan_setting and "band" not in scan_setting:
-        scan_channels = scan_setting["channels"]
-    scan_time = len(scan_channels) * stime_channel
-    for channel in scan_channels:
-        if channel in WifiEnums.DFS_5G_FREQUENCIES:
-            scan_time += 132  # passive scan time on DFS
-    return scan_time, scan_channels
-
-
-def start_wifi_track_bssid(ad, track_setting):
-    """Start tracking Bssid for the given settings.
-
-    Args:
-      ad: android_device object.
-      track_setting: Setting for which the bssid tracking should be started
-
-    Returns:
-      If tracking started successfully, event data of success event is returned.
-    """
-    idx = ad.droid.wifiScannerStartTrackingBssids(
-        track_setting["bssidInfos"], track_setting["apLostThreshold"]
-    )
-    event = ad.ed.pop_event("WifiScannerBssid{}onSuccess".format(idx), SHORT_TIMEOUT)
-    return event["data"]
-
-
-def convert_pem_key_to_pkcs8(in_file, out_file):
-    """Converts the key file generated by us to the format required by
-    Android using openssl.
-
-    The input file must have the extension "pem". The output file must
-    have the extension "der".
-
-    Args:
-        in_file: The original key file.
-        out_file: The full path to the converted key file, including
-        filename.
-    """
-    asserts.assert_true(in_file.endswith(".pem"), "Input file has to be .pem.")
-    asserts.assert_true(out_file.endswith(".der"), "Output file has to be .der.")
-    cmd = (
-        "openssl pkcs8 -inform PEM -in {} -outform DER -out {} -nocrypt" " -topk8"
-    ).format(in_file, out_file)
-    utils.exe_cmd(cmd)
-
-
-def validate_connection(
-    ad, ping_addr=DEFAULT_PING_ADDR, wait_time=15, ping_gateway=True
-):
-    """Validate internet connection by pinging the address provided.
-
-    Args:
-        ad: android_device object.
-        ping_addr: address on internet for pinging.
-        wait_time: wait for some time before validating connection
-
-    Returns:
-        ping output if successful, NULL otherwise.
-    """
-    android_version = int(ad.adb.shell("getprop ro.vendor.build.version.release"))
-    # wait_time to allow for DHCP to complete.
-    for i in range(wait_time):
-        if ad.droid.connectivityNetworkIsConnected():
-            if (
-                android_version > 10 and ad.droid.connectivityGetIPv4DefaultGateway()
-            ) or android_version < 11:
-                break
-        time.sleep(1)
-    ping = False
-    try:
-        ping = ad.droid.httpPing(ping_addr)
-        ad.log.info("Http ping result: %s.", ping)
-    except:
-        pass
-    if android_version > 10 and not ping and ping_gateway:
-        ad.log.info("Http ping failed. Pinging default gateway")
-        gw = ad.droid.connectivityGetIPv4DefaultGateway()
-        result = ad.adb.shell("ping -c 6 {}".format(gw))
-        ad.log.info("Default gateway ping result: %s" % result)
-        ping = False if "100% packet loss" in result else True
-    return ping
-
-
-# TODO(angli): This can only verify if an actual value is exactly the same.
-# Would be nice to be able to verify an actual value is one of serveral.
-def verify_wifi_connection_info(ad, expected_con):
-    """Verifies that the information of the currently connected wifi network is
-    as expected.
-
-    Args:
-        expected_con: A dict representing expected key-value pairs for wifi
-            connection. e.g. {"SSID": "test_wifi"}
-    """
-    current_con = ad.droid.wifiGetConnectionInfo()
-    case_insensitive = ["BSSID", "supplicant_state"]
-    ad.log.debug("Current connection: %s", current_con)
-    for k, expected_v in expected_con.items():
-        # Do not verify authentication related fields.
-        if k == "password":
-            continue
-        msg = "Field %s does not exist in wifi connection info %s." % (k, current_con)
-        if k not in current_con:
-            raise signals.TestFailure(msg)
-        actual_v = current_con[k]
-        if k in case_insensitive:
-            actual_v = actual_v.lower()
-            expected_v = expected_v.lower()
-        msg = "Expected %s to be %s, actual %s is %s." % (k, expected_v, k, actual_v)
-        if actual_v != expected_v:
-            raise signals.TestFailure(msg)
-
-
-def check_autoconnect_to_open_network(ad, conn_timeout=WIFI_CONNECTION_TIMEOUT_DEFAULT):
-    """Connects to any open WiFI AP
-    Args:
-        timeout value in sec to wait for UE to connect to a WiFi AP
-    Returns:
-        True if UE connects to WiFi AP (supplicant_state = completed)
-        False if UE fails to complete connection within WIFI_CONNECTION_TIMEOUT time.
-    """
-    if ad.droid.wifiCheckState():
-        return True
-    ad.droid.wifiToggleState()
-    wifi_connection_state = None
-    timeout = time.time() + conn_timeout
-    while wifi_connection_state != "completed":
-        wifi_connection_state = ad.droid.wifiGetConnectionInfo()["supplicant_state"]
-        if time.time() > timeout:
-            ad.log.warning("Failed to connect to WiFi AP")
-            return False
-    return True
-
-
-def expand_enterprise_config_by_phase2(config):
-    """Take an enterprise config and generate a list of configs, each with
-    a different phase2 auth type.
-
-    Args:
-        config: A dict representing enterprise config.
-
-    Returns
-        A list of enterprise configs.
-    """
-    results = []
-    phase2_types = WifiEnums.EapPhase2
-    if config[WifiEnums.Enterprise.EAP] == WifiEnums.Eap.PEAP:
-        # Skip unsupported phase2 types for PEAP.
-        phase2_types = [WifiEnums.EapPhase2.GTC, WifiEnums.EapPhase2.MSCHAPV2]
-    for phase2_type in phase2_types:
-        # Skip a special case for passpoint TTLS.
-        if (
-            WifiEnums.Enterprise.FQDN in config
-            and phase2_type == WifiEnums.EapPhase2.GTC
-        ):
-            continue
-        c = dict(config)
-        c[WifiEnums.Enterprise.PHASE2] = phase2_type.value
-        results.append(c)
-    return results
-
-
-def generate_eap_test_name(config, ad=None):
-    """Generates a test case name based on an EAP configuration.
-
-    Args:
-        config: A dict representing an EAP credential.
-        ad object: Redundant but required as the same param is passed
-                   to test_func in run_generated_tests
-
-    Returns:
-        A string representing the name of a generated EAP test case.
-    """
-    eap = WifiEnums.Eap
-    eap_phase2 = WifiEnums.EapPhase2
-    Ent = WifiEnums.Enterprise
-    name = "test_connect-"
-    eap_name = ""
-    for e in eap:
-        if e.value == config[Ent.EAP]:
-            eap_name = e.name
-            break
-    if "peap0" in config[WifiEnums.SSID_KEY].lower():
-        eap_name = "PEAP0"
-    if "peap1" in config[WifiEnums.SSID_KEY].lower():
-        eap_name = "PEAP1"
-    name += eap_name
-    if Ent.PHASE2 in config:
-        for e in eap_phase2:
-            if e.value == config[Ent.PHASE2]:
-                name += "-{}".format(e.name)
-                break
-    return name
-
-
-def group_attenuators(attenuators):
-    """Groups a list of attenuators into attenuator groups for backward
-    compatibility reasons.
-
-    Most legacy Wi-Fi setups have two attenuators each connected to a separate
-    AP. The new Wi-Fi setup has four attenuators, each connected to one channel
-    on an AP, so two of them are connected to one AP.
-
-    To make the existing scripts work in the new setup, when the script needs
-    to attenuate one AP, it needs to set attenuation on both attenuators
-    connected to the same AP.
-
-    This function groups attenuators properly so the scripts work in both
-    legacy and new Wi-Fi setups.
-
-    Args:
-        attenuators: A list of attenuator objects, either two or four in length.
-
-    Raises:
-        signals.TestFailure is raised if the attenuator list does not have two
-        or four objects.
-    """
-    attn0 = attenuator.AttenuatorGroup("AP0")
-    attn1 = attenuator.AttenuatorGroup("AP1")
-    # Legacy testbed setup has two attenuation channels.
-    num_of_attns = len(attenuators)
-    if num_of_attns == 2:
-        attn0.add(attenuators[0])
-        attn1.add(attenuators[1])
-    elif num_of_attns == 4:
-        attn0.add(attenuators[0])
-        attn0.add(attenuators[1])
-        attn1.add(attenuators[2])
-        attn1.add(attenuators[3])
-    else:
-        asserts.fail(
-            (
-                "Either two or four attenuators are required for this "
-                "test, but found %s"
-            )
-            % num_of_attns
-        )
-    return [attn0, attn1]
-
-
-def set_attns(attenuator, attn_val_name, roaming_attn=ROAMING_ATTN):
-    """Sets attenuation values on attenuators used in this test.
-
-    Args:
-        attenuator: The attenuator object.
-        attn_val_name: Name of the attenuation value pair to use.
-        roaming_attn: Dictionary specifying the attenuation params.
-    """
-    logging.info("Set attenuation values to %s", roaming_attn[attn_val_name])
-    try:
-        attenuator[0].set_atten(roaming_attn[attn_val_name][0])
-        attenuator[1].set_atten(roaming_attn[attn_val_name][1])
-        attenuator[2].set_atten(roaming_attn[attn_val_name][2])
-        attenuator[3].set_atten(roaming_attn[attn_val_name][3])
-    except:
-        logging.exception("Failed to set attenuation values %s.", attn_val_name)
-        raise
-
-
-def set_attns_steps(
-    attenuators, atten_val_name, roaming_attn=ROAMING_ATTN, steps=10, wait_time=12
-):
-    """Set attenuation values on attenuators used in this test. It will change
-    the attenuation values linearly from current value to target value step by
-    step.
-
-    Args:
-        attenuators: The list of attenuator objects that you want to change
-                     their attenuation value.
-        atten_val_name: Name of the attenuation value pair to use.
-        roaming_attn: Dictionary specifying the attenuation params.
-        steps: Number of attenuator changes to reach the target value.
-        wait_time: Sleep time for each change of attenuator.
-    """
-    logging.info(
-        "Set attenuation values to %s in %d step(s)",
-        roaming_attn[atten_val_name],
-        steps,
-    )
-    start_atten = [attenuator.get_atten() for attenuator in attenuators]
-    target_atten = roaming_attn[atten_val_name]
-    for current_step in range(steps):
-        progress = (current_step + 1) / steps
-        for i, attenuator in enumerate(attenuators):
-            amount_since_start = (target_atten[i] - start_atten[i]) * progress
-            attenuator.set_atten(round(start_atten[i] + amount_since_start))
-        time.sleep(wait_time)
-
-
-def trigger_roaming_and_validate(
-    dut, attenuator, attn_val_name, expected_con, roaming_attn=ROAMING_ATTN
-):
-    """Sets attenuators to trigger roaming and validate the DUT connected
-    to the BSSID expected.
-
-    Args:
-        attenuator: The attenuator object.
-        attn_val_name: Name of the attenuation value pair to use.
-        expected_con: The network information of the expected network.
-        roaming_attn: Dictionary specifying the attenaution params.
-    """
-    expected_con = {
-        WifiEnums.SSID_KEY: expected_con[WifiEnums.SSID_KEY],
-        WifiEnums.BSSID_KEY: expected_con["bssid"],
-    }
-    set_attns_steps(attenuator, attn_val_name, roaming_attn)
-
-    verify_wifi_connection_info(dut, expected_con)
-    expected_bssid = expected_con[WifiEnums.BSSID_KEY]
-    logging.info("Roamed to %s successfully", expected_bssid)
-    if not validate_connection(dut):
-        raise signals.TestFailure("Fail to connect to internet on %s" % expected_bssid)
-
-
-def create_softap_config():
-    """Create a softap config with random ssid and password."""
-    ap_ssid = "softap_" + utils.rand_ascii_str(8)
-    ap_password = utils.rand_ascii_str(8)
-    logging.info("softap setup: %s %s", ap_ssid, ap_password)
-    config = {
-        WifiEnums.SSID_KEY: ap_ssid,
-        WifiEnums.PWD_KEY: ap_password,
-    }
-    return config
-
-
-def wait_for_expected_number_of_softap_clients(
-    ad, callbackId, expected_num_of_softap_clients
-):
-    """Wait for the number of softap clients to be updated as expected.
-    Args:
-        callbackId: Id of the callback associated with registering.
-        expected_num_of_softap_clients: expected number of softap clients.
-    """
-    eventStr = (
-        wifi_constants.SOFTAP_CALLBACK_EVENT
-        + str(callbackId)
-        + wifi_constants.SOFTAP_NUMBER_CLIENTS_CHANGED
-    )
-    clientData = ad.ed.pop_event(eventStr, SHORT_TIMEOUT)["data"]
-    clientCount = clientData[wifi_constants.SOFTAP_NUMBER_CLIENTS_CALLBACK_KEY]
-    clientMacAddresses = clientData[wifi_constants.SOFTAP_CLIENTS_MACS_CALLBACK_KEY]
-    asserts.assert_equal(
-        clientCount,
-        expected_num_of_softap_clients,
-        "The number of softap clients doesn't match the expected number",
-    )
-    asserts.assert_equal(
-        len(clientMacAddresses),
-        expected_num_of_softap_clients,
-        "The number of mac addresses doesn't match the expected number",
-    )
-    for macAddress in clientMacAddresses:
-        asserts.assert_true(
-            checkMacAddress(macAddress), "An invalid mac address was returned"
-        )
-
-
-def checkMacAddress(input):
-    """Validate whether a string is a valid mac address or not.
-
-    Args:
-        input: The string to validate.
-
-    Returns: True/False, returns true for a valid mac address and false otherwise.
-    """
-    macValidationRegex = "[0-9a-f]{2}([-:]?)[0-9a-f]{2}(\\1[0-9a-f]{2}){4}$"
-    if re.match(macValidationRegex, input.lower()):
-        return True
-    return False
-
-
-def wait_for_expected_softap_state(ad, callbackId, expected_softap_state):
-    """Wait for the expected softap state change.
-    Args:
-        callbackId: Id of the callback associated with registering.
-        expected_softap_state: The expected softap state.
-    """
-    eventStr = (
-        wifi_constants.SOFTAP_CALLBACK_EVENT
-        + str(callbackId)
-        + wifi_constants.SOFTAP_STATE_CHANGED
-    )
-    asserts.assert_equal(
-        ad.ed.pop_event(eventStr, SHORT_TIMEOUT)["data"][
-            wifi_constants.SOFTAP_STATE_CHANGE_CALLBACK_KEY
-        ],
-        expected_softap_state,
-        "Softap state doesn't match with expected state",
-    )
-
-
-def get_current_number_of_softap_clients(ad, callbackId):
-    """pop up all of softap client updated event from queue.
-    Args:
-        callbackId: Id of the callback associated with registering.
-
-    Returns:
-        If exist aleast callback, returns last updated number_of_softap_clients.
-        Returns None when no any match callback event in queue.
-    """
-    eventStr = (
-        wifi_constants.SOFTAP_CALLBACK_EVENT
-        + str(callbackId)
-        + wifi_constants.SOFTAP_NUMBER_CLIENTS_CHANGED
-    )
-    events = ad.ed.pop_all(eventStr)
-    for event in events:
-        num_of_clients = event["data"][
-            wifi_constants.SOFTAP_NUMBER_CLIENTS_CALLBACK_KEY
-        ]
-    if len(events) == 0:
-        return None
-    return num_of_clients
-
-
-def get_current_softap_info(ad, callbackId, need_to_wait):
-    """pop up all of softap info changed event from queue.
-    Args:
-        callbackId: Id of the callback associated with registering.
-        need_to_wait: Wait for the info callback event before pop all.
-    Returns:
-        Returns last updated information of softap.
-    """
-    eventStr = (
-        wifi_constants.SOFTAP_CALLBACK_EVENT
-        + str(callbackId)
-        + wifi_constants.SOFTAP_INFO_CHANGED
-    )
-    ad.log.debug("softap info dump from eventStr %s", eventStr)
-    frequency = 0
-    bandwidth = 0
-    if need_to_wait:
-        event = ad.ed.pop_event(eventStr, SHORT_TIMEOUT)
-        frequency = event["data"][wifi_constants.SOFTAP_INFO_FREQUENCY_CALLBACK_KEY]
-        bandwidth = event["data"][wifi_constants.SOFTAP_INFO_BANDWIDTH_CALLBACK_KEY]
-        ad.log.info(
-            "softap info updated, frequency is %s, bandwidth is %s",
-            frequency,
-            bandwidth,
-        )
-
-    events = ad.ed.pop_all(eventStr)
-    for event in events:
-        frequency = event["data"][wifi_constants.SOFTAP_INFO_FREQUENCY_CALLBACK_KEY]
-        bandwidth = event["data"][wifi_constants.SOFTAP_INFO_BANDWIDTH_CALLBACK_KEY]
-    ad.log.info("softap info, frequency is %s, bandwidth is %s", frequency, bandwidth)
-    return frequency, bandwidth
-
-
-def get_current_softap_infos(ad, callbackId, need_to_wait):
-    """pop up all of softap info list changed event from queue.
-    Args:
-        callbackId: Id of the callback associated with registering.
-        need_to_wait: Wait for the info callback event before pop all.
-    Returns:
-        Returns last updated informations of softap.
-    """
-    eventStr = (
-        wifi_constants.SOFTAP_CALLBACK_EVENT
-        + str(callbackId)
-        + wifi_constants.SOFTAP_INFOLIST_CHANGED
-    )
-    ad.log.debug("softap info dump from eventStr %s", eventStr)
-
-    if need_to_wait:
-        event = ad.ed.pop_event(eventStr, SHORT_TIMEOUT)
-        infos = event["data"]
-
-    events = ad.ed.pop_all(eventStr)
-    for event in events:
-        infos = event["data"]
-
-    for info in infos:
-        frequency = info[wifi_constants.SOFTAP_INFO_FREQUENCY_CALLBACK_KEY]
-        bandwidth = info[wifi_constants.SOFTAP_INFO_BANDWIDTH_CALLBACK_KEY]
-        wifistandard = info[wifi_constants.SOFTAP_INFO_WIFISTANDARD_CALLBACK_KEY]
-        bssid = info[wifi_constants.SOFTAP_INFO_BSSID_CALLBACK_KEY]
-        ad.log.info(
-            "softap info, freq:%s, bw:%s, wifistandard:%s, bssid:%s",
-            frequency,
-            bandwidth,
-            wifistandard,
-            bssid,
-        )
-
-    return infos
-
-
-def get_current_softap_capability(ad, callbackId, need_to_wait):
-    """pop up all of softap info list changed event from queue.
-    Args:
-        callbackId: Id of the callback associated with registering.
-        need_to_wait: Wait for the info callback event before pop all.
-    Returns:
-        Returns last updated capability of softap.
-    """
-    eventStr = (
-        wifi_constants.SOFTAP_CALLBACK_EVENT
-        + str(callbackId)
-        + wifi_constants.SOFTAP_CAPABILITY_CHANGED
-    )
-    ad.log.debug("softap capability dump from eventStr %s", eventStr)
-    if need_to_wait:
-        event = ad.ed.pop_event(eventStr, SHORT_TIMEOUT)
-        capability = event["data"]
-
-    events = ad.ed.pop_all(eventStr)
-    for event in events:
-        capability = event["data"]
-
-    return capability
-
-
-def get_ssrdumps(ad):
-    """Pulls dumps in the ssrdump dir
-    Args:
-        ad: android device object.
-    """
-    logs = ad.get_file_names("/data/vendor/ssrdump/")
-    if logs:
-        ad.log.info("Pulling ssrdumps %s", logs)
-        log_path = os.path.join(ad.device_log_path, "SSRDUMPS_%s" % ad.serial)
-        os.makedirs(log_path, exist_ok=True)
-        ad.pull_files(logs, log_path)
-    ad.adb.shell("find /data/vendor/ssrdump/ -type f -delete", ignore_status=True)
-
-
-def start_pcap(pcap, wifi_band, test_name):
-    """Start packet capture in monitor mode.
-
-    Args:
-        pcap: packet capture object
-        wifi_band: '2g' or '5g' or 'dual'
-        test_name: test name to be used for pcap file name
-
-    Returns:
-        Dictionary with wifi band as key and the tuple
-        (pcap Process object, log directory) as the value
-    """
-    log_dir = os.path.join(
-        context.get_current_context().get_full_output_path(), "PacketCapture"
-    )
-    os.makedirs(log_dir, exist_ok=True)
-    if wifi_band == "dual":
-        bands = [BAND_2G, BAND_5G]
-    else:
-        bands = [wifi_band]
-    procs = {}
-    for band in bands:
-        proc = pcap.start_packet_capture(band, log_dir, test_name)
-        procs[band] = (proc, os.path.join(log_dir, test_name))
-    return procs
-
-
-def stop_pcap(pcap, procs, test_status=None):
-    """Stop packet capture in monitor mode.
-
-    Since, the pcap logs in monitor mode can be very large, we will
-    delete them if they are not required. 'test_status' if True, will delete
-    the pcap files. If False, we will keep them.
-
-    Args:
-        pcap: packet capture object
-        procs: dictionary returned by start_pcap
-        test_status: status of the test case
-    """
-    for proc, fname in procs.values():
-        pcap.stop_packet_capture(proc)
-
-    if test_status:
-        shutil.rmtree(os.path.dirname(fname))
-
-
-def verify_mac_not_found_in_pcap(ad, mac, packets):
-    """Verify that a mac address is not found in the captured packets.
-
-    Args:
-        ad: android device object
-        mac: string representation of the mac address
-        packets: packets obtained by rdpcap(pcap_fname)
-    """
-    for pkt in packets:
-        logging.debug("Packet Summary = %s", pkt.summary())
-        if mac in pkt.summary():
-            asserts.fail(
-                "Device %s caught Factory MAC: %s in packet sniffer."
-                "Packet = %s" % (ad.serial, mac, pkt.show())
-            )
-
-
-def verify_mac_is_found_in_pcap(ad, mac, packets):
-    """Verify that a mac address is found in the captured packets.
-
-    Args:
-        ad: android device object
-        mac: string representation of the mac address
-        packets: packets obtained by rdpcap(pcap_fname)
-    """
-    for pkt in packets:
-        if mac in pkt.summary():
-            return
-    asserts.fail(
-        "Did not find MAC = %s in packet sniffer." "for device %s" % (mac, ad.serial)
-    )
-
-
-def start_cnss_diags(ads, cnss_diag_file, pixel_models):
-    for ad in ads:
-        start_cnss_diag(ad, cnss_diag_file, pixel_models)
-
-
-def start_cnss_diag(ad, cnss_diag_file, pixel_models):
-    """Start cnss_diag to record extra wifi logs
-
-    Args:
-        ad: android device object.
-        cnss_diag_file: cnss diag config file to push to device.
-        pixel_models: pixel devices.
-    """
-    if ad.model not in pixel_models:
-        ad.log.info("Device not supported to collect pixel logger")
-        return
-    if ad.model in wifi_constants.DEVICES_USING_LEGACY_PROP:
-        prop = wifi_constants.LEGACY_CNSS_DIAG_PROP
-    else:
-        prop = wifi_constants.CNSS_DIAG_PROP
-    if ad.adb.getprop(prop) != "true":
-        if not int(
-            ad.adb.shell(
-                "ls -l %s%s | wc -l" % (CNSS_DIAG_CONFIG_PATH, CNSS_DIAG_CONFIG_FILE)
-            )
-        ):
-            ad.adb.push("%s %s" % (cnss_diag_file, CNSS_DIAG_CONFIG_PATH))
-        ad.adb.shell(
-            "find /data/vendor/wifi/cnss_diag/wlan_logs/ -type f -delete",
-            ignore_status=True,
-        )
-        ad.adb.shell("setprop %s true" % prop, ignore_status=True)
-
-
-def stop_cnss_diags(ads, pixel_models):
-    for ad in ads:
-        stop_cnss_diag(ad, pixel_models)
-
-
-def stop_cnss_diag(ad, pixel_models):
-    """Stops cnss_diag
-
-    Args:
-        ad: android device object.
-        pixel_models: pixel devices.
-    """
-    if ad.model not in pixel_models:
-        ad.log.info("Device not supported to collect pixel logger")
-        return
-    if ad.model in wifi_constants.DEVICES_USING_LEGACY_PROP:
-        prop = wifi_constants.LEGACY_CNSS_DIAG_PROP
-    else:
-        prop = wifi_constants.CNSS_DIAG_PROP
-    ad.adb.shell("setprop %s false" % prop, ignore_status=True)
-
-
-def get_cnss_diag_log(ad):
-    """Pulls the cnss_diag logs in the wlan_logs dir
-    Args:
-        ad: android device object.
-    """
-    logs = ad.get_file_names("/data/vendor/wifi/cnss_diag/wlan_logs/")
-    if logs:
-        ad.log.info("Pulling cnss_diag logs %s", logs)
-        log_path = os.path.join(ad.device_log_path, "CNSS_DIAG_%s" % ad.serial)
-        os.makedirs(log_path, exist_ok=True)
-        ad.pull_files(logs, log_path)
-
-
-LinkProbeResult = namedtuple(
-    "LinkProbeResult", ("is_success", "stdout", "elapsed_time", "failure_reason")
-)
-
-
-def send_link_probe(ad):
-    """Sends a link probe to the currently connected AP, and returns whether the
-    probe succeeded or not.
-
-    Args:
-         ad: android device object
-    Returns:
-        LinkProbeResult namedtuple
-    """
-    stdout = ad.adb.shell("cmd wifi send-link-probe")
-    asserts.assert_false(
-        "Error" in stdout or "Exception" in stdout,
-        "Exception while sending link probe: " + stdout,
-    )
-
-    is_success = False
-    elapsed_time = None
-    failure_reason = None
-    if "succeeded" in stdout:
-        is_success = True
-        elapsed_time = next(
-            (int(token) for token in stdout.split() if token.isdigit()), None
-        )
-    elif "failed with reason" in stdout:
-        failure_reason = next(
-            (int(token) for token in stdout.split() if token.isdigit()), None
-        )
-    else:
-        asserts.fail("Unexpected link probe result: " + stdout)
-
-    return LinkProbeResult(
-        is_success=is_success,
-        stdout=stdout,
-        elapsed_time=elapsed_time,
-        failure_reason=failure_reason,
-    )
-
-
-def send_link_probes(ad, num_probes, delay_sec):
-    """Sends a sequence of link probes to the currently connected AP, and
-    returns whether the probes succeeded or not.
-
-    Args:
-         ad: android device object
-         num_probes: number of probes to perform
-         delay_sec: delay time between probes, in seconds
-    Returns:
-        List[LinkProbeResult] one LinkProbeResults for each probe
-    """
-    logging.info("Sending link probes")
-    results = []
-    for _ in range(num_probes):
-        # send_link_probe() will also fail the test if it sees an exception
-        # in the stdout of the adb shell command
-        result = send_link_probe(ad)
-        logging.info("link probe results: " + str(result))
-        results.append(result)
-        time.sleep(delay_sec)
-
-    return results
-
-
-def ap_setup(test, index, ap, network, bandwidth=80, channel=6):
-    """Set up the AP with provided network info.
-
-    Args:
-        test: the calling test class object.
-        index: int, index of the AP.
-        ap: access_point object of the AP.
-        network: dict with information of the network, including ssid,
-                 password and bssid.
-        bandwidth: the operation bandwidth for the AP, default 80MHz.
-        channel: the channel number for the AP.
-    Returns:
-        brconfigs: the bridge interface configs
-    """
-    bss_settings = []
-    ssid = network[WifiEnums.SSID_KEY]
-    test.access_points[index].close()
-    time.sleep(5)
-
-    # Configure AP as required.
-    if "password" in network.keys():
-        password = network["password"]
-        security = hostapd_security.Security(security_mode="wpa", password=password)
-    else:
-        security = hostapd_security.Security(security_mode=None, password=None)
-    config = hostapd_ap_preset.create_ap_preset(
-        channel=channel,
-        ssid=ssid,
-        security=security,
-        bss_settings=bss_settings,
-        vht_bandwidth=bandwidth,
-        profile_name="whirlwind",
-        iface_wlan_2g=ap.wlan_2g,
-        iface_wlan_5g=ap.wlan_5g,
-    )
-    ap.start_ap(config)
-    logging.info("AP started on channel {} with SSID {}".format(channel, ssid))
-
-
-def turn_ap_off(test, AP):
-    """Bring down hostapd on the Access Point.
-    Args:
-        test: The test class object.
-        AP: int, indicating which AP to turn OFF.
-    """
-    hostapd_2g = test.access_points[AP - 1]._aps["wlan0"].hostapd
-    if hostapd_2g.is_alive():
-        hostapd_2g.stop()
-        logging.debug("Turned WLAN0 AP%d off" % AP)
-    hostapd_5g = test.access_points[AP - 1]._aps["wlan1"].hostapd
-    if hostapd_5g.is_alive():
-        hostapd_5g.stop()
-        logging.debug("Turned WLAN1 AP%d off" % AP)
-
-
-def turn_ap_on(test, AP):
-    """Bring up hostapd on the Access Point.
-    Args:
-        test: The test class object.
-        AP: int, indicating which AP to turn ON.
-    """
-    hostapd_2g = test.access_points[AP - 1]._aps["wlan0"].hostapd
-    if not hostapd_2g.is_alive():
-        hostapd_2g.start(hostapd_2g.config)
-        logging.debug("Turned WLAN0 AP%d on" % AP)
-    hostapd_5g = test.access_points[AP - 1]._aps["wlan1"].hostapd
-    if not hostapd_5g.is_alive():
-        hostapd_5g.start(hostapd_5g.config)
-        logging.debug("Turned WLAN1 AP%d on" % AP)
-
-
-def turn_location_off_and_scan_toggle_off(ad):
-    """Turns off wifi location scans."""
-    utils.set_location_service(ad, False)
-    ad.droid.wifiScannerToggleAlwaysAvailable(False)
-    msg = "Failed to turn off location service's scan."
-    asserts.assert_true(not ad.droid.wifiScannerIsAlwaysAvailable(), msg)
-
-
-def set_softap_channel(dut, ap_iface="wlan1", cs_count=10, channel=2462):
-    """Set SoftAP mode channel
-
-    Args:
-        dut: android device object
-        ap_iface: interface of SoftAP mode.
-        cs_count: how many beacon frames before switch channel, default = 10
-        channel: a wifi channel.
-    """
-    chan_switch_cmd = "hostapd_cli -i {} chan_switch {} {}"
-    chan_switch_cmd_show = chan_switch_cmd.format(ap_iface, cs_count, channel)
-    dut.log.info("adb shell {}".format(chan_switch_cmd_show))
-    chan_switch_result = dut.adb.shell(
-        chan_switch_cmd.format(ap_iface, cs_count, channel)
-    )
-    if chan_switch_result == "OK":
-        dut.log.info("switch hotspot channel to {}".format(channel))
-        return chan_switch_result
-
-    asserts.fail("Failed to switch hotspot channel")
-
-
-def get_wlan0_link(dut):
-    """get wlan0 interface status"""
-    get_wlan0 = "wpa_cli -iwlan0 -g@android:wpa_wlan0 IFNAME=wlan0 status"
-    out = dut.adb.shell(get_wlan0)
-    out = dict(re.findall(r'(\S+)=(".*?"|\S+)', out))
-    asserts.assert_true("ssid" in out, "Client doesn't connect to any network")
-    return out
-
-
-def verify_11ax_wifi_connection(ad, wifi6_supported_models, wifi6_ap):
-    """Verify 11ax for wifi connection.
-
-    Args:
-      ad: adndroid device object
-      wifi6_supported_models: device supporting 11ax.
-      wifi6_ap: if the AP supports 11ax.
-    """
-    if wifi6_ap and ad.model in wifi6_supported_models:
-        logging.info("Verifying 11ax. Model: %s" % ad.model)
-        asserts.assert_true(
-            ad.droid.wifiGetConnectionStandard() == wifi_constants.WIFI_STANDARD_11AX,
-            "DUT did not connect to 11ax.",
-        )
-
-
-def verify_11ax_softap(dut, dut_client, wifi6_supported_models):
-    """Verify 11ax SoftAp if devices support it.
-
-    Check if both DUT and DUT client supports 11ax, then SoftAp turns on
-    with 11ax mode and DUT client can connect to it.
-
-    Args:
-      dut: Softap device.
-      dut_client: Client connecting to softap.
-      wifi6_supported_models: List of device models supporting 11ax.
-    """
-    if (
-        dut.model in wifi6_supported_models
-        and dut_client.model in wifi6_supported_models
-    ):
-        logging.info(
-            "Verifying 11ax softap. DUT model: %s, DUT Client model: %s",
-            dut.model,
-            dut_client.model,
-        )
-        asserts.assert_true(
-            dut_client.droid.wifiGetConnectionStandard()
-            == wifi_constants.WIFI_STANDARD_11AX,
-            "DUT failed to start SoftAp in 11ax.",
-        )
-
-
-def check_available_channels_in_bands_2_5(dut, country_code):
-    """Check if DUT is capable of enable BridgedAp.
-    #TODO: Find a way to make this function flexible by taking an argument.
-
-    Args:
-        country_code: country code, e.g., 'US', 'JP'.
-    Returns:
-        True: If DUT is capable of enable BridgedAp.
-        False: If DUT is not capable of enable BridgedAp.
-    """
-    set_wifi_country_code(dut, country_code)
-    country = dut.droid.wifiGetCountryCode()
-    dut.log.info("DUT current country code : {}".format(country))
-    # Wi-Fi ON and OFF to make sure country code take effet.
-    wifi_toggle_state(dut, True)
-    wifi_toggle_state(dut, False)
-
-    # Register SoftAp Callback and get SoftAp capability.
-    callbackId = dut.droid.registerSoftApCallback()
-    capability = get_current_softap_capability(dut, callbackId, True)
-    dut.droid.unregisterSoftApCallback(callbackId)
-
-    if (
-        capability[wifi_constants.SOFTAP_CAPABILITY_24GHZ_SUPPORTED_CHANNEL_LIST]
-        and capability[wifi_constants.SOFTAP_CAPABILITY_5GHZ_SUPPORTED_CHANNEL_LIST]
-    ):
-        return True
-    return False
-
-
-@retry(stop=stop_after_attempt(5), wait=wait_fixed(2))
-def validate_ping_between_two_clients(dut1, dut2):
-    """Make 2 DUT ping each other.
-
-    Args:
-        dut1: An AndroidDevice object.
-        dut2: An AndroidDevice object.
-    """
-    # Get DUTs' IPv4 addresses.
-    dut1_ip = ""
-    dut2_ip = ""
-    try:
-        dut1_ip = dut1.droid.connectivityGetIPv4Addresses("wlan0")[0]
-    except IndexError as e:
-        dut1.log.info(
-            "{} has no Wi-Fi connection, cannot get IPv4 address.".format(dut1.serial)
-        )
-    try:
-        dut2_ip = dut2.droid.connectivityGetIPv4Addresses("wlan0")[0]
-    except IndexError as e:
-        dut2.log.info(
-            "{} has no Wi-Fi connection, cannot get IPv4 address.".format(dut2.serial)
-        )
-    # Test fail if not able to obtain two DUT's IPv4 addresses.
-    asserts.assert_true(
-        dut1_ip and dut2_ip, "Ping failed because no DUT's IPv4 address"
-    )
-
-    dut1.log.info("{} IPv4 addresses : {}".format(dut1.serial, dut1_ip))
-    dut2.log.info("{} IPv4 addresses : {}".format(dut2.serial, dut2_ip))
-
-    # Two clients ping each other
-    dut1.log.info("{} ping {}".format(dut1_ip, dut2_ip))
-    asserts.assert_true(
-        utils.adb_shell_ping(dut1, count=10, dest_ip=dut2_ip, timeout=20),
-        "%s ping %s failed" % (dut1.serial, dut2_ip),
-    )
-
-    dut2.log.info("{} ping {}".format(dut2_ip, dut1_ip))
-    asserts.assert_true(
-        utils.adb_shell_ping(dut2, count=10, dest_ip=dut1_ip, timeout=20),
-        "%s ping %s failed" % (dut2.serial, dut1_ip),
-    )
diff --git a/src/antlion/tests/flash/FlashTest.py b/src/antlion/tests/flash/FlashTest.py
deleted file mode 100644
index 7c5399f..0000000
--- a/src/antlion/tests/flash/FlashTest.py
+++ /dev/null
@@ -1,123 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-Script for to flash Fuchsia devices and reports the DUT's version of Fuchsia in
-the Sponge test result properties. Uses the built in flashing tool for
-fuchsia_devices.
-"""
-
-import logging
-from typing import List
-
-from antlion.controllers import fuchsia_device, pdu
-from antlion.controllers.fuchsia_device import FuchsiaDevice
-from antlion.controllers.pdu import PduDevice
-from antlion.utils import get_device
-
-from mobly import asserts, base_test, signals, test_runner
-
-MAX_FLASH_ATTEMPTS = 3
-
-
-class FlashTest(base_test.BaseTestClass):
-    def setup_class(self):
-        self.log = logging.getLogger()
-        self.fuchsia_devices: List[FuchsiaDevice] = self.register_controller(
-            fuchsia_device
-        )
-        self.pdu_devices: List[PduDevice] = self.register_controller(pdu)
-        self.failed_to_get_device_info = False
-
-    def teardown_class(self):
-        # Verify that FlashTest successfully reported the DUT version. This is
-        # working around a flaw in ACTS where signals.TestAbortAll does not
-        # report any errors.
-        #
-        # TODO(http://b/253515812): This has been fixed in Mobly already. Remove
-        # teardown_class and change "TestError" to "abort_all" in
-        # test_flash_devices once we move to Mobly.
-        if self.failed_to_get_device_info:
-            asserts.abort_all("Failed to get DUT device information")
-
-        return super().teardown_class()
-
-    def test_flash_devices(self) -> None:
-        """Flashes a Fuchsia device for testing.
-
-        This method calls the fuchsia_device reboot() with 'flash' argument.
-        This kicks off a flash, not pave, of the fuchsia device. It also soft
-        reboots the device. On error it will attempt to reflash up to
-        MAX_FLASH_ATTEMPTS hard rebooting inbetween each attempt.
-        """
-        for device in self.fuchsia_devices:
-            flash_counter = 0
-            while True:
-                try:
-                    device.reboot(
-                        reboot_type="flash", use_ssh=True, unreachable_timeout=120
-                    )
-                    self.log.info(f"{device.orig_ip} has been flashed.")
-                    break
-                except Exception as err:
-                    self.log.error(
-                        f"Failed to flash {device.orig_ip} with error:\n{err}"
-                    )
-
-                    if not device.device_pdu_config:
-                        asserts.abort_all(
-                            f"Failed to flash {device.orig_ip} and no PDU"
-                            "available for hard reboot"
-                        )
-
-                    flash_counter = flash_counter + 1
-                    if flash_counter == MAX_FLASH_ATTEMPTS:
-                        asserts.abort_all(
-                            f"Failed to flash {device.orig_ip} after"
-                            f"{MAX_FLASH_ATTEMPTS} attempts"
-                        )
-
-                    self.log.info(
-                        f"Hard rebooting {device.orig_ip} and retrying flash."
-                    )
-                    device.reboot(reboot_type="hard", testbed_pdus=self.pdu_devices)
-
-        # Report the new Fuchsia version
-        try:
-            dut = get_device(self.fuchsia_devices, "DUT")
-            version = dut.version()
-            device_name = dut.device_name()
-            product_name = dut.product_name()
-
-            self.record_data(
-                {
-                    "sponge_properties": {
-                        "DUT_VERSION": version,
-                        "DUT_NAME": device_name,
-                        "DUT_PRODUCT": product_name,
-                    },
-                }
-            )
-
-            self.log.info(f"DUT version: {version}")
-            self.log.info(f"DUT name: {device_name}")
-            self.log.info(f"DUT product: {product_name}")
-        except Exception as e:
-            self.failed_to_get_device_info = True
-            raise signals.TestError(f"Failed to get DUT device information: {e}") from e
-
-
-if __name__ == "__main__":
-    test_runner.main()
diff --git a/src/antlion/tests/logging/FuchsiaLoggingTest.py b/src/antlion/tests/logging/FuchsiaLoggingTest.py
deleted file mode 100644
index a5e2db8..0000000
--- a/src/antlion/tests/logging/FuchsiaLoggingTest.py
+++ /dev/null
@@ -1,61 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from mobly import asserts, base_test, signals, test_runner
-from typing import List
-
-from antlion.controllers import fuchsia_device
-from antlion.controllers.fuchsia_device import FuchsiaDevice
-
-MESSAGE = "Logging Test"
-
-
-class FuchsiaLoggingTest(base_test.BaseTestClass):
-    def setup_class(self):
-        self.fuchsia_devices: List[FuchsiaDevice] = self.register_controller(
-            fuchsia_device
-        )
-
-        asserts.abort_class_if(
-            len(self.fuchsia_devices) == 0, "Requires at least one Fuchsia device"
-        )
-
-        self.dut = self.fuchsia_devices[0]
-
-    def test_log_err(self):
-        result = self.dut.sl4f.logging_lib.logE(MESSAGE)
-        if result.get("error") is None:
-            signals.TestPass(result.get("result"))
-        else:
-            signals.TestFailure(result.get("error"))
-
-    def test_log_info(self):
-        result = self.dut.sl4f.logging_lib.logI(MESSAGE)
-        if result.get("error") is None:
-            signals.TestPass(result.get("result"))
-        else:
-            signals.TestFailure(result.get("error"))
-
-    def test_log_warn(self):
-        result = self.dut.sl4f.logging_lib.logW(MESSAGE)
-        if result.get("error") is None:
-            signals.TestPass(result.get("result"))
-        else:
-            signals.TestFailure(result.get("error"))
-
-
-if __name__ == "__main__":
-    test_runner.main()
diff --git a/src/antlion/tests/netstack/NetstackIfaceTest.py b/src/antlion/tests/netstack/NetstackIfaceTest.py
deleted file mode 100644
index fce3197..0000000
--- a/src/antlion/tests/netstack/NetstackIfaceTest.py
+++ /dev/null
@@ -1,164 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import logging
-from typing import List
-
-from antlion.controllers import fuchsia_device
-from antlion.controllers.fuchsia_device import FuchsiaDevice
-
-from mobly import asserts, signals, test_runner, base_test
-
-
-class NetstackIfaceTest(base_test.BaseTestClass):
-    default_timeout = 10
-    active_scan_callback_list = []
-    active_adv_callback_list = []
-    droid = None
-
-    def setup_class(self):
-        self.log = logging.getLogger()
-        self.fuchsia_devices: List[FuchsiaDevice] = self.register_controller(
-            fuchsia_device
-        )
-
-        asserts.abort_class_if(
-            len(self.fuchsia_devices) == 0, "Requires at least one Fuchsia device"
-        )
-
-        self.dut = self.fuchsia_devices[0]
-
-    def _enable_all_interfaces(self):
-        interfaces = self.dut.sl4f.netstack_lib.netstackListInterfaces()
-        for item in interfaces.get("result"):
-            identifier = item.get("id")
-            self.dut.sl4f.netstack_lib.enableInterface(identifier)
-
-    def setup_test(self):
-        # Always make sure all interfaces listed are in an up state.
-        self._enable_all_interfaces()
-
-    def teardown_test(self):
-        # Always make sure all interfaces listed are in an up state.
-        self._enable_all_interfaces()
-
-    def test_list_interfaces(self):
-        """Test listing all interfaces.
-
-        Steps:
-        1. Call ListInterfaces FIDL api.
-        2. Verify there is at least one interface returned.
-
-        Expected Result:
-        There were no errors in retrieving the list of interfaces.
-        There was at least one interface in the list.
-
-        Returns:
-          signals.TestPass if no errors
-          signals.TestFailure if there are any errors during the test.
-
-        TAGS: Netstack
-        Priority: 1
-        """
-        interfaces = self.dut.sl4f.netstack_lib.netstackListInterfaces()
-        if interfaces.get("error") is not None:
-            raise signals.TestFailure("Failed with {}".format(interfaces.get("error")))
-        if len(interfaces.get("result")) < 1:
-            raise signals.TestFailure("No interfaces found.")
-        self.log.info("Interfaces found: {}".format(interfaces.get("result")))
-        raise signals.TestPass("Success")
-
-    def test_toggle_wlan_interface(self):
-        """Test toggling the wlan interface if it exists.
-
-        Steps:
-        1. Call ListInterfaces FIDL api.
-        2. Find the wlan interface.
-        3. Disable the interface.
-        4. Verify interface attributes in a down state.
-        5. Enable the interface.
-        6. Verify interface attributes in an up state.
-
-        Expected Result:
-        WLAN interface was successfully brought down and up again.
-
-        Returns:
-          signals.TestPass if no errors
-          signals.TestFailure if there are any errors during the test.
-          signals.TestSkip if there are no wlan interfaces.
-
-        TAGS: Netstack
-        Priority: 1
-        """
-
-        def get_wlan_interfaces():
-            result = self.dut.sl4f.netstack_lib.netstackListInterfaces()
-            if error := result.get("error"):
-                raise signals.TestFailure(f"unable to list interfaces: {error}")
-            return [
-                interface
-                for interface in result.get("result")
-                if "wlan" in interface.get("name")
-            ]
-
-        def get_ids(interfaces):
-            return [get_id(interface) for interface in interfaces]
-
-        wlan_interfaces = get_wlan_interfaces()
-        if not wlan_interfaces:
-            raise signals.TestSkip("no wlan interface found")
-        interface_ids = get_ids(wlan_interfaces)
-
-        # Disable the interfaces.
-        for identifier in interface_ids:
-            result = self.dut.sl4f.netstack_lib.disableInterface(identifier)
-            if error := result.get("error"):
-                raise signals.TestFailure(
-                    f"failed to disable wlan interface {identifier}: {error}"
-                )
-
-        # Retrieve the interfaces again.
-        disabled_wlan_interfaces = get_wlan_interfaces()
-        disabled_interface_ids = get_ids(wlan_interfaces)
-
-        if not disabled_interface_ids == interface_ids:
-            raise signals.TestFailure(
-                f"disabled interface IDs do not match original interface IDs: original={interface_ids} disabled={disabled_interface_ids}"
-            )
-
-        # Check the current state of the interfaces.
-        for interface in disabled_interfaces:
-            if len(interface_info.get("ipv4_addresses")) > 0:
-                raise signals.TestFailure(
-                    f"no Ipv4 Address should be present: {interface}"
-                )
-
-            # TODO (35981): Verify other values when interface down.
-
-        # Re-enable the interfaces.
-        for identifier in disabled_interface_ids:
-            result = self.dut.sl4f.netstack_lib.enableInterface(identifier)
-            if error := result.get("error"):
-                raise signals.TestFailure(
-                    f"failed to enable wlan interface {identifier}: {error}"
-                )
-
-        # TODO (35981): Verify other values when interface up.
-        raise signals.TestPass("Success")
-
-
-if __name__ == "__main__":
-    test_runner.main()
diff --git a/src/antlion/tests/netstack/ToggleWlanInterfaceStressTest.py b/src/antlion/tests/netstack/ToggleWlanInterfaceStressTest.py
deleted file mode 100644
index 36b52ad..0000000
--- a/src/antlion/tests/netstack/ToggleWlanInterfaceStressTest.py
+++ /dev/null
@@ -1,95 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import logging
-import time
-from typing import List
-
-from antlion.controllers import fuchsia_device
-from antlion.controllers.fuchsia_device import FuchsiaDevice
-from antlion.test_utils.abstract_devices.wlan_device import create_wlan_device
-
-from mobly import asserts, base_test, signals, test_runner
-
-
-class ToggleWlanInterfaceStressTest(base_test.BaseTestClass):
-    def setup_class(self):
-        self.log = logging.getLogger()
-        self.fuchsia_devices: List[FuchsiaDevice] = self.register_controller(
-            fuchsia_device
-        )
-
-        asserts.abort_class_if(
-            len(self.fuchsia_devices) == 0, "Requires at least one Fuchsia device"
-        )
-
-        self.dut = create_wlan_device(self.fuchsia_devices[0])
-
-    def test_iface_toggle_and_ping(self):
-        """Test that we don't error out when toggling WLAN interfaces.
-
-        Steps:
-        1. Find a WLAN interface
-        2. Destroy it
-        3. Create a new WLAN interface
-        4. Ping after association
-        5. Repeat 1-4 1,000 times
-
-        Expected Result:
-        Verify there are no errors in destroying the wlan interface.
-
-        Returns:
-          signals.TestPass if no errors
-          signals.TestFailure if there are any errors during the test.
-
-        TAGS: WLAN, Stability
-        Priority: 1
-        """
-
-        # Test assumes you've already connected to some AP.
-
-        for i in range(1000):
-            wlan_interfaces = self.dut.get_wlan_interface_id_list()
-            print(wlan_interfaces)
-            if len(wlan_interfaces) < 1:
-                raise signals.TestFailure("Not enough wlan interfaces for test")
-            if not self.dut.destroy_wlan_interface(wlan_interfaces[0]):
-                raise signals.TestFailure("Failed to destroy WLAN interface")
-            # Really make sure it is dead
-            self.fuchsia_devices[0].ssh.run(f"wlan iface del {wlan_interfaces[0]}")
-            # Grace period
-            time.sleep(2)
-            self.fuchsia_devices[0].ssh.run("wlan iface new --phy 0 --role Client")
-            end_time = time.time() + 300
-            while time.time() < end_time:
-                time.sleep(1)
-                if self.dut.is_connected():
-                    try:
-                        ping_result = self.dut.ping("8.8.8.8", 10, 1000, 1000, 25)
-                        print(ping_result)
-                    except Exception as err:
-                        # TODO: Once we gain more stability, fail test when pinging fails
-                        print("some err {}".format(err))
-                    time.sleep(2)  # give time for some traffic
-                    break
-            if not self.dut.is_connected():
-                raise signals.TestFailure("Failed at iteration {}".format(i + 1))
-            self.log.info("Iteration {} successful".format(i + 1))
-        raise signals.TestPass("Success")
-
-
-if __name__ == "__main__":
-    test_runner.main()
diff --git a/src/antlion/tests/wlan/compliance/BUILD.gn b/src/antlion/tests/wlan/compliance/BUILD.gn
deleted file mode 100644
index bdfc396..0000000
--- a/src/antlion/tests/wlan/compliance/BUILD.gn
+++ /dev/null
@@ -1,44 +0,0 @@
-# Copyright 2023 The Fuchsia Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import("//third_party/antlion/antlion_host_test.gni")
-import("//third_party/antlion/environments.gni")
-
-assert(is_host, "antlion tests only supported for host testing")
-
-antlion_host_test("vape_interop_test") {
-  main_source = "VapeInteropTest.py"
-  environments = display_ap_envs
-}
-
-antlion_host_test("wlan_phy_compliance_11ac_test") {
-  main_source = "WlanPhyCompliance11ACTest.py"
-  environments = display_ap_envs
-}
-
-antlion_host_test("wlan_phy_compliance_11n_test") {
-  main_source = "WlanPhyCompliance11NTest.py"
-  environments = display_ap_envs
-}
-
-antlion_host_test("wlan_phy_compliance_abg_test") {
-  main_source = "WlanPhyComplianceABGTest.py"
-  environments = display_ap_envs
-}
-
-antlion_host_test("wlan_security_compliance_abg_test") {
-  main_source = "WlanSecurityComplianceABGTest.py"
-  environments = display_ap_envs
-}
-
-group("e2e_tests") {
-  testonly = true
-  public_deps = [
-    ":vape_interop_test($host_toolchain)",
-    ":wlan_phy_compliance_11ac_test($host_toolchain)",
-    ":wlan_phy_compliance_11n_test($host_toolchain)",
-    ":wlan_phy_compliance_abg_test($host_toolchain)",
-    ":wlan_security_compliance_abg_test($host_toolchain)",
-  ]
-}
diff --git a/src/antlion/tests/wlan/compliance/WlanPhyCompliance11ACTest.py b/src/antlion/tests/wlan/compliance/WlanPhyCompliance11ACTest.py
deleted file mode 100644
index 4b797a5..0000000
--- a/src/antlion/tests/wlan/compliance/WlanPhyCompliance11ACTest.py
+++ /dev/null
@@ -1,312 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import itertools
-
-from antlion.controllers.access_point import setup_ap
-from antlion.controllers.ap_lib.hostapd_security import Security
-from antlion.controllers.ap_lib import hostapd_constants
-from antlion.test_utils.abstract_devices.wlan_device import create_wlan_device
-from antlion.test_utils.wifi import base_test
-from antlion.utils import rand_ascii_str
-
-from mobly import asserts, test_runner
-
-# AC Capabilities
-"""
-Capabilities Not Supported on Whirlwind:
-    - Supported Channel Width ([VHT160], [VHT160-80PLUS80]): 160mhz and 80+80
-        unsupported
-    - SU Beamformer [SU-BEAMFORMER]
-    - SU Beamformee [SU-BEAMFORMEE]
-    - MU Beamformer [MU-BEAMFORMER]
-    - MU Beamformee [MU-BEAMFORMEE]
-    - BF Antenna ([BF-ANTENNA-2], [BF-ANTENNA-3], [BF-ANTENNA-4])
-    - Rx STBC 2, 3, & 4 ([RX-STBC-12],[RX-STBC-123],[RX-STBC-124])
-    - VHT Link Adaptation ([VHT-LINK-ADAPT2],[VHT-LINK-ADAPT3])
-    - VHT TXOP Power Save [VHT-TXOP-PS]
-    - HTC-VHT [HTC-VHT]
-"""
-VHT_MAX_MPDU_LEN = [
-    hostapd_constants.AC_CAPABILITY_MAX_MPDU_7991,
-    hostapd_constants.AC_CAPABILITY_MAX_MPDU_11454,
-    "",
-]
-RXLDPC = [hostapd_constants.AC_CAPABILITY_RXLDPC, ""]
-SHORT_GI_80 = [hostapd_constants.AC_CAPABILITY_SHORT_GI_80, ""]
-TX_STBC = [hostapd_constants.AC_CAPABILITY_TX_STBC_2BY1, ""]
-RX_STBC = [hostapd_constants.AC_CAPABILITY_RX_STBC_1, ""]
-MAX_A_MPDU = [
-    hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP0,
-    hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP1,
-    hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP2,
-    hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP3,
-    hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP4,
-    hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP5,
-    hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP6,
-    hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP7,
-    "",
-]
-RX_ANTENNA = [hostapd_constants.AC_CAPABILITY_RX_ANTENNA_PATTERN, ""]
-TX_ANTENNA = [hostapd_constants.AC_CAPABILITY_TX_ANTENNA_PATTERN, ""]
-
-# Default 11N Capabilities
-N_CAPABS_40MHZ = [
-    hostapd_constants.N_CAPABILITY_LDPC,
-    hostapd_constants.N_CAPABILITY_SGI20,
-    hostapd_constants.N_CAPABILITY_RX_STBC1,
-    hostapd_constants.N_CAPABILITY_SGI20,
-    hostapd_constants.N_CAPABILITY_SGI40,
-    hostapd_constants.N_CAPABILITY_MAX_AMSDU_7935,
-    hostapd_constants.N_CAPABILITY_HT40_PLUS,
-]
-
-N_CAPABS_20MHZ = [
-    hostapd_constants.N_CAPABILITY_LDPC,
-    hostapd_constants.N_CAPABILITY_SGI20,
-    hostapd_constants.N_CAPABILITY_RX_STBC1,
-    hostapd_constants.N_CAPABILITY_SGI20,
-    hostapd_constants.N_CAPABILITY_MAX_AMSDU_7935,
-    hostapd_constants.N_CAPABILITY_HT20,
-]
-
-# Default wpa2 profile.
-WPA2_SECURITY = Security(
-    security_mode=hostapd_constants.WPA2_STRING,
-    password=rand_ascii_str(20),
-    wpa_cipher=hostapd_constants.WPA2_DEFAULT_CIPER,
-    wpa2_cipher=hostapd_constants.WPA2_DEFAULT_CIPER,
-)
-
-SECURITIES = [None, WPA2_SECURITY]
-
-
-def generate_test_name(settings):
-    """Generates a test name string based on the ac_capabilities for
-    a test case.
-
-    Args:
-        settings: a dict with the test settings (bandwidth, security, ac_capabs)
-
-    Returns:
-        A string test case name
-    """
-    chbw = settings["chbw"]
-    sec = "wpa2" if settings["security"] else "open"
-    ret = []
-    for cap in hostapd_constants.AC_CAPABILITIES_MAPPING.keys():
-        if cap in settings["ac_capabilities"]:
-            ret.append(hostapd_constants.AC_CAPABILITIES_MAPPING[cap])
-    return "test_11ac_%smhz_%s_%s" % (chbw, sec, "".join(ret))
-
-
-# 6912 test cases
-class WlanPhyCompliance11ACTest(base_test.WifiBaseTest):
-    """Tests for validating 11ac PHYS.
-
-    Test Bed Requirement:
-    * One Android device or Fuchsia device
-    * One Access Point
-    """
-
-    def __init__(self, controllers):
-        super().__init__(controllers)
-
-    def setup_generated_tests(self):
-        test_args = (
-            self._generate_20mhz_test_args()
-            + self._generate_40mhz_test_args()
-            + self._generate_80mhz_test_args()
-        )
-        self.generate_tests(
-            test_logic=self.setup_and_connect,
-            name_func=generate_test_name,
-            arg_sets=test_args,
-        )
-
-    def setup_class(self):
-        super().setup_class()
-
-        device_type = self.user_params.get("dut", "fuchsia_devices")
-        if device_type == "fuchsia_devices":
-            self.dut = create_wlan_device(self.fuchsia_devices[0])
-        elif device_type == "android_devices":
-            self.dut = create_wlan_device(self.android_devices[0])
-        else:
-            raise ValueError(
-                f'Invalid "dut" type specified in config: "{device_type}".'
-                'Expected "fuchsia_devices" or "android_devices".'
-            )
-
-        self.access_point = self.access_points[0]
-        self.android_devices = getattr(self, "android_devices", [])
-        self.access_point.stop_all_aps()
-
-    def setup_test(self):
-        for ad in self.android_devices:
-            ad.droid.wakeLockAcquireBright()
-            ad.droid.wakeUpNow()
-        self.dut.wifi_toggle_state(True)
-
-    def teardown_test(self):
-        for ad in self.android_devices:
-            ad.droid.wakeLockRelease()
-            ad.droid.goToSleepNow()
-        self.dut.turn_location_off_and_scan_toggle_off()
-        self.dut.disconnect()
-        self.dut.reset_wifi()
-        self.download_ap_logs()
-        self.access_point.stop_all_aps()
-
-    def on_fail(self, test_name, begin_time):
-        super().on_fail(test_name, begin_time)
-        self.access_point.stop_all_aps()
-
-    def setup_and_connect(self, ap_settings):
-        """Uses ap_settings to set up ap and then attempts to associate a DUT.
-
-        Args:
-            ap_settings: a dict containing test case settings, including
-                bandwidth, security, n_capabilities, and ac_capabilities
-
-        """
-        ssid = rand_ascii_str(20)
-        security = ap_settings["security"]
-        chbw = ap_settings["chbw"]
-        password = None
-        target_security = None
-        if security:
-            password = security.password
-            target_security = security.security_mode_string
-        n_capabilities = ap_settings["n_capabilities"]
-        ac_capabilities = ap_settings["ac_capabilities"]
-
-        setup_ap(
-            access_point=self.access_point,
-            profile_name="whirlwind",
-            mode=hostapd_constants.MODE_11AC_MIXED,
-            channel=36,
-            n_capabilities=n_capabilities,
-            ac_capabilities=ac_capabilities,
-            force_wmm=True,
-            ssid=ssid,
-            security=security,
-            vht_bandwidth=chbw,
-            password=password,
-        )
-        asserts.assert_true(
-            self.dut.associate(
-                ssid, target_pwd=password, target_security=target_security
-            ),
-            "Failed to associate.",
-        )
-
-    # 1728 tests
-    def _generate_20mhz_test_args(self):
-        test_args = []
-
-        # 864 test cases for open security
-        # 864 test cases for wpa2 security
-        for combination in itertools.product(
-            SECURITIES,
-            VHT_MAX_MPDU_LEN,
-            RXLDPC,
-            RX_STBC,
-            TX_STBC,
-            MAX_A_MPDU,
-            RX_ANTENNA,
-            TX_ANTENNA,
-        ):
-            security = combination[0]
-            ac_capabilities = combination[1:]
-            test_args.append(
-                (
-                    {
-                        "chbw": 20,
-                        "security": security,
-                        "n_capabilities": N_CAPABS_20MHZ,
-                        "ac_capabilities": ac_capabilities,
-                    },
-                )
-            )
-
-        return test_args
-
-    # 1728 tests
-    def _generate_40mhz_test_args(self):
-        test_args = []
-
-        # 864 test cases for open security
-        # 864 test cases for wpa2 security
-        for combination in itertools.product(
-            SECURITIES,
-            VHT_MAX_MPDU_LEN,
-            RXLDPC,
-            RX_STBC,
-            TX_STBC,
-            MAX_A_MPDU,
-            RX_ANTENNA,
-            TX_ANTENNA,
-        ):
-            security = combination[0]
-            ac_capabilities = combination[1:]
-            test_args.append(
-                (
-                    {
-                        "chbw": 40,
-                        "security": security,
-                        "n_capabilities": N_CAPABS_40MHZ,
-                        "ac_capabilities": ac_capabilities,
-                    },
-                )
-            )
-
-        return test_args
-
-    # 3456 tests
-    def _generate_80mhz_test_args(self):
-        test_args = []
-
-        # 1728 test cases for open security
-        # 1728 test cases for wpa2 security
-        for combination in itertools.product(
-            SECURITIES,
-            VHT_MAX_MPDU_LEN,
-            RXLDPC,
-            SHORT_GI_80,
-            RX_STBC,
-            TX_STBC,
-            MAX_A_MPDU,
-            RX_ANTENNA,
-            TX_ANTENNA,
-        ):
-            security = combination[0]
-            ac_capabilities = combination[1:]
-            test_args.append(
-                (
-                    {
-                        "chbw": 80,
-                        "security": security,
-                        "n_capabilities": N_CAPABS_40MHZ,
-                        "ac_capabilities": ac_capabilities,
-                    },
-                )
-            )
-        return test_args
-
-
-if __name__ == "__main__":
-    test_runner.main()
diff --git a/src/antlion/tests/wlan/compliance/WlanPhyCompliance11NTest.py b/src/antlion/tests/wlan/compliance/WlanPhyCompliance11NTest.py
deleted file mode 100644
index 244953e..0000000
--- a/src/antlion/tests/wlan/compliance/WlanPhyCompliance11NTest.py
+++ /dev/null
@@ -1,593 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import itertools
-
-from antlion import utils
-from antlion.controllers.access_point import setup_ap
-from antlion.controllers.ap_lib import hostapd_constants
-from antlion.controllers.ap_lib import hostapd_config
-from antlion.controllers.ap_lib.hostapd_security import Security
-from antlion.controllers.ap_lib.hostapd_utils import generate_random_password
-from antlion.test_utils.abstract_devices.wlan_device import create_wlan_device
-from antlion.test_utils.wifi import base_test
-
-from mobly import asserts, test_runner
-
-FREQUENCY_24 = ["2.4GHz"]
-FREQUENCY_5 = ["5GHz"]
-CHANNEL_BANDWIDTH_20 = ["HT20"]
-CHANNEL_BANDWIDTH_40_LOWER = ["HT40-"]
-CHANNEL_BANDWIDTH_40_UPPER = ["HT40+"]
-SECURITY_OPEN = "open"
-SECURITY_WPA2 = "wpa2"
-N_MODE = [hostapd_constants.MODE_11N_PURE, hostapd_constants.MODE_11N_MIXED]
-LDPC = [hostapd_constants.N_CAPABILITY_LDPC, ""]
-TX_STBC = [hostapd_constants.N_CAPABILITY_TX_STBC, ""]
-RX_STBC = [hostapd_constants.N_CAPABILITY_RX_STBC1, ""]
-SGI_20 = [hostapd_constants.N_CAPABILITY_SGI20, ""]
-SGI_40 = [hostapd_constants.N_CAPABILITY_SGI40, ""]
-DSSS_CCK = [hostapd_constants.N_CAPABILITY_DSSS_CCK_40, ""]
-INTOLERANT_40 = [hostapd_constants.N_CAPABILITY_40_INTOLERANT, ""]
-MAX_AMPDU_7935 = [hostapd_constants.N_CAPABILITY_MAX_AMSDU_7935, ""]
-SMPS = [hostapd_constants.N_CAPABILITY_SMPS_STATIC, ""]
-
-
-def generate_test_name(settings):
-    """Generates a string based on the n_capabilities for a test case
-
-    Args:
-        settings: A dictionary of hostapd constant n_capabilities.
-
-    Returns:
-        A string that represents a test case name.
-    """
-    ret = []
-    for cap in hostapd_constants.N_CAPABILITIES_MAPPING.keys():
-        if cap in settings["n_capabilities"]:
-            ret.append(hostapd_constants.N_CAPABILITIES_MAPPING[cap])
-    # '+' is used by Mobile Harness as special character, don't use it in test names
-    if settings["chbw"] == "HT40-":
-        chbw = "HT40Lower"
-    elif settings["chbw"] == "HT40+":
-        chbw = "HT40Upper"
-    else:
-        chbw = settings["chbw"]
-    return "test_11n_%s_%s_%s_%s_%s" % (
-        settings["frequency"],
-        chbw,
-        settings["security"],
-        settings["n_mode"],
-        "".join(ret),
-    )
-
-
-class WlanPhyCompliance11NTest(base_test.WifiBaseTest):
-    """Tests for validating 11n PHYS.
-
-    Test Bed Requirement:
-    * One Android device or Fuchsia device
-    * One Access Point
-    """
-
-    def __init__(self, controllers):
-        super().__init__(controllers)
-
-    def setup_generated_tests(self):
-        test_args = (
-            self._generate_24_HT20_test_args()
-            + self._generate_24_HT40_lower_test_args()
-            + self._generate_24_HT40_upper_test_args()
-            + self._generate_5_HT20_test_args()
-            + self._generate_5_HT40_lower_test_args()
-            + self._generate_5_HT40_upper_test_args()
-            + self._generate_24_HT20_wpa2_test_args()
-            + self._generate_24_HT40_lower_wpa2_test_args()
-            + self._generate_24_HT40_upper_wpa2_test_args()
-            + self._generate_5_HT20_wpa2_test_args()
-            + self._generate_5_HT40_lower_wpa2_test_args()
-            + self._generate_5_HT40_upper_wpa2_test_args()
-        )
-
-        self.generate_tests(
-            test_logic=self.setup_and_connect,
-            name_func=generate_test_name,
-            arg_sets=test_args,
-        )
-
-    def setup_class(self):
-        super().setup_class()
-
-        device_type = self.user_params.get("dut", "fuchsia_devices")
-        if device_type == "fuchsia_devices":
-            self.dut = create_wlan_device(self.fuchsia_devices[0])
-        elif device_type == "android_devices":
-            self.dut = create_wlan_device(self.android_devices[0])
-        else:
-            raise ValueError(
-                f'Invalid "dut" type specified in config: "{device_type}".'
-                'Expected "fuchsia_devices" or "android_devices".'
-            )
-
-        self.access_point = self.access_points[0]
-        self.access_point.stop_all_aps()
-
-    def setup_test(self):
-        if hasattr(self, "android_devices"):
-            for ad in self.android_devices:
-                ad.droid.wakeLockAcquireBright()
-                ad.droid.wakeUpNow()
-        self.dut.wifi_toggle_state(True)
-
-    def teardown_test(self):
-        if hasattr(self, "android_devices"):
-            for ad in self.android_devices:
-                ad.droid.wakeLockRelease()
-                ad.droid.goToSleepNow()
-        self.dut.turn_location_off_and_scan_toggle_off()
-        self.dut.disconnect()
-        self.dut.reset_wifi()
-        self.download_ap_logs()
-        self.access_point.stop_all_aps()
-
-    def on_fail(self, test_name, begin_time):
-        super().on_fail(test_name, begin_time)
-        self.access_point.stop_all_aps()
-
-    def setup_and_connect(self, ap_settings):
-        """Generates a hostapd config, setups up the AP with that config, then
-           attempts to associate a DUT
-
-        Args:
-               ap_settings: A dictionary of hostapd constant n_capabilities.
-        """
-        ssid = utils.rand_ascii_str(20)
-        security_profile = None
-        password = None
-        temp_n_capabilities = list(ap_settings["n_capabilities"])
-        n_capabilities = []
-        for n_capability in temp_n_capabilities:
-            if n_capability in hostapd_constants.N_CAPABILITIES_MAPPING.keys():
-                n_capabilities.append(n_capability)
-
-        if ap_settings["chbw"] == "HT20" or ap_settings["chbw"] == "HT40+":
-            if ap_settings["frequency"] == "2.4GHz":
-                channel = 1
-            elif ap_settings["frequency"] == "5GHz":
-                channel = 36
-            else:
-                raise ValueError("Invalid frequence: %s" % ap_settings["frequency"])
-
-        elif ap_settings["chbw"] == "HT40-":
-            if ap_settings["frequency"] == "2.4GHz":
-                channel = 11
-            elif ap_settings["frequency"] == "5GHz":
-                channel = 60
-            else:
-                raise ValueError("Invalid frequency: %s" % ap_settings["frequency"])
-
-        else:
-            raise ValueError("Invalid channel bandwidth: %s" % ap_settings["chbw"])
-
-        if ap_settings["chbw"] == "HT40-" or ap_settings["chbw"] == "HT40+":
-            if hostapd_config.ht40_plus_allowed(channel):
-                extended_channel = hostapd_constants.N_CAPABILITY_HT40_PLUS
-            elif hostapd_config.ht40_minus_allowed(channel):
-                extended_channel = hostapd_constants.N_CAPABILITY_HT40_MINUS
-            else:
-                raise ValueError("Invalid channel: %s" % channel)
-            n_capabilities.append(extended_channel)
-
-        if ap_settings["security"] == "wpa2":
-            security_profile = Security(
-                security_mode=SECURITY_WPA2,
-                password=generate_random_password(length=20),
-                wpa_cipher="CCMP",
-                wpa2_cipher="CCMP",
-            )
-            password = security_profile.password
-        target_security = (
-            hostapd_constants.SECURITY_STRING_TO_DEFAULT_TARGET_SECURITY.get(
-                ap_settings["security"], None
-            )
-        )
-
-        mode = ap_settings["n_mode"]
-        if mode not in N_MODE:
-            raise ValueError("Invalid n-mode: %s" % ap_settings["n-mode"])
-
-        setup_ap(
-            access_point=self.access_point,
-            profile_name="whirlwind",
-            mode=mode,
-            channel=channel,
-            n_capabilities=n_capabilities,
-            ac_capabilities=[],
-            force_wmm=True,
-            ssid=ssid,
-            security=security_profile,
-            password=password,
-        )
-        asserts.assert_true(
-            self.dut.associate(
-                ssid, target_pwd=password, target_security=target_security
-            ),
-            "Failed to connect.",
-        )
-
-    def _generate_24_HT20_test_args(self):
-        test_args = []
-        for combination in itertools.product(
-            FREQUENCY_24,
-            CHANNEL_BANDWIDTH_20,
-            N_MODE,
-            LDPC,
-            TX_STBC,
-            RX_STBC,
-            SGI_20,
-            INTOLERANT_40,
-            MAX_AMPDU_7935,
-            SMPS,
-        ):
-            test_frequency = combination[0]
-            test_chbw = combination[1]
-            n_mode = combination[2]
-            n_capabilities = combination[3:]
-            test_args.append(
-                (
-                    {
-                        "frequency": test_frequency,
-                        "chbw": test_chbw,
-                        "n_mode": n_mode,
-                        "security": SECURITY_OPEN,
-                        "n_capabilities": n_capabilities,
-                    },
-                )
-            )
-        return test_args
-
-    def _generate_24_HT40_lower_test_args(self):
-        test_args = []
-        for combination in itertools.product(
-            FREQUENCY_24,
-            CHANNEL_BANDWIDTH_40_LOWER,
-            LDPC,
-            TX_STBC,
-            RX_STBC,
-            SGI_20,
-            SGI_40,
-            MAX_AMPDU_7935,
-            SMPS,
-            DSSS_CCK,
-        ):
-            test_frequency = combination[0]
-            test_chbw = combination[1]
-            n_capabilities = combination[2:]
-            test_args.append(
-                (
-                    {
-                        "frequency": test_frequency,
-                        "chbw": test_chbw,
-                        "n_mode": hostapd_constants.MODE_11N_MIXED,
-                        "security": SECURITY_OPEN,
-                        "n_capabilities": n_capabilities,
-                    },
-                )
-            )
-        return test_args
-
-    def _generate_24_HT40_upper_test_args(self):
-        test_args = []
-        for combination in itertools.product(
-            FREQUENCY_24,
-            CHANNEL_BANDWIDTH_40_UPPER,
-            LDPC,
-            TX_STBC,
-            RX_STBC,
-            SGI_20,
-            SGI_40,
-            MAX_AMPDU_7935,
-            SMPS,
-            DSSS_CCK,
-        ):
-            test_frequency = combination[0]
-            test_chbw = combination[1]
-            n_capabilities = combination[2:]
-            test_args.append(
-                (
-                    {
-                        "frequency": test_frequency,
-                        "chbw": test_chbw,
-                        "n_mode": hostapd_constants.MODE_11N_MIXED,
-                        "security": SECURITY_OPEN,
-                        "n_capabilities": n_capabilities,
-                    },
-                )
-            )
-        return test_args
-
-    def _generate_5_HT20_test_args(self):
-        test_args = []
-        for combination in itertools.product(
-            FREQUENCY_5,
-            CHANNEL_BANDWIDTH_20,
-            LDPC,
-            TX_STBC,
-            RX_STBC,
-            SGI_20,
-            INTOLERANT_40,
-            MAX_AMPDU_7935,
-            SMPS,
-        ):
-            test_frequency = combination[0]
-            test_chbw = combination[1]
-            n_capabilities = combination[2:]
-            test_args.append(
-                (
-                    {
-                        "frequency": test_frequency,
-                        "chbw": test_chbw,
-                        "n_mode": hostapd_constants.MODE_11N_MIXED,
-                        "security": SECURITY_OPEN,
-                        "n_capabilities": n_capabilities,
-                    },
-                )
-            )
-        return test_args
-
-    def _generate_5_HT40_lower_test_args(self):
-        test_args = []
-        for combination in itertools.product(
-            FREQUENCY_5,
-            CHANNEL_BANDWIDTH_40_LOWER,
-            LDPC,
-            TX_STBC,
-            RX_STBC,
-            SGI_20,
-            SGI_40,
-            MAX_AMPDU_7935,
-            SMPS,
-            DSSS_CCK,
-        ):
-            test_frequency = combination[0]
-            test_chbw = combination[1]
-            n_capabilities = combination[2:]
-            test_args.append(
-                (
-                    {
-                        "frequency": test_frequency,
-                        "chbw": test_chbw,
-                        "n_mode": hostapd_constants.MODE_11N_MIXED,
-                        "security": SECURITY_OPEN,
-                        "n_capabilities": n_capabilities,
-                    },
-                )
-            )
-        return test_args
-
-    def _generate_5_HT40_upper_test_args(self):
-        test_args = []
-        for combination in itertools.product(
-            FREQUENCY_5,
-            CHANNEL_BANDWIDTH_40_UPPER,
-            N_MODE,
-            LDPC,
-            TX_STBC,
-            RX_STBC,
-            SGI_20,
-            SGI_40,
-            MAX_AMPDU_7935,
-            SMPS,
-            DSSS_CCK,
-        ):
-            test_frequency = combination[0]
-            test_chbw = combination[1]
-            n_mode = combination[2]
-            n_capabilities = combination[3:]
-            test_args.append(
-                (
-                    {
-                        "frequency": test_frequency,
-                        "chbw": test_chbw,
-                        "n_mode": n_mode,
-                        "security": SECURITY_OPEN,
-                        "n_capabilities": n_capabilities,
-                    },
-                )
-            )
-        return test_args
-
-    def _generate_24_HT20_wpa2_test_args(self):
-        test_args = []
-        for combination in itertools.product(
-            FREQUENCY_24,
-            CHANNEL_BANDWIDTH_20,
-            LDPC,
-            TX_STBC,
-            RX_STBC,
-            SGI_20,
-            INTOLERANT_40,
-            MAX_AMPDU_7935,
-            SMPS,
-        ):
-            test_frequency = combination[0]
-            test_chbw = combination[1]
-            n_capabilities = combination[2:]
-            test_args.append(
-                (
-                    {
-                        "frequency": test_frequency,
-                        "chbw": test_chbw,
-                        "n_mode": hostapd_constants.MODE_11N_MIXED,
-                        "security": SECURITY_WPA2,
-                        "n_capabilities": n_capabilities,
-                    },
-                )
-            )
-        return test_args
-
-    def _generate_24_HT40_lower_wpa2_test_args(self):
-        test_args = []
-        for combination in itertools.product(
-            FREQUENCY_24,
-            CHANNEL_BANDWIDTH_40_LOWER,
-            LDPC,
-            TX_STBC,
-            RX_STBC,
-            SGI_20,
-            SGI_40,
-            MAX_AMPDU_7935,
-            SMPS,
-            DSSS_CCK,
-        ):
-            test_frequency = combination[0]
-            test_chbw = combination[1]
-            n_capabilities = combination[2:]
-            test_args.append(
-                (
-                    {
-                        "frequency": test_frequency,
-                        "chbw": test_chbw,
-                        "n_mode": hostapd_constants.MODE_11N_MIXED,
-                        "security": SECURITY_WPA2,
-                        "n_capabilities": n_capabilities,
-                    },
-                )
-            )
-        return test_args
-
-    def _generate_24_HT40_upper_wpa2_test_args(self):
-        test_args = []
-        for combination in itertools.product(
-            FREQUENCY_24,
-            CHANNEL_BANDWIDTH_40_UPPER,
-            LDPC,
-            TX_STBC,
-            RX_STBC,
-            SGI_20,
-            SGI_40,
-            MAX_AMPDU_7935,
-            SMPS,
-            DSSS_CCK,
-        ):
-            test_frequency = combination[0]
-            test_chbw = combination[1]
-            n_capabilities = combination[2:]
-            test_args.append(
-                (
-                    {
-                        "frequency": test_frequency,
-                        "chbw": test_chbw,
-                        "n_mode": hostapd_constants.MODE_11N_MIXED,
-                        "security": SECURITY_WPA2,
-                        "n_capabilities": n_capabilities,
-                    },
-                )
-            )
-        return test_args
-
-    def _generate_5_HT20_wpa2_test_args(self):
-        test_args = []
-        for combination in itertools.product(
-            FREQUENCY_5,
-            CHANNEL_BANDWIDTH_20,
-            LDPC,
-            TX_STBC,
-            RX_STBC,
-            SGI_20,
-            INTOLERANT_40,
-            MAX_AMPDU_7935,
-            SMPS,
-        ):
-            test_frequency = combination[0]
-            test_chbw = combination[1]
-            n_capabilities = combination[2:]
-            test_args.append(
-                (
-                    {
-                        "frequency": test_frequency,
-                        "chbw": test_chbw,
-                        "n_mode": hostapd_constants.MODE_11N_MIXED,
-                        "security": SECURITY_WPA2,
-                        "n_capabilities": n_capabilities,
-                    },
-                )
-            )
-        return test_args
-
-    def _generate_5_HT40_lower_wpa2_test_args(self):
-        test_args = []
-        for combination in itertools.product(
-            FREQUENCY_5,
-            CHANNEL_BANDWIDTH_40_LOWER,
-            LDPC,
-            TX_STBC,
-            RX_STBC,
-            SGI_20,
-            SGI_40,
-            MAX_AMPDU_7935,
-            SMPS,
-            DSSS_CCK,
-        ):
-            test_frequency = combination[0]
-            test_chbw = combination[1]
-            n_capabilities = combination[2:]
-            test_args.append(
-                (
-                    {
-                        "frequency": test_frequency,
-                        "chbw": test_chbw,
-                        "n_mode": hostapd_constants.MODE_11N_MIXED,
-                        "security": SECURITY_WPA2,
-                        "n_capabilities": n_capabilities,
-                    },
-                )
-            )
-        return test_args
-
-    def _generate_5_HT40_upper_wpa2_test_args(self):
-        test_args = []
-        for combination in itertools.product(
-            FREQUENCY_5,
-            CHANNEL_BANDWIDTH_40_UPPER,
-            LDPC,
-            TX_STBC,
-            RX_STBC,
-            SGI_20,
-            SGI_40,
-            MAX_AMPDU_7935,
-            SMPS,
-            DSSS_CCK,
-        ):
-            test_frequency = combination[0]
-            test_chbw = combination[1]
-            n_capabilities = combination[2:]
-            test_args.append(
-                (
-                    {
-                        "frequency": test_frequency,
-                        "chbw": test_chbw,
-                        "n_mode": hostapd_constants.MODE_11N_MIXED,
-                        "security": SECURITY_WPA2,
-                        "n_capabilities": n_capabilities,
-                    },
-                )
-            )
-        return test_args
-
-
-if __name__ == "__main__":
-    test_runner.main()
diff --git a/src/antlion/tests/wlan/facade/BUILD.gn b/src/antlion/tests/wlan/facade/BUILD.gn
deleted file mode 100644
index c62ce70..0000000
--- a/src/antlion/tests/wlan/facade/BUILD.gn
+++ /dev/null
@@ -1,32 +0,0 @@
-# Copyright 2023 The Fuchsia Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import("//third_party/antlion/antlion_host_test.gni")
-import("//third_party/antlion/environments.gni")
-
-assert(is_host, "antlion tests only supported for host testing")
-
-antlion_host_test("wlan_deprecated_configuration_test") {
-  main_source = "WlanDeprecatedConfigurationTest.py"
-  environments = display_envs
-}
-
-antlion_host_test("wlan_facade_test") {
-  main_source = "WlanFacadeTest.py"
-  environments = display_envs
-}
-
-antlion_host_test("wlan_status_test") {
-  main_source = "WlanStatusTest.py"
-  environments = display_envs
-}
-
-group("e2e_tests") {
-  testonly = true
-  public_deps = [
-    ":wlan_deprecated_configuration_test($host_toolchain)",
-    ":wlan_facade_test($host_toolchain)",
-    ":wlan_status_test($host_toolchain)",
-  ]
-}
diff --git a/src/antlion/tests/wlan/facade/WlanDeprecatedConfigurationTest.py b/src/antlion/tests/wlan/facade/WlanDeprecatedConfigurationTest.py
deleted file mode 100644
index 7fee369..0000000
--- a/src/antlion/tests/wlan/facade/WlanDeprecatedConfigurationTest.py
+++ /dev/null
@@ -1,200 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion import utils
-from antlion.test_utils.wifi import base_test
-from antlion.test_utils.abstract_devices.wlan_device import create_wlan_device
-
-from mobly import asserts, test_runner
-
-AP_ROLE = "Ap"
-DEFAULT_SSID = "testssid"
-DEFAULT_SECURITY = "none"
-DEFAULT_PASSWORD = ""
-DEFAULT_CONNECTIVITY_MODE = "local_only"
-DEFAULT_OPERATING_BAND = "any"
-TEST_MAC_ADDR = "12:34:56:78:9a:bc"
-TEST_MAC_ADDR_SECONDARY = "bc:9a:78:56:34:12"
-
-
-class WlanDeprecatedConfigurationTest(base_test.WifiBaseTest):
-    """Tests for WlanDeprecatedConfigurationFacade"""
-
-    def setup_class(self):
-        super().setup_class()
-        self.dut = create_wlan_device(self.fuchsia_devices[0])
-
-    def setup_test(self):
-        self._stop_soft_aps()
-
-    def teardown_test(self):
-        self._stop_soft_aps()
-
-    def _get_ap_interface_mac_address(self):
-        """Retrieves mac address from wlan interface with role ap
-
-        Returns:
-            string, the mac address of the AP interface
-
-        Raises:
-            ConnectionError, if SL4F calls fail
-            AttributeError, if no interface has role 'Ap'
-        """
-        wlan_ifaces = self.dut.device.sl4f.wlan_lib.wlanGetIfaceIdList()
-        if wlan_ifaces.get("error"):
-            raise ConnectionError(
-                "Failed to get wlan interface IDs: %s" % wlan_ifaces["error"]
-            )
-
-        for wlan_iface in wlan_ifaces["result"]:
-            iface_info = self.dut.device.sl4f.wlan_lib.wlanQueryInterface(wlan_iface)
-            if iface_info.get("error"):
-                raise ConnectionError(
-                    "Failed to query wlan iface: %s" % iface_info["error"]
-                )
-
-            if iface_info["result"]["role"] == AP_ROLE:
-                if "mac_addr" in iface_info["result"]:
-                    return utils.mac_address_list_to_str(
-                        iface_info["result"]["mac_addr"]
-                    )
-                elif "sta_addr" in iface_info["result"]:
-                    return utils.mac_address_list_to_str(
-                        iface_info["result"]["sta_addr"]
-                    )
-                raise AttributeError("AP iface info does not contain MAC address.")
-        raise AttributeError(
-            "Failed to get ap interface mac address. No AP interface found."
-        )
-
-    def _start_soft_ap(self):
-        """Starts SoftAP on DUT.
-
-        Raises:
-            ConnectionError, if SL4F call fails.
-        """
-        self.log.info("Starting SoftAP on Fuchsia device (%s)." % self.dut.device.ip)
-        response = self.dut.device.sl4f.wlan_ap_policy_lib.wlanStartAccessPoint(
-            DEFAULT_SSID,
-            DEFAULT_SECURITY,
-            DEFAULT_PASSWORD,
-            DEFAULT_CONNECTIVITY_MODE,
-            DEFAULT_OPERATING_BAND,
-        )
-        if response.get("error"):
-            raise ConnectionError("Failed to setup SoftAP: %s" % response["error"])
-
-    def _stop_soft_aps(self):
-        """Stops SoftAP on DUT.
-
-        Raises:
-            ConnectionError, if SL4F call fails.
-        """
-        self.log.info("Stopping SoftAP.")
-        response = self.dut.device.sl4f.wlan_ap_policy_lib.wlanStopAllAccessPoint()
-        if response.get("error"):
-            raise ConnectionError("Failed to stop SoftAP: %s" % response["error"])
-
-    def _suggest_ap_mac_addr(self, mac_addr):
-        """Suggests mac address for AP interface.
-        Args:
-            mac_addr: string, mac address to suggest.
-
-        Raises:
-            TestFailure, if SL4F call fails.
-        """
-        self.log.info(
-            "Suggesting AP mac addr (%s) via wlan_deprecated_configuration_lib."
-            % mac_addr
-        )
-        response = self.dut.device.sl4f.wlan_deprecated_configuration_lib.wlanSuggestAccessPointMacAddress(
-            mac_addr
-        )
-        if response.get("error"):
-            asserts.fail(
-                "Failed to suggest AP mac address (%s): %s"
-                % (mac_addr, response["error"])
-            )
-
-    def _verify_mac_addr(self, expected_addr):
-        """Verifies mac address of ap interface is set to expected mac address.
-
-        Args:
-            Args:
-                expected_addr: string, expected mac address
-
-            Raises:
-                TestFailure, if actual mac address is not expected mac address.
-        """
-        set_mac_addr = self._get_ap_interface_mac_address()
-        if set_mac_addr != expected_addr:
-            asserts.fail(
-                "Failed to set AP mac address "
-                "via wlan_deprecated_configuration_lib. Expected mac addr: %s,"
-                " Actual mac addr: %s" % (expected_addr, set_mac_addr)
-            )
-        else:
-            self.log.info("AP mac address successfully set to %s" % expected_addr)
-
-    def test_suggest_ap_mac_address(self):
-        """Tests suggest ap mac address SL4F call
-
-        1. Get initial mac address
-        2. Suggest new mac address
-        3. Verify new mac address is set successfully
-        4. Reset to initial mac address
-        5. Verify initial mac address is reset successfully
-
-
-        Raises:
-            TestFailure, if wlanSuggestAccessPointMacAddress call fails or
-                of mac address is not the suggest value
-            ConnectionError, if other SL4F calls fail
-        """
-        # Retrieve initial ap mac address
-        self._start_soft_ap()
-
-        self.log.info("Getting initial mac address.")
-        initial_mac_addr = self._get_ap_interface_mac_address()
-        self.log.info("Initial mac address: %s" % initial_mac_addr)
-
-        if initial_mac_addr != TEST_MAC_ADDR:
-            suggested_mac_addr = TEST_MAC_ADDR
-        else:
-            suggested_mac_addr = TEST_MAC_ADDR_SECONDARY
-
-        self._stop_soft_aps()
-
-        # Suggest and verify new mac address
-        self._suggest_ap_mac_addr(suggested_mac_addr)
-
-        self._start_soft_ap()
-
-        self._verify_mac_addr(suggested_mac_addr)
-
-        self._stop_soft_aps()
-
-        # Reset to initial mac address and verify
-        self.log.info("Resetting to initial mac address (%s)." % initial_mac_addr)
-        self._suggest_ap_mac_addr(initial_mac_addr)
-
-        self._start_soft_ap()
-
-        self._verify_mac_addr(initial_mac_addr)
-
-
-if __name__ == "__main__":
-    test_runner.main()
diff --git a/src/antlion/tests/wlan/facade/WlanFacadeTest.py b/src/antlion/tests/wlan/facade/WlanFacadeTest.py
deleted file mode 100644
index fdddf69..0000000
--- a/src/antlion/tests/wlan/facade/WlanFacadeTest.py
+++ /dev/null
@@ -1,81 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-Script for verifying that we can invoke methods of the WlanFacade.
-
-"""
-import array
-
-from antlion.test_utils.wifi import base_test
-from antlion.test_utils.abstract_devices.wlan_device import create_wlan_device
-
-from mobly import asserts, signals, test_runner
-
-
-class WlanFacadeTest(base_test.WifiBaseTest):
-    def setup_class(self):
-        super().setup_class()
-        if len(self.fuchsia_devices) < 1:
-            raise signals.TestAbortClass(
-                "Sorry, please try verifying FuchsiaDevice is in your "
-                "config file and try again."
-            )
-        self.dut = create_wlan_device(self.fuchsia_devices[0])
-
-    def test_get_phy_id_list(self):
-        result = self.dut.device.sl4f.wlan_lib.wlanPhyIdList()
-        error = result["error"]
-        asserts.assert_true(error is None, error)
-
-        self.log.info("Got Phy IDs %s" % result["result"])
-        return True
-
-    def test_get_country(self):
-        wlan_lib = self.dut.device.sl4f.wlan_lib
-
-        result = wlan_lib.wlanPhyIdList()
-        error = result["error"]
-        asserts.assert_true(error is None, error)
-        phy_id = result["result"][0]
-
-        result = wlan_lib.wlanGetCountry(phy_id)
-        error = result["error"]
-        asserts.assert_true(error is None, error)
-
-        country_bytes = result["result"]
-        country_string = str(array.array("b", country_bytes), encoding="us-ascii")
-        self.log.info("Got country %s (%s)", country_string, country_bytes)
-        return True
-
-    def test_get_dev_path(self):
-        wlan_lib = self.dut.device.sl4f.wlan_lib
-
-        result = wlan_lib.wlanPhyIdList()
-        error = result["error"]
-        asserts.assert_true(error is None, error)
-        phy_id = result["result"][0]
-
-        result = wlan_lib.wlanGetDevPath(phy_id)
-        error = result["error"]
-        asserts.assert_true(error is None, error)
-
-        dev_path = result["result"]
-        self.log.info("Got device path: %s", dev_path)
-        return True
-
-
-if __name__ == "__main__":
-    test_runner.main()
diff --git a/src/antlion/tests/wlan/facade/WlanStatusTest.py b/src/antlion/tests/wlan/facade/WlanStatusTest.py
deleted file mode 100644
index cf70b0d..0000000
--- a/src/antlion/tests/wlan/facade/WlanStatusTest.py
+++ /dev/null
@@ -1,85 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-Test to verify that a DUT's client interface's status can be queried.
-"""
-
-from antlion.test_utils.wifi import base_test
-
-from mobly import signals, test_runner
-
-
-class WlanStatusTest(base_test.WifiBaseTest):
-    """WLAN status test class.
-
-    Test Bed Requirements:
-    * One or more Fuchsia devices with WLAN client capabilities.
-    """
-
-    def setup_class(self):
-        super().setup_class()
-        for fd in self.fuchsia_devices:
-            fd.configure_wlan(
-                association_mechanism="policy", preserve_saved_networks=True
-            )
-
-    def on_fail(self, test_name, begin_time):
-        for fd in self.fuchsia_devices:
-            super().on_device_fail(fd, test_name, begin_time)
-            fd.configure_wlan(
-                association_mechanism="policy", preserve_saved_networks=True
-            )
-
-    def test_wlan_stopped_client_status(self):
-        """Queries WLAN status on DUTs with no WLAN ifaces.
-
-        Tests that DUTs without WLAN interfaces have empty results and return
-        an error when queried for status.
-        """
-        for fd in self.fuchsia_devices:
-            fd.deconfigure_wlan()
-
-            status = fd.sl4f.wlan_lib.wlanStatus()
-            self.log.debug(status)
-            if not status["error"] or status["result"]:
-                raise signals.TestFailure("DUT's WLAN client status should be empty")
-
-        raise signals.TestPass("Success")
-
-    def test_wlan_started_client_status(self):
-        """Queries WLAN status on DUTs with WLAN ifaces.
-
-        Tests that, once WLAN client interfaces have been created, each one
-        returns a result and that none of them return errors when queried for
-        status.
-        """
-        for fd in self.fuchsia_devices:
-            fd.configure_wlan(
-                association_mechanism="policy", preserve_saved_networks=True
-            )
-
-            status = fd.sl4f.wlan_lib.wlanStatus()
-            self.log.debug(status)
-            if status["error"] or not status["result"]:
-                raise signals.TestFailure(
-                    "DUT's WLAN client status should be populated"
-                )
-
-        raise signals.TestPass("Success")
-
-
-if __name__ == "__main__":
-    test_runner.main()
diff --git a/src/antlion/tests/wlan/functional/BUILD.gn b/src/antlion/tests/wlan/functional/BUILD.gn
deleted file mode 100644
index 7171f0a..0000000
--- a/src/antlion/tests/wlan/functional/BUILD.gn
+++ /dev/null
@@ -1,97 +0,0 @@
-# Copyright 2023 The Fuchsia Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import("//third_party/antlion/antlion_host_test.gni")
-import("//third_party/antlion/environments.gni")
-
-assert(is_host, "antlion tests only supported for host testing")
-
-antlion_host_test("beacon_loss_test") {
-  main_source = "BeaconLossTest.py"
-  environments = display_ap_envs
-}
-
-antlion_host_test("channel_switch_test") {
-  main_source = "ChannelSwitchTest.py"
-  environments = display_ap_envs
-}
-
-antlion_host_test("connection_stress_test") {
-  main_source = "ConnectionStressTest.py"
-  environments = display_ap_envs
-}
-
-antlion_host_test("download_stress_test") {
-  main_source = "DownloadStressTest.py"
-
-  # Requires external internet access. This is considered bad practice for an
-  # automated test due to reliance on external services. Will remain an at-desk
-  # dest until rewritten to remove dependance on external services.
-  # environments = display_ap_envs
-  environments = []
-}
-
-antlion_host_test("ping_stress_test") {
-  main_source = "PingStressTest.py"
-  environments = display_ap_envs
-}
-
-antlion_host_test("soft_ap_test") {
-  main_source = "SoftApTest.py"
-
-  # Requires one Fuchsia device and one Anddroid device. There are no
-  # infra-hosted environments to run this test on. Will likely remain an at-desk
-  # test for as long as it requires an Android device.
-  environments = []
-}
-
-antlion_host_test("wlan_reboot_test") {
-  main_source = "WlanRebootTest.py"
-  test_params = "wlan_reboot_test_params.yaml"
-  environments = display_ap_iperf_envs
-}
-
-antlion_host_test("wlan_scan_test") {
-  main_source = "WlanScanTest.py"
-  environments = display_ap_envs
-}
-
-antlion_host_test("wlan_target_security_test") {
-  main_source = "WlanTargetSecurityTest.py"
-  environments = display_ap_envs
-}
-
-antlion_host_test("wlan_wireless_network_management_test") {
-  main_source = "WlanWirelessNetworkManagementTest.py"
-  environments = display_ap_envs
-}
-
-group("e2e_tests") {
-  testonly = true
-  public_deps = [
-    ":beacon_loss_test($host_toolchain)",
-    ":channel_switch_test($host_toolchain)",
-    ":ping_stress_test($host_toolchain)",
-    ":wlan_reboot_test($host_toolchain)",
-    ":wlan_scan_test($host_toolchain)",
-    ":wlan_target_security_test($host_toolchain)",
-    ":wlan_wireless_network_management_test($host_toolchain)",
-  ]
-}
-
-group("e2e_tests_quick") {
-  testonly = true
-  public_deps = [
-    ":ping_stress_test($host_toolchain)",
-  ]
-}
-
-# Tests that are disabled in automation
-group("e2e_tests_manual") {
-  testonly = true
-  public_deps = [
-    ":download_stress_test($host_toolchain)",
-    ":soft_ap_test($host_toolchain)",
-  ]
-}
diff --git a/src/antlion/tests/wlan/functional/ChannelSwitchTest.py b/src/antlion/tests/wlan/functional/ChannelSwitchTest.py
deleted file mode 100644
index b9a674c..0000000
--- a/src/antlion/tests/wlan/functional/ChannelSwitchTest.py
+++ /dev/null
@@ -1,412 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-Tests STA handling of channel switch announcements.
-"""
-
-import random
-import time
-from typing import Sequence
-
-from antlion.controllers.access_point import setup_ap
-from antlion.controllers.ap_lib import hostapd_constants
-from antlion.utils import rand_ascii_str
-from antlion.test_utils.wifi import base_test
-from antlion.test_utils.abstract_devices.wlan_device import create_wlan_device
-
-from mobly import asserts, test_runner
-
-
-class ChannelSwitchTest(base_test.WifiBaseTest):
-    # Time to wait between issuing channel switches
-    WAIT_BETWEEN_CHANNEL_SWITCHES_S = 15
-
-    # For operating class 115 tests.
-    GLOBAL_OPERATING_CLASS_115_CHANNELS = [36, 40, 44, 48]
-    # A channel outside the operating class.
-    NON_GLOBAL_OPERATING_CLASS_115_CHANNEL = 52
-
-    # For operating class 124 tests.
-    GLOBAL_OPERATING_CLASS_124_CHANNELS = [149, 153, 157, 161]
-    # A channel outside the operating class.
-    NON_GLOBAL_OPERATING_CLASS_124_CHANNEL = 52
-
-    def setup_class(self) -> None:
-        super().setup_class()
-        self.ssid = rand_ascii_str(10)
-
-        device_type = self.user_params.get("dut", "fuchsia_devices")
-        if device_type == "fuchsia_devices":
-            self.dut = create_wlan_device(self.fuchsia_devices[0])
-        elif device_type == "android_devices":
-            self.dut = create_wlan_device(self.android_devices[0])
-        else:
-            raise ValueError(
-                f'Invalid "dut" type specified in config: "{device_type}".'
-                'Expected "fuchsia_devices" or "android_devices".'
-            )
-
-        self.access_point = self.access_points[0]
-        self._stop_all_soft_aps()
-        self.in_use_interface = None
-
-    def teardown_test(self) -> None:
-        self.dut.disconnect()
-        self.dut.reset_wifi()
-        self.download_ap_logs()
-        self.access_point.stop_all_aps()
-
-    # TODO(fxbug.dev/85738): Change band type to an enum.
-    def channel_switch(
-        self,
-        band: str,
-        starting_channel: int,
-        channel_switches: Sequence[int],
-        test_with_soft_ap: bool = False,
-    ) -> None:
-        """Setup and run a channel switch test with the given parameters.
-
-        Creates an AP, associates to it, and then issues channel switches
-        through the provided channels. After each channel switch, the test
-        checks that the DUT is connected for a period of time before considering
-        the channel switch successful. If directed to start a SoftAP, the test
-        will also check that the SoftAP is on the expected channel after each
-        channel switch.
-
-        Args:
-            band: band that AP will use, must be a valid band (e.g.
-                hostapd_constants.BAND_2G)
-            starting_channel: channel number that AP will use at startup
-            channel_switches: ordered list of channels that the test will
-                attempt to switch to
-            test_with_soft_ap: whether to start a SoftAP before beginning the
-                channel switches (default is False); note that if a SoftAP is
-                started, the test will also check that the SoftAP handles
-                channel switches correctly
-        """
-        asserts.assert_true(
-            band in [hostapd_constants.BAND_2G, hostapd_constants.BAND_5G],
-            "Failed to setup AP, invalid band {}".format(band),
-        )
-
-        self.current_channel_num = starting_channel
-        if band == hostapd_constants.BAND_5G:
-            self.in_use_interface = self.access_point.wlan_5g
-        elif band == hostapd_constants.BAND_2G:
-            self.in_use_interface = self.access_point.wlan_2g
-        asserts.assert_true(
-            self._channels_valid_for_band([self.current_channel_num], band),
-            "starting channel {} not a valid channel for band {}".format(
-                self.current_channel_num, band
-            ),
-        )
-
-        setup_ap(
-            access_point=self.access_point,
-            profile_name="whirlwind",
-            channel=self.current_channel_num,
-            ssid=self.ssid,
-        )
-        if test_with_soft_ap:
-            self._start_soft_ap()
-        self.log.info("sending associate command for ssid %s", self.ssid)
-        self.dut.associate(target_ssid=self.ssid)
-        asserts.assert_true(self.dut.is_connected(), "Failed to connect.")
-
-        asserts.assert_true(
-            channel_switches, "Cannot run test, no channels to switch to"
-        )
-        asserts.assert_true(
-            self._channels_valid_for_band(channel_switches, band),
-            "channel_switches {} includes invalid channels for band {}".format(
-                channel_switches, band
-            ),
-        )
-
-        for channel_num in channel_switches:
-            if channel_num == self.current_channel_num:
-                continue
-            self.log.info(
-                "channel switch: {} -> {}".format(self.current_channel_num, channel_num)
-            )
-            self.access_point.channel_switch(self.in_use_interface, channel_num)
-            channel_num_after_switch = self.access_point.get_current_channel(
-                self.in_use_interface
-            )
-            asserts.assert_equal(
-                channel_num_after_switch, channel_num, "AP failed to channel switch"
-            )
-            self.current_channel_num = channel_num
-
-            # Check periodically to see if DUT stays connected. Sometimes
-            # CSA-induced disconnects occur seconds after last channel switch.
-            for _ in range(self.WAIT_BETWEEN_CHANNEL_SWITCHES_S):
-                asserts.assert_true(
-                    self.dut.is_connected(),
-                    "Failed to stay connected after channel switch.",
-                )
-                client_channel = self._client_channel()
-                asserts.assert_equal(
-                    client_channel,
-                    channel_num,
-                    "Client interface on wrong channel ({})".format(client_channel),
-                )
-                if test_with_soft_ap:
-                    soft_ap_channel = self._soft_ap_channel()
-                    asserts.assert_equal(
-                        soft_ap_channel,
-                        channel_num,
-                        "SoftAP interface on wrong channel ({})".format(
-                            soft_ap_channel
-                        ),
-                    )
-                time.sleep(1)
-
-    def test_channel_switch_2g(self) -> None:
-        """Channel switch through all (US only) channels in the 2 GHz band."""
-        self.channel_switch(
-            band=hostapd_constants.BAND_2G,
-            starting_channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-            channel_switches=hostapd_constants.US_CHANNELS_2G,
-        )
-
-    def test_channel_switch_2g_with_soft_ap(self) -> None:
-        """Channel switch through (US only) 2 Ghz channels with SoftAP up."""
-        self.channel_switch(
-            band=hostapd_constants.BAND_2G,
-            starting_channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-            channel_switches=hostapd_constants.US_CHANNELS_2G,
-            test_with_soft_ap=True,
-        )
-
-    def test_channel_switch_2g_shuffled_with_soft_ap(self) -> None:
-        """Switch through shuffled (US only) 2 Ghz channels with SoftAP up."""
-        channels = hostapd_constants.US_CHANNELS_2G
-        random.shuffle(channels)
-        self.log.info("Shuffled channel switch sequence: {}".format(channels))
-        self.channel_switch(
-            band=hostapd_constants.BAND_2G,
-            starting_channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-            channel_switches=channels,
-            test_with_soft_ap=True,
-        )
-
-    # TODO(fxbug.dev/84777): This test fails.
-    def test_channel_switch_5g(self) -> None:
-        """Channel switch through all (US only) channels in the 5 GHz band."""
-        self.channel_switch(
-            band=hostapd_constants.BAND_5G,
-            starting_channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-            channel_switches=hostapd_constants.US_CHANNELS_5G,
-        )
-
-    # TODO(fxbug.dev/84777): This test fails.
-    def test_channel_switch_5g_with_soft_ap(self) -> None:
-        """Channel switch through (US only) 5 GHz channels with SoftAP up."""
-        self.channel_switch(
-            band=hostapd_constants.BAND_5G,
-            starting_channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-            channel_switches=hostapd_constants.US_CHANNELS_5G,
-            test_with_soft_ap=True,
-        )
-
-    def test_channel_switch_5g_shuffled_with_soft_ap(self) -> None:
-        """Switch through shuffled (US only) 5 Ghz channels with SoftAP up."""
-        channels = hostapd_constants.US_CHANNELS_5G
-        random.shuffle(channels)
-        self.log.info("Shuffled channel switch sequence: {}".format(channels))
-        self.channel_switch(
-            band=hostapd_constants.BAND_5G,
-            starting_channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-            channel_switches=channels,
-            test_with_soft_ap=True,
-        )
-
-    # TODO(fxbug.dev/84777): This test fails.
-    def test_channel_switch_regression_global_operating_class_115(self) -> None:
-        """Channel switch into, through, and out of global op. class 115 channels.
-
-        Global operating class 115 is described in IEEE 802.11-2016 Table E-4.
-        Regression test for fxbug.dev/84777.
-        """
-        channels = self.GLOBAL_OPERATING_CLASS_115_CHANNELS + [
-            self.NON_GLOBAL_OPERATING_CLASS_115_CHANNEL
-        ]
-        self.channel_switch(
-            band=hostapd_constants.BAND_5G,
-            starting_channel=self.NON_GLOBAL_OPERATING_CLASS_115_CHANNEL,
-            channel_switches=channels,
-        )
-
-    # TODO(fxbug.dev/84777): This test fails.
-    def test_channel_switch_regression_global_operating_class_115_with_soft_ap(
-        self,
-    ) -> None:
-        """Test global operating class 124 channel switches, with SoftAP.
-
-        Regression test for fxbug.dev/84777.
-        """
-        channels = self.GLOBAL_OPERATING_CLASS_115_CHANNELS + [
-            self.NON_GLOBAL_OPERATING_CLASS_115_CHANNEL
-        ]
-        self.channel_switch(
-            band=hostapd_constants.BAND_5G,
-            starting_channel=self.NON_GLOBAL_OPERATING_CLASS_115_CHANNEL,
-            channel_switches=channels,
-            test_with_soft_ap=True,
-        )
-
-    # TODO(fxbug.dev/84777): This test fails.
-    def test_channel_switch_regression_global_operating_class_124(self) -> None:
-        """Switch into, through, and out of global op. class 124 channels.
-
-        Global operating class 124 is described in IEEE 802.11-2016 Table E-4.
-        Regression test for fxbug.dev/64279.
-        """
-        channels = self.GLOBAL_OPERATING_CLASS_124_CHANNELS + [
-            self.NON_GLOBAL_OPERATING_CLASS_124_CHANNEL
-        ]
-        self.channel_switch(
-            band=hostapd_constants.BAND_5G,
-            starting_channel=self.NON_GLOBAL_OPERATING_CLASS_124_CHANNEL,
-            channel_switches=channels,
-        )
-
-    # TODO(fxbug.dev/84777): This test fails.
-    def test_channel_switch_regression_global_operating_class_124_with_soft_ap(
-        self,
-    ) -> None:
-        """Test global operating class 124 channel switches, with SoftAP.
-
-        Regression test for fxbug.dev/64279.
-        """
-        channels = self.GLOBAL_OPERATING_CLASS_124_CHANNELS + [
-            self.NON_GLOBAL_OPERATING_CLASS_124_CHANNEL
-        ]
-        self.channel_switch(
-            band=hostapd_constants.BAND_5G,
-            starting_channel=self.NON_GLOBAL_OPERATING_CLASS_124_CHANNEL,
-            channel_switches=channels,
-            test_with_soft_ap=True,
-        )
-
-    def _channels_valid_for_band(self, channels: Sequence[int], band: str) -> bool:
-        """Determine if the channels are valid for the band (US only).
-
-        Args:
-            channels: channel numbers
-            band: a valid band (e.g. hostapd_constants.BAND_2G)
-        """
-        if band == hostapd_constants.BAND_2G:
-            band_channels = frozenset(hostapd_constants.US_CHANNELS_2G)
-        elif band == hostapd_constants.BAND_5G:
-            band_channels = frozenset(hostapd_constants.US_CHANNELS_5G)
-        else:
-            asserts.fail("Invalid band {}".format(band))
-        channels_set = frozenset(channels)
-        if channels_set <= band_channels:
-            return True
-        return False
-
-    def _start_soft_ap(self) -> None:
-        """Start a SoftAP on the DUT.
-
-        Raises:
-            EnvironmentError: if the SoftAP does not start
-        """
-        ssid = rand_ascii_str(10)
-        security_type = "none"
-        password = ""
-        connectivity_mode = "local_only"
-        operating_band = "any"
-
-        self.log.info("Starting SoftAP on DUT")
-
-        response = self.dut.device.sl4f.wlan_ap_policy_lib.wlanStartAccessPoint(
-            ssid, security_type, password, connectivity_mode, operating_band
-        )
-        if response.get("error"):
-            raise EnvironmentError(
-                "SL4F: Failed to setup SoftAP. Err: %s" % response["error"]
-            )
-        self.log.info("SoftAp network (%s) is up." % ssid)
-
-    def _stop_all_soft_aps(self) -> None:
-        """Stops all SoftAPs on Fuchsia Device.
-
-        Raises:
-            EnvironmentError: if SoftAP stop call fails
-        """
-        response = self.dut.device.sl4f.wlan_ap_policy_lib.wlanStopAllAccessPoint()
-        if response.get("error"):
-            raise EnvironmentError(
-                "SL4F: Failed to stop all SoftAPs. Err: %s" % response["error"]
-            )
-
-    def _client_channel(self) -> int:
-        """Determine the channel of the DUT client interface.
-
-        If the interface is not connected, the method will assert a test
-        failure.
-
-        Returns: channel number
-
-        Raises:
-            EnvironmentError: if client interface channel cannot be
-                determined
-        """
-        status = self.dut.status()
-        if status["error"]:
-            raise EnvironmentError("Could not determine client channel")
-
-        result = status["result"]
-        if isinstance(result, dict):
-            if result.get("Connected"):
-                return result["Connected"]["channel"]["primary"]
-            asserts.fail("Client interface not connected")
-        raise EnvironmentError("Could not determine client channel")
-
-    def _soft_ap_channel(self) -> int:
-        """Determine the channel of the DUT SoftAP interface.
-
-        If the interface is not connected, the method will assert a test
-        failure.
-
-        Returns: channel number
-
-        Raises:
-            EnvironmentError: if SoftAP interface channel cannot be determined.
-        """
-        iface_ids = self.dut.get_wlan_interface_id_list()
-        for iface_id in iface_ids:
-            query = self.dut.device.sl4f.wlan_lib.wlanQueryInterface(iface_id)
-            if query["error"]:
-                continue
-            query_result = query["result"]
-            if type(query_result) is dict and query_result.get("role") == "Ap":
-                status = self.dut.device.sl4f.wlan_lib.wlanStatus(iface_id)
-                if status["error"]:
-                    continue
-                status_result = status["result"]
-                if isinstance(status_result, dict):
-                    if status_result.get("Connected"):
-                        return status_result["Connected"]["channel"]["primary"]
-                    asserts.fail("SoftAP interface not connected")
-        raise EnvironmentError("Could not determine SoftAP channel")
-
-
-if __name__ == "__main__":
-    test_runner.main()
diff --git a/src/antlion/tests/wlan/functional/ConnectionStressTest.py b/src/antlion/tests/wlan/functional/ConnectionStressTest.py
deleted file mode 100644
index fa52c7f..0000000
--- a/src/antlion/tests/wlan/functional/ConnectionStressTest.py
+++ /dev/null
@@ -1,228 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-Script for testing WiFi connection and disconnection in a loop
-
-"""
-
-import time
-
-from antlion.controllers.access_point import setup_ap
-from antlion.controllers.ap_lib import hostapd_constants
-from antlion.controllers.ap_lib import hostapd_security
-from antlion.test_utils.wifi import base_test
-from antlion.test_utils.abstract_devices.wlan_device import create_wlan_device
-from antlion.utils import rand_ascii_str
-
-from mobly import signals, test_runner
-
-
-class ConnectionStressTest(base_test.WifiBaseTest):
-    # Default number of test iterations here.
-    # Override using parameter in config file.
-    # Eg: "connection_stress_test_iterations": "50"
-    num_of_iterations = 10
-    channel_2G = hostapd_constants.AP_DEFAULT_CHANNEL_2G
-    channel_5G = hostapd_constants.AP_DEFAULT_CHANNEL_5G
-
-    def setup_class(self):
-        super().setup_class()
-        self.ssid = rand_ascii_str(10)
-        self.fd = self.fuchsia_devices[0]
-        self.dut = create_wlan_device(self.fd)
-        self.access_point = self.access_points[0]
-        self.num_of_iterations = int(
-            self.user_params.get(
-                "connection_stress_test_iterations", self.num_of_iterations
-            )
-        )
-        self.log.info("iterations: %d" % self.num_of_iterations)
-
-    def teardown_test(self):
-        self.dut.reset_wifi()
-        self.download_ap_logs()
-        self.access_point.stop_all_aps()
-
-    def on_fail(self, test_name, begin_time):
-        super().on_fail(test_name, begin_time)
-        self.access_point.stop_all_aps()
-
-    def start_ap(self, profile, channel, security=None):
-        """Starts an Access Point
-
-        Args:
-            profile: Profile name such as 'whirlwind'
-            channel: Channel to operate on
-        """
-        self.log.info("Profile: %s, Channel: %d" % (profile, channel))
-        setup_ap(
-            access_point=self.access_point,
-            profile_name=profile,
-            channel=channel,
-            ssid=self.ssid,
-            security=security,
-        )
-
-    def connect_disconnect(
-        self, ap_config, ssid=None, password=None, negative_test=False
-    ):
-        """Helper to start an AP, connect DUT to it and disconnect
-
-        Args:
-            ap_config: Dictionary contaning profile name and channel
-            ssid: ssid to connect to
-            password: password for the ssid to connect to
-        """
-        security_mode = ap_config.get("security_mode", None)
-        target_security = (
-            hostapd_constants.SECURITY_STRING_TO_DEFAULT_TARGET_SECURITY.get(
-                security_mode, None
-            )
-        )
-
-        if security_mode:
-            security_profile = hostapd_security.Security(
-                security_mode=ap_config["security_mode"], password=ap_config["password"]
-            )
-        else:
-            security_profile = None
-
-        # Start AP
-        self.start_ap(
-            ap_config["profile"], ap_config["channel"], security=security_profile
-        )
-
-        failed = False
-        # Connect and Disconnect several times
-        for x in range(0, self.num_of_iterations):
-            if not ssid:
-                ssid = self.ssid
-            if negative_test:
-                if not self.dut.associate(
-                    ssid, target_pwd=password, target_security=target_security
-                ):
-                    self.log.info("Attempt %d. Did not associate as expected." % x)
-                else:
-                    self.log.error(
-                        "Attempt %d. Negative test successfully "
-                        "associated. Fail." % x
-                    )
-                    failed = True
-            else:
-                # Connect
-                if self.dut.associate(ssid, target_pwd=password):
-                    self.log.info("Attempt %d. Successfully associated" % x)
-                else:
-                    self.log.error("Attempt %d. Failed to associate." % x)
-                    failed = True
-                # Disconnect
-                self.dut.disconnect()
-
-            # Wait a second before trying again
-            time.sleep(1)
-
-        # Stop AP
-        self.access_point.stop_all_aps()
-        if failed:
-            raise signals.TestFailure("One or more association attempt failed.")
-
-    def test_whirlwind_2g(self):
-        self.connect_disconnect(
-            {"profile": "whirlwind", "channel": self.channel_2G, "security_mode": None}
-        )
-
-    def test_whirlwind_5g(self):
-        self.connect_disconnect(
-            {"profile": "whirlwind", "channel": self.channel_5G, "security_mode": None}
-        )
-
-    def test_whirlwind_11ab_2g(self):
-        self.connect_disconnect(
-            {
-                "profile": "whirlwind_11ab_legacy",
-                "channel": self.channel_2G,
-                "security_mode": None,
-            }
-        )
-
-    def test_whirlwind_11ab_5g(self):
-        self.connect_disconnect(
-            {
-                "profile": "whirlwind_11ab_legacy",
-                "channel": self.channel_5G,
-                "security_mode": None,
-            }
-        )
-
-    def test_whirlwind_11ag_2g(self):
-        self.connect_disconnect(
-            {
-                "profile": "whirlwind_11ag_legacy",
-                "channel": self.channel_2G,
-                "security_mode": None,
-            }
-        )
-
-    def test_whirlwind_11ag_5g(self):
-        self.connect_disconnect(
-            {
-                "profile": "whirlwind_11ag_legacy",
-                "channel": self.channel_5G,
-                "security_mode": None,
-            }
-        )
-
-    def test_wrong_ssid_whirlwind_2g(self):
-        self.connect_disconnect(
-            {"profile": "whirlwind", "channel": self.channel_2G, "security_mode": None},
-            ssid=rand_ascii_str(20),
-            negative_test=True,
-        )
-
-    def test_wrong_ssid_whirlwind_5g(self):
-        self.connect_disconnect(
-            {"profile": "whirlwind", "channel": self.channel_5G, "security_mode": None},
-            ssid=rand_ascii_str(20),
-            negative_test=True,
-        )
-
-    def test_wrong_password_whirlwind_2g(self):
-        self.connect_disconnect(
-            {
-                "profile": "whirlwind",
-                "channel": self.channel_2G,
-                "security_mode": hostapd_constants.WPA2_STRING,
-                "password": rand_ascii_str(10),
-            },
-            password=rand_ascii_str(20),
-            negative_test=True,
-        )
-
-    def test_wrong_password_whirlwind_5g(self):
-        self.connect_disconnect(
-            {
-                "profile": "whirlwind",
-                "channel": self.channel_5G,
-                "security_mode": hostapd_constants.WPA2_STRING,
-                "password": rand_ascii_str(10),
-            },
-            password=rand_ascii_str(20),
-            negative_test=True,
-        )
-
-
-if __name__ == "__main__":
-    test_runner.main()
diff --git a/src/antlion/tests/wlan/functional/DownloadStressTest.py b/src/antlion/tests/wlan/functional/DownloadStressTest.py
deleted file mode 100644
index 28012c3..0000000
--- a/src/antlion/tests/wlan/functional/DownloadStressTest.py
+++ /dev/null
@@ -1,192 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-Script for testing various download stress scenarios.
-
-"""
-import threading
-
-from antlion.controllers.access_point import setup_ap
-from antlion.controllers.ap_lib import hostapd_constants
-from antlion.test_utils.wifi import base_test
-from antlion.test_utils.abstract_devices.wlan_device import create_wlan_device
-from antlion.test_utils.fuchsia import utils
-from antlion.utils import rand_ascii_str
-
-from mobly import signals, test_runner
-
-
-class DownloadStressTest(base_test.WifiBaseTest):
-    # Default number of test iterations here.
-    # Override using parameter in config file.
-    # Eg: "download_stress_test_iterations": "10"
-    num_of_iterations = 3
-
-    # Timeout for download thread in seconds
-    download_timeout_s = 60 * 5
-
-    # Download urls
-    url_20MB = "http://ipv4.download.thinkbroadband.com/20MB.zip"
-    url_40MB = "http://ipv4.download.thinkbroadband.com/40MB.zip"
-    url_60MB = "http://ipv4.download.thinkbroadband.com/60MB.zip"
-    url_512MB = "http://ipv4.download.thinkbroadband.com/512MB.zip"
-
-    # Constants used in test_one_large_multiple_small_downloads
-    download_small_url = url_20MB
-    download_large_url = url_512MB
-    num_of_small_downloads = 5
-    download_threads_result = []
-
-    def setup_class(self):
-        super().setup_class()
-        self.ssid = rand_ascii_str(10)
-        self.dut = create_wlan_device(self.fuchsia_devices[0])
-        self.access_point = self.access_points[0]
-        self.num_of_iterations = int(
-            self.user_params.get(
-                "download_stress_test_iterations", self.num_of_iterations
-            )
-        )
-
-        setup_ap(
-            access_point=self.access_point,
-            profile_name="whirlwind",
-            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-            ssid=self.ssid,
-        )
-        self.dut.associate(self.ssid)
-
-    def teardown_test(self):
-        self.download_threads_result.clear()
-        self.dut.disconnect()
-        self.dut.reset_wifi()
-        self.download_ap_logs()
-        self.access_point.stop_all_aps()
-
-    def test_download_small(self):
-        self.log.info("Downloading small file")
-        return self.download_file(self.url_20MB)
-
-    def test_download_large(self):
-        return self.download_file(self.url_512MB)
-
-    def test_continuous_download(self):
-        for x in range(0, self.num_of_iterations):
-            if not self.download_file(self.url_512MB):
-                return False
-        return True
-
-    def download_file(self, url):
-        self.log.info("Start downloading: %s" % url)
-        return utils.http_file_download_by_curl(
-            self.dut.device,
-            url,
-            additional_args="--max-time %d --silent" % self.download_timeout_s,
-        )
-
-    def download_thread(self, url):
-        download_status = self.download_file(url)
-        if download_status:
-            self.log.info("Success downloading: %s" % url)
-        else:
-            self.log.info("Failure downloading: %s" % url)
-
-        self.download_threads_result.append(download_status)
-        return download_status
-
-    def test_multi_downloads(self):
-        download_urls = [self.url_20MB, self.url_40MB, self.url_60MB]
-        download_threads = []
-
-        try:
-            # Start multiple downloads at the same time
-            for index, url in enumerate(download_urls):
-                self.log.info("Create and start thread %d." % index)
-                t = threading.Thread(target=self.download_thread, args=(url,))
-                download_threads.append(t)
-                t.start()
-
-            # Wait for all threads to complete or timeout
-            for t in download_threads:
-                t.join(self.download_timeout_s)
-
-        finally:
-            is_alive = False
-
-            for index, t in enumerate(download_threads):
-                if t.isAlive():
-                    t = None
-                    is_alive = True
-
-            if is_alive:
-                raise signals.TestFailure("Thread %d timedout" % index)
-
-        for index in range(0, len(self.download_threads_result)):
-            if not self.download_threads_result[index]:
-                self.log.info("Download failed for %d" % index)
-                raise signals.TestFailure("Thread %d failed to download" % index)
-                return False
-
-        return True
-
-    def test_one_large_multiple_small_downloads(self):
-        for index in range(self.num_of_iterations):
-            download_threads = []
-            try:
-                large_thread = threading.Thread(
-                    target=self.download_thread, args=(self.download_large_url,)
-                )
-                download_threads.append(large_thread)
-                large_thread.start()
-
-                for i in range(self.num_of_small_downloads):
-                    # Start small file download
-                    t = threading.Thread(
-                        target=self.download_thread, args=(self.download_small_url,)
-                    )
-                    download_threads.append(t)
-                    t.start()
-                    # Wait for thread to exit before starting the next iteration
-                    t.join(self.download_timeout_s)
-
-                # Wait for the large file download thread to complete
-                large_thread.join(self.download_timeout_s)
-
-            finally:
-                is_alive = False
-
-                for index, t in enumerate(download_threads):
-                    if t.isAlive():
-                        t = None
-                        is_alive = True
-
-                if is_alive:
-                    raise signals.TestFailure("Thread %d timedout" % index)
-
-            for index in range(0, len(self.download_threads_result)):
-                if not self.download_threads_result[index]:
-                    self.log.info("Download failed for %d" % index)
-                    raise signals.TestFailure("Thread %d failed to download" % index)
-                    return False
-
-            # Clear results before looping again
-            self.download_threads_result.clear()
-
-        return True
-
-
-if __name__ == "__main__":
-    test_runner.main()
diff --git a/src/antlion/tests/wlan/functional/SoftApTest.py b/src/antlion/tests/wlan/functional/SoftApTest.py
deleted file mode 100644
index 471c5a3..0000000
--- a/src/antlion/tests/wlan/functional/SoftApTest.py
+++ /dev/null
@@ -1,2112 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import multiprocessing as mp
-import random
-import time
-
-from antlion import utils
-from antlion.controllers import iperf_server
-from antlion.controllers import iperf_client
-from antlion.controllers.access_point import setup_ap, AccessPoint
-from antlion.controllers.ap_lib import hostapd_constants
-from antlion.controllers.ap_lib import hostapd_security
-from antlion.controllers.ap_lib.hostapd_utils import generate_random_password
-from antlion.controllers.utils_lib.ssh import settings
-from antlion.test_utils.wifi import base_test
-from antlion.test_utils.abstract_devices.wlan_device import create_wlan_device
-
-from mobly import asserts, signals, test_runner
-
-CONNECTIVITY_MODE_LOCAL = "local_only"
-CONNECTIVITY_MODE_UNRESTRICTED = "unrestricted"
-DEFAULT_AP_PROFILE = "whirlwind"
-DEFAULT_IPERF_PORT = 5201
-DEFAULT_STRESS_TEST_ITERATIONS = 10
-DEFAULT_TIMEOUT = 30
-DEFAULT_IPERF_TIMEOUT = 60
-DEFAULT_NO_ADDR_EXPECTED_TIMEOUT = 5
-INTERFACE_ROLE_AP = "Ap"
-INTERFACE_ROLE_CLIENT = "Client"
-OPERATING_BAND_2G = "only_2_4_ghz"
-OPERATING_BAND_5G = "only_5_ghz"
-OPERATING_BAND_ANY = "any"
-SECURITY_OPEN = "none"
-SECURITY_WEP = "wep"
-SECURITY_WPA = "wpa"
-SECURITY_WPA2 = "wpa2"
-SECURITY_WPA3 = "wpa3"
-STATE_UP = True
-STATE_DOWN = False
-TEST_TYPE_ASSOCIATE_ONLY = "associate_only"
-TEST_TYPE_ASSOCIATE_AND_PING = "associate_and_ping"
-TEST_TYPE_ASSOCIATE_AND_PASS_TRAFFIC = "associate_and_pass_traffic"
-TEST_TYPES = {
-    TEST_TYPE_ASSOCIATE_ONLY,
-    TEST_TYPE_ASSOCIATE_AND_PING,
-    TEST_TYPE_ASSOCIATE_AND_PASS_TRAFFIC,
-}
-
-
-def get_test_name_from_settings(settings):
-    return settings["test_name"]
-
-
-def get_ap_params_from_config_or_default(config):
-    """Retrieves AP parameters from antlion config, or returns default settings.
-
-    Args:
-        config: dict, from antlion config, that may contain custom ap parameters
-
-    Returns:
-        dict, containing all AP parameters
-    """
-    profile = config.get("profile", DEFAULT_AP_PROFILE)
-    ssid = config.get("ssid", utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G))
-    channel = config.get("channel", hostapd_constants.AP_DEFAULT_CHANNEL_2G)
-    security_mode = config.get("security_mode", None)
-    password = config.get("password", None)
-    if security_mode:
-        if not password:
-            password = generate_random_password(security_mode=security_mode)
-        security = hostapd_security.Security(security_mode, password)
-    else:
-        security = None
-
-    return {
-        "profile": profile,
-        "ssid": ssid,
-        "channel": channel,
-        "security": security,
-        "password": password,
-    }
-
-
-def get_soft_ap_params_from_config_or_default(config):
-    """Retrieves SoftAp parameters from antlion config or returns default settings.
-
-    Args:
-        config: dict, from antlion config, that may contain custom soft ap
-            parameters
-
-    Returns:
-        dict, containing all soft AP parameters
-    """
-    ssid = config.get("ssid", utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G))
-    connectivity_mode = config.get("connectivity_mode", CONNECTIVITY_MODE_LOCAL)
-    operating_band = config.get("operating_band", OPERATING_BAND_2G)
-    security_type = config.get("security_type", SECURITY_OPEN)
-    password = config.get("password", "")
-
-    # The SoftAP API uses 'open' security instead of None, '' password
-    # instead of None, and security_type instead of security_mode, hence
-    # the difference between ap_params and soft_ap_params
-    if security_type != SECURITY_OPEN and password == "":
-        password = generate_random_password(security_mode=security_type)
-
-    return {
-        "ssid": ssid,
-        "connectivity_mode": connectivity_mode,
-        "operating_band": operating_band,
-        "security_type": security_type,
-        "password": password,
-    }
-
-
-class StressTestIterationFailure(Exception):
-    """Used to differentiate a subtest failure from an actual exception"""
-
-
-class SoftApTest(base_test.WifiBaseTest):
-    """Tests for Fuchsia SoftAP
-
-    Testbed requirement:
-    * One Fuchsia device
-    * At least one client (Android) device
-        * For multi-client tests, at least two client (Android) devices are
-          required. Test will be skipped if less than two client devices are
-          present.
-    * For any tests that exercise client-mode (e.g. toggle tests, simultaneous
-        tests), a physical AP (whirlwind) is also required. Those tests will be
-        skipped if physical AP is not present.
-    """
-
-    def setup_class(self):
-        self.soft_ap_test_params = self.user_params.get("soft_ap_test_params", {})
-        self.dut = create_wlan_device(self.fuchsia_devices[0])
-
-        # TODO(fxb/51313): Add in device agnosticity for clients
-        # Create a wlan device and iperf client for each Android client
-        self.clients = []
-        self.iperf_clients_map = {}
-        for device in self.android_devices:
-            client_wlan_device = create_wlan_device(device)
-            self.clients.append(client_wlan_device)
-            self.iperf_clients_map[
-                client_wlan_device
-            ] = client_wlan_device.create_iperf_client()
-        self.primary_client = self.clients[0]
-
-        # Create an iperf server on the DUT, which will be used for any streaming.
-        self.iperf_server_settings = settings.from_config(
-            {
-                "user": self.dut.device.ssh_username,
-                "host": self.dut.device.ip,
-                "ssh_config": self.dut.device.ssh_config,
-            }
-        )
-        self.iperf_server = iperf_server.IPerfServerOverSsh(
-            self.iperf_server_settings, DEFAULT_IPERF_PORT, use_killall=True
-        )
-        self.iperf_server.start()
-
-        # Attempt to create an ap iperf server. AP is only required for tests
-        # that use client mode.
-        try:
-            self.access_point: AccessPoint = self.access_points[0]
-            self.ap_iperf_client = iperf_client.IPerfClientOverSsh(
-                self.access_point.ssh_settings
-            )
-        except AttributeError:
-            self.access_point = None
-            self.ap_iperf_client = None
-
-        self.iperf_clients_map[self.access_point] = self.ap_iperf_client
-
-    def teardown_class(self):
-        # Because this is using killall, it will stop all iperf processes
-        self.iperf_server.stop()
-
-    def setup_test(self):
-        for ad in self.android_devices:
-            ad.droid.wakeLockAcquireBright()
-            ad.droid.wakeUpNow()
-        for client in self.clients:
-            client.disconnect()
-            client.reset_wifi()
-            client.wifi_toggle_state(True)
-        self.stop_all_soft_aps()
-        if self.access_point:
-            self.access_point.stop_all_aps()
-        self.dut.disconnect()
-
-    def teardown_test(self):
-        for client in self.clients:
-            client.disconnect()
-        for ad in self.android_devices:
-            ad.droid.wakeLockRelease()
-            ad.droid.goToSleepNow()
-        self.stop_all_soft_aps()
-        if self.access_point:
-            self.download_ap_logs()
-            self.access_point.stop_all_aps()
-        self.dut.disconnect()
-
-    def start_soft_ap(self, settings):
-        """Starts a softAP on Fuchsia device.
-
-        Args:
-            settings: a dict containing softAP configuration params
-                ssid: string, SSID of softAP network
-                security_type: string, security type of softAP network
-                    - 'none', 'wep', 'wpa', 'wpa2', 'wpa3'
-                password: string, password if applicable
-                connectivity_mode: string, connecitivity_mode for softAP
-                    - 'local_only', 'unrestricted'
-                operating_band: string, band for softAP network
-                    - 'any', 'only_5_ghz', 'only_2_4_ghz'
-        """
-        ssid = settings["ssid"]
-        security_type = settings["security_type"]
-        password = settings.get("password", "")
-        connectivity_mode = settings["connectivity_mode"]
-        operating_band = settings["operating_band"]
-
-        self.log.info("Starting SoftAP on DUT with settings: %s" % settings)
-
-        response = self.dut.device.sl4f.wlan_ap_policy_lib.wlanStartAccessPoint(
-            ssid, security_type, password, connectivity_mode, operating_band
-        )
-        if response.get("error"):
-            raise EnvironmentError(
-                "SL4F: Failed to setup SoftAP. Err: %s" % response["error"]
-            )
-
-        self.log.info("SoftAp network (%s) is up." % ssid)
-
-    def stop_soft_ap(self, settings):
-        """Stops a specific SoftAP On Fuchsia device.
-
-        Args:
-            settings: a dict containing softAP config params (see start_soft_ap)
-                for details
-
-        Raises:
-            EnvironmentError, if StopSoftAP call fails.
-        """
-        ssid = settings["ssid"]
-        security_type = settings["security_type"]
-        password = settings.get("password", "")
-
-        response = self.dut.device.sl4f.wlan_ap_policy_lib.wlanStopAccessPoint(
-            ssid, security_type, password
-        )
-        if response.get("error"):
-            raise EnvironmentError(
-                "SL4F: Failed to stop SoftAP. Err: %s" % response["error"]
-            )
-
-    def stop_all_soft_aps(self):
-        """Stops all SoftAPs on Fuchsia Device.
-
-        Raises:
-            EnvironmentError, if StopAllAps call fails.
-        """
-        response = self.dut.device.sl4f.wlan_ap_policy_lib.wlanStopAllAccessPoint()
-        if response.get("error"):
-            raise EnvironmentError(
-                "SL4F: Failed to stop all SoftAPs. Err: %s" % response["error"]
-            )
-
-    def associate_with_soft_ap(self, device, soft_ap_settings):
-        """Associates client device with softAP on Fuchsia device.
-
-        Args:
-            device: wlan_device to associate with the softAP
-            settings: a dict containing softAP config params (see start_soft_ap)
-                for details
-
-        Raises:
-            TestFailure, if association fails
-        """
-        self.log.info(
-            "Attempting to associate client %s with SoftAP on FuchsiaDevice "
-            "(%s)." % (device.identifier, self.dut.identifier)
-        )
-
-        check_connectivity = (
-            soft_ap_settings["connectivity_mode"] == CONNECTIVITY_MODE_UNRESTRICTED
-        )
-        associated = device.associate(
-            soft_ap_settings["ssid"],
-            target_pwd=soft_ap_settings.get("password"),
-            target_security=hostapd_constants.SECURITY_STRING_TO_DEFAULT_TARGET_SECURITY.get(
-                soft_ap_settings["security_type"], None
-            ),
-            check_connectivity=check_connectivity,
-        )
-
-        if not associated:
-            self.log.error("Failed to connect to SoftAp.")
-            return False
-
-        self.log.info("Client successfully associated with SoftAP.")
-        return True
-
-    def disconnect_from_soft_ap(self, device):
-        """Disconnects client device from SoftAP.
-
-        Args:
-            device: wlan_device to disconnect from SoftAP
-        """
-        self.log.info("Disconnecting device %s from SoftAP." % device.identifier)
-        device.disconnect()
-
-    def get_device_test_interface(self, device, role=None, channel=None):
-        """Retrieves test interface from a provided device, which can be the
-        FuchsiaDevice DUT, the AccessPoint, or an AndroidClient.
-
-        Args:
-            device: the device do get the test interface from. Either
-                FuchsiaDevice (DUT), Android client, or AccessPoint.
-            role: str, either "client" or "ap". Required for FuchsiaDevice (DUT)
-            channel: int, channel of the ap network. Required for AccessPoint.
-
-        Returns:
-            String, name of test interface on given device.
-        """
-
-        if device is self.dut:
-            device.device.wlan_controller.update_wlan_interfaces()
-            if role == INTERFACE_ROLE_CLIENT:
-                return device.device.wlan_client_test_interface_name
-            elif role == INTERFACE_ROLE_AP:
-                return device.device.wlan_ap_test_interface_name
-            else:
-                raise ValueError("Unsupported interface role: %s" % role)
-        elif isinstance(device, AccessPoint):
-            if not channel:
-                raise ValueError("Must provide a channel to get AccessPoint interface")
-            if channel < 36:
-                return device.wlan_2g
-            else:
-                return device.wlan_5g
-        else:
-            return device.get_default_wlan_test_interface()
-
-    def wait_for_ipv4_address(self, device, interface_name, timeout=DEFAULT_TIMEOUT):
-        """Waits for interface on a wlan_device to get an ipv4 address.
-
-        Args:
-            device: wlan_device or AccessPoint to check interface
-            interface_name: name of the interface to check
-            timeout: seconds to wait before raising an error
-
-        Raises:
-            ValueError, if interface does not have an ipv4 address after timeout
-        """
-        if isinstance(device, AccessPoint):
-            comm_channel = device.ssh
-        else:
-            comm_channel = device.device
-        end_time = time.time() + timeout
-        while time.time() < end_time:
-            ips = utils.get_interface_ip_addresses(comm_channel, interface_name)
-            if len(ips["ipv4_private"]) > 0:
-                self.log.info(
-                    "Device %s interface %s has ipv4 address %s"
-                    % (device.identifier, interface_name, ips["ipv4_private"][0])
-                )
-                return ips["ipv4_private"][0]
-            else:
-                time.sleep(1)
-        raise ConnectionError(
-            "After %s seconds, device %s still does not have an ipv4 address "
-            "on interface %s." % (timeout, device.identifier, interface_name)
-        )
-
-    def device_can_ping_addr(self, device, dest_ip, timeout=DEFAULT_TIMEOUT):
-        """Verify wlan_device can ping a destination ip.
-
-        Args:
-            device: wlan_device to initiate ping
-            dest_ip: ip to ping from wlan_device
-
-        Raises:
-            TestFailure, if ping fails
-        """
-        end_time = time.time() + timeout
-        while time.time() < end_time:
-            with utils.SuppressLogOutput():
-                ping_result = device.can_ping(dest_ip)
-
-            if ping_result:
-                self.log.info(
-                    "Ping successful from device %s to dest ip %s."
-                    % (device.identifier, dest_ip)
-                )
-                return True
-            else:
-                self.log.debug(
-                    "Device %s could not ping dest ip %s. Retrying in 1 second."
-                    % (device.identifier, dest_ip)
-                )
-                time.sleep(1)
-        else:
-            self.log.info(
-                "Failed to ping from device %s to dest ip %s."
-                % (device.identifier, dest_ip)
-            )
-            return False
-
-    def run_iperf_traffic(self, ip_client, server_address, server_port=5201):
-        """Runs traffic between client and ap an verifies throughput.
-
-        Args:
-            ip_client: iperf client to use
-            server_address: ipv4 address of the iperf server to use
-            server_port: port of the iperf server
-
-        Raises:
-            TestFailure, if no traffic passes in either direction
-        """
-        ip_client_identifier = self.get_iperf_client_identifier(ip_client)
-
-        self.log.info(
-            "Running traffic from iperf client %s to iperf server %s."
-            % (ip_client_identifier, server_address)
-        )
-        client_to_ap_path = ip_client.start(
-            server_address, "-i 1 -t 10 -J -p %s" % server_port, "client_to_soft_ap"
-        )
-
-        client_to_ap_result = iperf_server.IPerfResult(client_to_ap_path)
-        if not client_to_ap_result.avg_receive_rate:
-            raise ConnectionError(
-                "Failed to pass traffic from iperf client %s to iperf server %s."
-                % (ip_client_identifier, server_address)
-            )
-
-        self.log.info(
-            "Passed traffic from iperf client %s to iperf server %s with avg "
-            "rate of %s MB/s."
-            % (
-                ip_client_identifier,
-                server_address,
-                client_to_ap_result.avg_receive_rate,
-            )
-        )
-
-        self.log.info(
-            "Running traffic from iperf server %s to iperf client %s."
-            % (server_address, ip_client_identifier)
-        )
-        ap_to_client_path = ip_client.start(
-            server_address, "-i 1 -t 10 -R -J -p %s" % server_port, "soft_ap_to_client"
-        )
-
-        ap_to_client_result = iperf_server.IPerfResult(ap_to_client_path)
-        if not ap_to_client_result.avg_receive_rate:
-            raise ConnectionError(
-                "Failed to pass traffic from iperf server %s to iperf client %s."
-                % (server_address, ip_client_identifier)
-            )
-
-        self.log.info(
-            "Passed traffic from iperf server %s to iperf client %s with avg "
-            "rate of %s MB/s."
-            % (
-                server_address,
-                ip_client_identifier,
-                ap_to_client_result.avg_receive_rate,
-            )
-        )
-
-    def run_iperf_traffic_parallel_process(
-        self, ip_client, server_address, error_queue, server_port=5201
-    ):
-        """Executes run_iperf_traffic using a queue to capture errors. Used
-        when running iperf in a parallel process.
-
-        Args:
-            ip_client: iperf client to use
-            server_address: ipv4 address of the iperf server to use
-            error_queue: multiprocessing queue to capture errors
-            server_port: port of the iperf server
-        """
-        try:
-            self.run_iperf_traffic(ip_client, server_address, server_port=server_port)
-        except ConnectionError as err:
-            error_queue.put(
-                "In iperf process from %s to %s: %s"
-                % (self.get_iperf_client_identifier(ip_client), server_address, err)
-            )
-
-    def get_iperf_client_identifier(self, ip_client):
-        """Retrieves an indentifer string from iperf client, for logging.
-
-        Args:
-            ip_client: iperf client to grab identifier from
-        """
-        if type(ip_client) == iperf_client.IPerfClientOverAdb:
-            return ip_client._android_device_or_serial.serial
-        return ip_client._ssh_settings.hostname
-
-    def device_is_connected_to_ap(
-        self, client, ap, channel=None, check_traffic=False, timeout=DEFAULT_TIMEOUT
-    ):
-        """Returns whether client device can ping (and optionally pass traffic)
-        to the ap device.
-
-        Args:
-            client: device that should be associated. Either FuchsiaDevice (DUT)
-                or Android client
-            ap: device acting as AP. Either FuchsiaDevice (DUT) or AccessPoint.
-            channel: int, channel the AP is using. Required if ap is an
-                AccessPoint object.
-            check_traffic: bool, whether to attempt to pass traffic between
-                client and ap devices.
-            timeout: int, time in seconds to wait for devices to have ipv4
-                addresses
-        """
-        try:
-            # Get interfaces
-            client_interface = self.get_device_test_interface(
-                client, INTERFACE_ROLE_CLIENT
-            )
-            ap_interface = self.get_device_test_interface(
-                ap, role=INTERFACE_ROLE_AP, channel=channel
-            )
-
-            # Get addresses
-            client_ipv4 = self.wait_for_ipv4_address(
-                client, client_interface, timeout=timeout
-            )
-            ap_ipv4 = self.wait_for_ipv4_address(ap, ap_interface, timeout=timeout)
-        except ConnectionError as err:
-            self.log.error("Failed to retrieve interfaces and addresses. Err: %s" % err)
-            return False
-
-        if not self.device_can_ping_addr(client, ap_ipv4):
-            self.log.error("Failed to ping from client to ap.")
-            return False
-
-        if not self.device_can_ping_addr(ap, client_ipv4):
-            self.log.error("Failed to ping from ap to client.")
-            return False
-
-        if check_traffic:
-            try:
-                if client is self.dut:
-                    self.run_iperf_traffic(self.iperf_clients_map[ap], client_ipv4)
-                else:
-                    self.run_iperf_traffic(self.iperf_clients_map[client], ap_ipv4)
-            except ConnectionError as err:
-                self.log.error("Failed to run traffic between DUT and AP.")
-                return False
-        return True
-
-    def verify_soft_ap_connectivity_from_state(self, state, client):
-        """Verifies SoftAP state based on a client connection.
-
-        Args:
-            state: bool, whether SoftAP should be up
-            client: SoftApClient, to verify connectivity (or lack therof)
-        """
-        if state == STATE_UP:
-            return self.device_is_connected_to_ap(client, self.dut)
-        else:
-            with utils.SuppressLogOutput():
-                try:
-                    return not self.device_is_connected_to_ap(
-                        client, self.dut, timeout=DEFAULT_NO_ADDR_EXPECTED_TIMEOUT
-                    )
-                # Allow a failed to find ap interface error
-                except LookupError as err:
-                    self.log.debug("Hit expected LookupError: %s" % err)
-                    return True
-
-    def verify_client_mode_connectivity_from_state(self, state, channel):
-        """Verifies client mode state based on DUT-AP connection.
-
-        Args:
-            state: bool, whether client mode should be up
-            channel: int, channel of the APs network
-        """
-        if state == STATE_UP:
-            return self.device_is_connected_to_ap(
-                self.dut, self.access_point, channel=channel
-            )
-        else:
-            with utils.SuppressLogOutput():
-                try:
-                    return not self.device_is_connected_to_ap(
-                        self.dut,
-                        self.access_point,
-                        channel=channel,
-                        timeout=DEFAULT_NO_ADDR_EXPECTED_TIMEOUT,
-                    )
-                # Allow a failed to find client interface error
-                except LookupError as err:
-                    self.log.debug("Hit expected LookupError: %s" % err)
-                    return True
-
-    # Test Types
-
-    def verify_soft_ap_associate_only(self, client, soft_ap_settings):
-        if not self.associate_with_soft_ap(client, soft_ap_settings):
-            asserts.fail("Failed to associate client with SoftAP.")
-
-    def verify_soft_ap_associate_and_ping(self, client, soft_ap_settings):
-        self.verify_soft_ap_associate_only(client, soft_ap_settings)
-        if not self.device_is_connected_to_ap(client, self.dut):
-            asserts.fail("Client and SoftAP could not ping eachother.")
-
-    def verify_soft_ap_associate_and_pass_traffic(self, client, settings):
-        self.verify_soft_ap_associate_only(client, settings)
-        if not self.device_is_connected_to_ap(client, self.dut, check_traffic=True):
-            asserts.fail(
-                "Client and SoftAP not responding to pings and passing traffic "
-                "as expected."
-            )
-
-    # Runners for Generated Test Cases
-
-    def run_soft_ap_association_stress_test(self, settings):
-        """Sets up a SoftAP, and repeatedly associates and disassociates a
-        client.
-
-        Args:
-            settings: test configuration settings, see
-                test_soft_ap_association_stress for details
-        """
-        client = settings["client"]
-        soft_ap_params = settings["soft_ap_params"]
-        test_type = settings["test_type"]
-        if not test_type in TEST_TYPES:
-            raise ValueError("Unrecognized test type %s" % test_type)
-        iterations = settings["iterations"]
-        self.log.info(
-            "Running association stress test type %s in iteration %s times"
-            % (test_type, iterations)
-        )
-
-        self.start_soft_ap(soft_ap_params)
-
-        passed_count = 0
-        for run in range(iterations):
-            try:
-                self.log.info("Starting SoftAp association run %s" % str(run + 1))
-
-                if test_type == TEST_TYPE_ASSOCIATE_ONLY:
-                    self.verify_soft_ap_associate_only(client, soft_ap_params)
-
-                elif test_type == TEST_TYPE_ASSOCIATE_AND_PING:
-                    self.verify_soft_ap_associate_and_ping(client, soft_ap_params)
-
-                elif test_type == TEST_TYPE_ASSOCIATE_AND_PASS_TRAFFIC:
-                    self.verify_soft_ap_associate_and_pass_traffic(
-                        client, soft_ap_params
-                    )
-
-                else:
-                    raise AttributeError("Invalid test type: %s" % test_type)
-
-            except signals.TestFailure as err:
-                self.log.error(
-                    "SoftAp association stress run %s failed. Err: %s"
-                    % (str(run + 1), err.details)
-                )
-            else:
-                self.log.info(
-                    "SoftAp association stress run %s successful." % str(run + 1)
-                )
-                passed_count += 1
-
-        if passed_count < iterations:
-            asserts.fail(
-                "SoftAp association stress test passed on %s/%s runs."
-                % (passed_count, iterations)
-            )
-
-        asserts.explicit_pass(
-            "SoftAp association stress test passed on %s/%s runs."
-            % (passed_count, iterations)
-        )
-
-    # Alternate SoftAP and Client mode test
-
-    def run_soft_ap_and_client_mode_alternating_test(self, settings):
-        """Runs a single soft_ap and client alternating stress test.
-
-        See test_soft_ap_and_client_mode_alternating_stress for details.
-        """
-        iterations = settings["iterations"]
-        pass_count = 0
-        current_soft_ap_state = STATE_DOWN
-        current_client_mode_state = STATE_DOWN
-
-        self.client_mode_toggle_pre_test(settings)
-        for iteration in range(iterations):
-            passes = True
-
-            # Attempt to toggle SoftAP on, then off. If the first toggle fails
-            # to occur, exit early.
-            for _ in range(2):
-                (current_soft_ap_state, err) = self.run_toggle_iteration_func(
-                    self.soft_ap_toggle_test_iteration, settings, current_soft_ap_state
-                )
-                if err:
-                    self.log.error(
-                        "Iteration %s failed. Err: %s" % (str(iteration + 1), err)
-                    )
-                    passes = False
-                if current_soft_ap_state == STATE_DOWN:
-                    break
-
-            # Attempt to toggle Client mode on, then off. If the first toggle,
-            # fails to occur, exit early.
-            for _ in range(2):
-                (current_client_mode_state, err) = self.run_toggle_iteration_func(
-                    self.client_mode_toggle_test_iteration,
-                    settings,
-                    current_client_mode_state,
-                )
-                if err:
-                    self.log.error(
-                        "Iteration %s failed. Err: %s" % (str(iteration + 1), err)
-                    )
-                    passes = False
-                if current_client_mode_state == STATE_DOWN:
-                    break
-
-            if passes:
-                pass_count += 1
-
-        if pass_count == iterations:
-            asserts.explicit_pass(
-                "Toggle SoftAP and client mode stress test passed %s/%s times."
-                % (pass_count, iterations)
-            )
-        else:
-            asserts.fail(
-                "Toggle SoftAP and client mode stress test only passed %s/%s "
-                "times." % (pass_count, iterations)
-            )
-
-    # Toggle Stress Test Helper Functions
-
-    def run_toggle_stress_test(self, settings):
-        """Runner function for toggle stress tests.
-
-        Repeats some test function through stress test iterations, logging
-        failures, tracking pass rate, managing states, etc.
-
-        Args:
-            settings: dict, stress test settings
-
-        Asserts:
-            PASS: if all iterations of the test function pass
-            FAIL: if any iteration of the test function fails
-        """
-        test_runner_func = settings["test_runner_func"]
-        pre_test_func = settings.get("pre_test_func", None)
-        iterations = settings["iterations"]
-        if pre_test_func:
-            pre_test_func(settings)
-
-        pass_count = 0
-        current_state = STATE_DOWN
-        for iteration in range(iterations):
-            (current_state, err) = self.run_toggle_iteration_func(
-                test_runner_func, settings, current_state
-            )
-            if err:
-                self.log.error(
-                    "Iteration %s failed. Err: %s" % (str(iteration + 1), err)
-                )
-            else:
-                pass_count += 1
-
-        if pass_count == iterations:
-            asserts.explicit_pass(
-                "Stress test passed %s/%s times." % (pass_count, iterations)
-            )
-        else:
-            asserts.fail(
-                "Stress test only passed %s/%s " "times." % (pass_count, iterations)
-            )
-
-    def run_toggle_iteration_func(self, func, settings, current_state):
-        """Runs a toggle iteration function, updating the current state
-        based on what the toggle iteration function raises.
-
-        Used for toggle stress tests.
-
-        Note on EnvironmentError vs StressTestIterationFailure:
-            StressTestIterationFailure is raised by func when the toggle occurs
-                but connectivty or some other post-toggle check fails (i.e. the
-                next iteration should toggle to the next state.)
-
-            EnvironmentError is raise by func when the toggle itself fails (i.e
-                the next iteration should retry the same toggle again.)
-
-        Args:
-            func: toggle iteration func to run (e.g soft_ap_toggle_iteration)
-            settings: dict, stress test settings
-            current_state: bool, the current state of the mode being toggled
-
-        Returns:
-            (new_state, err):
-                new_state: bool, state of the mode after toggle attempt
-                err: exception, if any are raise, else None
-        """
-        try:
-            func(settings, current_state)
-        except EnvironmentError as err:
-            return (current_state, err)
-        except StressTestIterationFailure as err:
-            return (not current_state, err)
-        else:
-            return (not current_state, None)
-
-    # Stress Test Toggle Functions
-
-    def start_soft_ap_and_verify_connected(self, client, soft_ap_params):
-        """Sets up SoftAP, associates a client, then verifies connection.
-
-        Args:
-            client: SoftApClient, client to use to verify SoftAP
-            soft_ap_params: dict, containing parameters to setup softap
-
-        Raises:
-            StressTestIterationFailure, if toggle occurs, but connection
-            is not functioning as expected
-        """
-        # Change SSID every time, to avoid client connection issues.
-        soft_ap_params["ssid"] = utils.rand_ascii_str(
-            hostapd_constants.AP_SSID_LENGTH_2G
-        )
-        self.start_soft_ap(soft_ap_params)
-        associated = self.associate_with_soft_ap(client, soft_ap_params)
-        if not associated:
-            raise StressTestIterationFailure(
-                "Failed to associated client to DUT SoftAP. "
-                "Continuing with iterations."
-            )
-
-        if not self.verify_soft_ap_connectivity_from_state(STATE_UP, client):
-            raise StressTestIterationFailure(
-                "Failed to ping between client and DUT. Continuing " "with iterations."
-            )
-
-    def stop_soft_ap_and_verify_disconnected(self, client, soft_ap_params):
-        """Tears down SoftAP, and verifies connection is down.
-
-        Args:
-            client: SoftApClient, client to use to verify SoftAP
-            soft_ap_params: dict, containing parameters of SoftAP to teardown
-
-        Raise:
-            EnvironmentError, if client and AP can still communicate
-        """
-        self.log.info("Stopping SoftAP on DUT.")
-        self.stop_soft_ap(soft_ap_params)
-
-        if not self.verify_soft_ap_connectivity_from_state(STATE_DOWN, client):
-            raise EnvironmentError(
-                "Client can still ping DUT. Continuing with " "iterations."
-            )
-
-    def start_client_mode_and_verify_connected(self, ap_params):
-        """Connects DUT to AP in client mode and verifies connection
-
-        Args:
-            ap_params: dict, containing parameters of the AP network
-
-        Raises:
-            EnvironmentError, if DUT fails to associate altogether
-            StressTestIterationFailure, if DUT associates but connection is not
-                functioning as expected.
-        """
-        ap_ssid = ap_params["ssid"]
-        ap_password = ap_params["password"]
-        ap_channel = ap_params["channel"]
-        ap_security = ap_params.get("security")
-
-        if ap_security:
-            ap_security_mode = ap_security.security_mode_string
-        else:
-            ap_security_mode = None
-
-        self.log.info("Associating DUT with AP network: %s" % ap_ssid)
-        associated = self.dut.associate(
-            target_ssid=ap_ssid,
-            target_pwd=ap_password,
-            target_security=hostapd_constants.SECURITY_STRING_TO_DEFAULT_TARGET_SECURITY.get(
-                ap_security_mode, None
-            ),
-        )
-        if not associated:
-            raise EnvironmentError("Failed to associate DUT in client mode.")
-        else:
-            self.log.info("Association successful.")
-
-        if not self.verify_client_mode_connectivity_from_state(STATE_UP, ap_channel):
-            raise StressTestIterationFailure("Failed to ping AP from DUT.")
-
-    def stop_client_mode_and_verify_disconnected(self, ap_params):
-        """Disconnects DUT from AP and verifies connection is down.
-
-        Args:
-            ap_params: dict, containing parameters of the AP network
-
-        Raises:
-            EnvironmentError, if DUT and AP can still communicate
-        """
-        self.log.info("Disconnecting DUT from AP.")
-        self.dut.disconnect()
-        if not self.verify_client_mode_connectivity_from_state(
-            STATE_DOWN, ap_params["channel"]
-        ):
-            raise EnvironmentError("DUT can still ping AP.")
-
-    # Toggle Stress Test Iteration and Pre-Test Functions
-
-    # SoftAP Toggle Stress Test Helper Functions
-
-    def soft_ap_toggle_test_iteration(self, settings, current_state):
-        """Runs a single iteration of SoftAP toggle stress test
-
-        Args:
-            settings: dict, containing test settings
-            current_state: bool, current state of SoftAP (True if up,
-                else False)
-
-        Raises:
-            StressTestIterationFailure, if toggle occurs but mode isn't
-                functioning correctly.
-            EnvironmentError, if toggle fails to occur at all
-        """
-        soft_ap_params = settings["soft_ap_params"]
-        self.log.info("Toggling SoftAP %s." % ("down" if current_state else "up"))
-
-        if current_state == STATE_DOWN:
-            self.start_soft_ap_and_verify_connected(self.primary_client, soft_ap_params)
-
-        else:
-            self.stop_soft_ap_and_verify_disconnected(
-                self.primary_client, soft_ap_params
-            )
-
-    # Client Mode Toggle Stress Test Helper Functions
-
-    def client_mode_toggle_pre_test(self, settings):
-        """Prepares the AP before client mode toggle tests
-
-        Args:
-            settings: dict, stress test settings
-
-        Raises:
-            ConnectionError, if AP setup fails
-        """
-        ap_params = settings["ap_params"]
-        ap_channel = ap_params["channel"]
-        ap_profile = ap_params.pop("profile")
-        self.log.info("Setting up AP with params: %s" % ap_params)
-        setup_ap(access_point=self.access_point, profile_name=ap_profile, **ap_params)
-        # Confirms AP assigned itself an address
-        ap_interface = self.get_device_test_interface(
-            self.access_point, channel=ap_channel
-        )
-        self.wait_for_ipv4_address(self.access_point, ap_interface)
-
-    def client_mode_toggle_test_iteration(self, settings, current_state):
-        """Runs a single iteration of client mode toggle stress test
-
-        Args:
-            settings: dict, containing test settings
-            current_state: bool, current state of client mode (True if up,
-                else False)
-
-        Raises:
-            StressTestIterationFailure, if toggle occurs but mode isn't
-                functioning correctly.
-            EnvironmentError, if toggle fails to occur at all
-        """
-        ap_params = settings["ap_params"]
-        self.log.info("Toggling client mode %s" % ("off" if current_state else "on"))
-
-        if current_state == STATE_DOWN:
-            self.start_client_mode_and_verify_connected(ap_params)
-
-        else:
-            self.stop_client_mode_and_verify_disconnected(ap_params)
-
-    # Toggle SoftAP with Client Mode Up Test Helper Functions
-
-    def soft_ap_toggle_with_client_mode_pre_test(self, settings):
-        """Sets up and verifies client mode before SoftAP toggle test.
-        Args:
-            settings: dict, stress test settings
-
-        Raises:
-            ConnectionError, if client mode setup fails
-        """
-        self.client_mode_toggle_pre_test(settings)
-        try:
-            self.start_client_mode_and_verify_connected(settings["ap_params"])
-        except StressTestIterationFailure as err:
-            # This prevents it being treated as a routine error
-            raise ConnectionError(
-                "Failed to set up DUT client mode before SoftAP toggle test."
-                "Err: %s" % err
-            )
-
-    def soft_ap_toggle_with_client_mode_iteration(
-        self,
-        settings,
-        current_state,
-    ):
-        """Runs single iteration of SoftAP toggle stress with client mode test.
-
-        Args:
-            settings: dict, containing test settings
-            current_state: bool, current state of SoftAP (True if up,
-                else False)
-
-        Raises:
-            StressTestIterationFailure, if toggle occurs but mode isn't
-                functioning correctly.
-            EnvironmentError, if toggle fails to occur at all
-        """
-        ap_params = settings["ap_params"]
-        ap_channel = ap_params["channel"]
-        self.soft_ap_toggle_test_iteration(settings, current_state)
-        if not self.device_is_connected_to_ap(
-            self.dut, self.access_point, channel=ap_channel
-        ):
-            raise StressTestIterationFailure(
-                "DUT client mode is no longer functional after SoftAP toggle."
-            )
-
-    # Toggle Client Mode with SoftAP Up Test Helper Functions
-
-    def client_mode_toggle_with_soft_ap_pre_test(self, settings):
-        """Sets up and verifies softap before client mode toggle test.
-        Args:
-            settings: dict, stress test settings
-
-        Raises:
-            ConnectionError, if softap setup fails
-        """
-        self.client_mode_toggle_pre_test(settings)
-        try:
-            self.start_soft_ap_and_verify_connected(
-                self.primary_client, settings["soft_ap_params"]
-            )
-        except StressTestIterationFailure as err:
-            # This prevents it being treated as a routine error
-            raise ConnectionError(
-                "Failed to set up SoftAP before client mode toggle test. Err: %s" % err
-            )
-
-    def client_mode_toggle_with_soft_ap_iteration(self, settings, current_state):
-        """Runs single iteration of client mode toggle stress with SoftAP test.
-
-        Args:
-            settings: dict, containing test settings
-            current_state: bool, current state of client mode (True if up,
-                else False)
-
-        Raises:
-            StressTestIterationFailure, if toggle occurs but mode isn't
-                functioning correctly.
-            EnvironmentError, if toggle fails to occur at all
-        """
-        self.client_mode_toggle_test_iteration(settings, current_state)
-        if not self.device_is_connected_to_ap(self.primary_client, self.dut):
-            raise StressTestIterationFailure(
-                "SoftAP is no longer functional after client mode toggle."
-            )
-
-    # Toggle SoftAP and Client Mode Randomly
-
-    def run_soft_ap_and_client_mode_random_toggle_stress_test(self, settings):
-        """Runner function for SoftAP and client mode random toggle tests.
-
-        Each iteration, randomly chooses if a mode will be toggled or not.
-
-        Args:
-            settings: dict, containing test settings
-        """
-        iterations = settings["iterations"]
-        pass_count = 0
-        current_soft_ap_state = STATE_DOWN
-        current_client_mode_state = STATE_DOWN
-        ap_channel = settings["ap_params"]["channel"]
-
-        self.client_mode_toggle_pre_test(settings)
-        for iteration in range(iterations):
-            self.log.info(
-                "Starting iteration %s out of %s." % (str(iteration + 1), iterations)
-            )
-            passes = True
-
-            # Randomly determine if softap, client mode, or both should
-            # be toggled.
-            rand_toggle_choice = random.randrange(0, 3)
-            if rand_toggle_choice <= 1:
-                (current_soft_ap_state, err) = self.run_toggle_iteration_func(
-                    self.soft_ap_toggle_test_iteration, settings, current_soft_ap_state
-                )
-                if err:
-                    self.log.error(
-                        "Iteration %s failed toggling SoftAP. Err: %s"
-                        % (str(iteration + 1), err)
-                    )
-                    passes = False
-            if rand_toggle_choice >= 1:
-                (current_client_mode_state, err) = self.run_toggle_iteration_func(
-                    self.client_mode_toggle_test_iteration,
-                    settings,
-                    current_client_mode_state,
-                )
-                if err:
-                    self.log.error(
-                        "Iteration %s failed toggling client mode. Err: %s"
-                        % (str(iteration + 1), err)
-                    )
-                    passes = False
-
-            soft_ap_verified = self.verify_soft_ap_connectivity_from_state(
-                current_soft_ap_state, self.primary_client
-            )
-            client_mode_verified = self.verify_client_mode_connectivity_from_state(
-                current_client_mode_state, ap_channel
-            )
-
-            if not soft_ap_verified or not client_mode_verified:
-                passes = False
-            if passes:
-                pass_count += 1
-
-        if pass_count == iterations:
-            asserts.explicit_pass(
-                "Stress test passed %s/%s times." % (pass_count, iterations)
-            )
-        else:
-            asserts.fail(
-                "Stress test only passed %s/%s " "times." % (pass_count, iterations)
-            )
-
-    # Test Cases
-
-    def test_soft_ap_2g_open_local(self):
-        soft_ap_params = {
-            "ssid": utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G),
-            "security_type": SECURITY_OPEN,
-            "connectivity_mode": CONNECTIVITY_MODE_LOCAL,
-            "operating_band": OPERATING_BAND_2G,
-        }
-        self.start_soft_ap(soft_ap_params)
-        self.verify_soft_ap_associate_and_pass_traffic(
-            self.primary_client, soft_ap_params
-        )
-
-    def test_soft_ap_5g_open_local(self):
-        soft_ap_params = {
-            "ssid": utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_5G),
-            "security_type": SECURITY_OPEN,
-            "connectivity_mode": CONNECTIVITY_MODE_LOCAL,
-            "operating_band": OPERATING_BAND_5G,
-        }
-        self.start_soft_ap(soft_ap_params)
-        self.verify_soft_ap_associate_and_pass_traffic(
-            self.primary_client, soft_ap_params
-        )
-
-    def test_soft_ap_any_open_local(self):
-        soft_ap_params = {
-            "ssid": utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_5G),
-            "security_type": SECURITY_OPEN,
-            "connectivity_mode": CONNECTIVITY_MODE_LOCAL,
-            "operating_band": OPERATING_BAND_ANY,
-        }
-        self.start_soft_ap(soft_ap_params)
-        self.verify_soft_ap_associate_and_pass_traffic(
-            self.primary_client, soft_ap_params
-        )
-
-    def test_soft_ap_2g_wep_local(self):
-        soft_ap_params = {
-            "ssid": utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G),
-            "security_type": SECURITY_WEP,
-            "password": generate_random_password(security_mode=SECURITY_WEP),
-            "connectivity_mode": CONNECTIVITY_MODE_LOCAL,
-            "operating_band": OPERATING_BAND_2G,
-        }
-        self.start_soft_ap(soft_ap_params)
-        self.verify_soft_ap_associate_and_pass_traffic(
-            self.primary_client, soft_ap_params
-        )
-
-    def test_soft_ap_5g_wep_local(self):
-        soft_ap_params = {
-            "ssid": utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_5G),
-            "security_type": SECURITY_WEP,
-            "password": generate_random_password(security_mode=SECURITY_WEP),
-            "connectivity_mode": CONNECTIVITY_MODE_LOCAL,
-            "operating_band": OPERATING_BAND_5G,
-        }
-        self.start_soft_ap(soft_ap_params)
-        self.verify_soft_ap_associate_and_pass_traffic(
-            self.primary_client, soft_ap_params
-        )
-
-    def test_soft_ap_any_wep_local(self):
-        soft_ap_params = {
-            "ssid": utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_5G),
-            "security_type": SECURITY_WEP,
-            "password": generate_random_password(security_mode=SECURITY_WEP),
-            "connectivity_mode": CONNECTIVITY_MODE_LOCAL,
-            "operating_band": OPERATING_BAND_ANY,
-        }
-        self.start_soft_ap(soft_ap_params)
-        self.verify_soft_ap_associate_and_pass_traffic(
-            self.primary_client,
-        )
-
-    def test_soft_ap_2g_wpa_local(self):
-        soft_ap_params = {
-            "ssid": utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G),
-            "security_type": SECURITY_WPA,
-            "password": generate_random_password(),
-            "connectivity_mode": CONNECTIVITY_MODE_LOCAL,
-            "operating_band": OPERATING_BAND_2G,
-        }
-        self.start_soft_ap(soft_ap_params)
-        self.verify_soft_ap_associate_and_pass_traffic(
-            self.primary_client, soft_ap_params
-        )
-
-    def test_soft_ap_5g_wpa_local(self):
-        soft_ap_params = {
-            "ssid": utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_5G),
-            "security_type": SECURITY_WPA,
-            "password": generate_random_password(),
-            "connectivity_mode": CONNECTIVITY_MODE_LOCAL,
-            "operating_band": OPERATING_BAND_5G,
-        }
-        self.start_soft_ap(soft_ap_params)
-        self.verify_soft_ap_associate_and_pass_traffic(
-            self.primary_client, soft_ap_params
-        )
-
-    def test_soft_ap_any_wpa_local(self):
-        soft_ap_params = {
-            "ssid": utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_5G),
-            "security_type": SECURITY_WPA,
-            "password": generate_random_password(),
-            "connectivity_mode": CONNECTIVITY_MODE_LOCAL,
-            "operating_band": OPERATING_BAND_ANY,
-        }
-        self.start_soft_ap(soft_ap_params)
-        self.verify_soft_ap_associate_and_pass_traffic(
-            self.primary_client, soft_ap_params
-        )
-
-    def test_soft_ap_2g_wpa2_local(self):
-        soft_ap_params = {
-            "ssid": utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G),
-            "security_type": SECURITY_WPA2,
-            "password": generate_random_password(),
-            "connectivity_mode": CONNECTIVITY_MODE_LOCAL,
-            "operating_band": OPERATING_BAND_2G,
-        }
-        self.start_soft_ap(soft_ap_params)
-        self.verify_soft_ap_associate_and_pass_traffic(
-            self.primary_client, soft_ap_params
-        )
-
-    def test_soft_ap_5g_wpa2_local(self):
-        soft_ap_params = {
-            "ssid": utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_5G),
-            "security_type": SECURITY_WPA2,
-            "password": generate_random_password(),
-            "connectivity_mode": CONNECTIVITY_MODE_LOCAL,
-            "operating_band": OPERATING_BAND_5G,
-        }
-        self.start_soft_ap(soft_ap_params)
-        self.verify_soft_ap_associate_and_pass_traffic(
-            self.primary_client, soft_ap_params
-        )
-
-    def test_soft_ap_any_wpa2_local(self):
-        soft_ap_params = {
-            "ssid": utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_5G),
-            "security_type": SECURITY_WPA2,
-            "password": generate_random_password(),
-            "connectivity_mode": CONNECTIVITY_MODE_LOCAL,
-            "operating_band": OPERATING_BAND_ANY,
-        }
-        self.start_soft_ap(soft_ap_params)
-        self.verify_soft_ap_associate_and_pass_traffic(
-            self.primary_client, soft_ap_params
-        )
-
-    def test_soft_ap_2g_wpa3_local(self):
-        soft_ap_params = {
-            "ssid": utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G),
-            "security_type": SECURITY_WPA3,
-            "password": generate_random_password(),
-            "connectivity_mode": CONNECTIVITY_MODE_LOCAL,
-            "operating_band": OPERATING_BAND_2G,
-        }
-        self.start_soft_ap(soft_ap_params)
-        self.verify_soft_ap_associate_and_pass_traffic(
-            self.primary_client, soft_ap_params
-        )
-
-    def test_soft_ap_5g_wpa3_local(self):
-        soft_ap_params = {
-            "ssid": utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_5G),
-            "security_type": SECURITY_WPA3,
-            "password": generate_random_password(),
-            "connectivity_mode": CONNECTIVITY_MODE_LOCAL,
-            "operating_band": OPERATING_BAND_ANY,
-        }
-        self.start_soft_ap(soft_ap_params)
-        self.verify_soft_ap_associate_and_pass_traffic(
-            self.primary_client, soft_ap_params
-        )
-
-    def test_soft_ap_any_wpa3_local(self):
-        soft_ap_params = {
-            "ssid": utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_5G),
-            "security_type": SECURITY_WPA3,
-            "password": generate_random_password(),
-            "connectivity_mode": CONNECTIVITY_MODE_LOCAL,
-            "operating_band": OPERATING_BAND_ANY,
-        }
-        self.start_soft_ap(soft_ap_params)
-        self.verify_soft_ap_associate_and_pass_traffic(
-            self.primary_client, soft_ap_params
-        )
-
-    def test_soft_ap_2g_open_unrestricted(self):
-        soft_ap_params = {
-            "ssid": utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G),
-            "security_type": SECURITY_OPEN,
-            "connectivity_mode": CONNECTIVITY_MODE_UNRESTRICTED,
-            "operating_band": OPERATING_BAND_2G,
-        }
-        self.start_soft_ap(soft_ap_params)
-        self.verify_soft_ap_associate_and_pass_traffic(
-            self.primary_client, soft_ap_params
-        )
-
-    def test_soft_ap_5g_open_unrestricted(self):
-        soft_ap_params = {
-            "ssid": utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_5G),
-            "security_type": SECURITY_OPEN,
-            "connectivity_mode": CONNECTIVITY_MODE_UNRESTRICTED,
-            "operating_band": OPERATING_BAND_5G,
-        }
-        self.start_soft_ap(soft_ap_params)
-        self.verify_soft_ap_associate_and_pass_traffic(
-            self.primary_client, soft_ap_params
-        )
-
-    def test_soft_ap_any_open_unrestricted(self):
-        soft_ap_params = {
-            "ssid": utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_5G),
-            "security_type": SECURITY_OPEN,
-            "connectivity_mode": CONNECTIVITY_MODE_UNRESTRICTED,
-            "operating_band": OPERATING_BAND_ANY,
-        }
-        self.start_soft_ap(soft_ap_params)
-        self.verify_soft_ap_associate_and_pass_traffic(
-            self.primary_client, soft_ap_params
-        )
-
-    def test_soft_ap_2g_wep_unrestricted(self):
-        soft_ap_params = {
-            "ssid": utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G),
-            "security_type": SECURITY_WEP,
-            "password": generate_random_password(security_mode=SECURITY_WEP),
-            "connectivity_mode": CONNECTIVITY_MODE_UNRESTRICTED,
-            "operating_band": OPERATING_BAND_2G,
-        }
-        self.start_soft_ap(soft_ap_params)
-        self.verify_soft_ap_associate_and_pass_traffic(
-            self.primary_client, soft_ap_params
-        )
-
-    def test_soft_ap_5g_wep_unrestricted(self):
-        soft_ap_params = {
-            "ssid": utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_5G),
-            "security_type": SECURITY_WEP,
-            "password": generate_random_password(security_mode=SECURITY_WEP),
-            "connectivity_mode": CONNECTIVITY_MODE_UNRESTRICTED,
-            "operating_band": OPERATING_BAND_5G,
-        }
-        self.start_soft_ap(soft_ap_params)
-        self.verify_soft_ap_associate_and_pass_traffic(
-            self.primary_client, soft_ap_params
-        )
-
-    def test_soft_ap_any_wep_unrestricted(self):
-        soft_ap_params = {
-            "ssid": utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_5G),
-            "security_type": SECURITY_WEP,
-            "password": generate_random_password(security_mode=SECURITY_WEP),
-            "connectivity_mode": CONNECTIVITY_MODE_UNRESTRICTED,
-            "operating_band": OPERATING_BAND_ANY,
-        }
-        self.start_soft_ap(soft_ap_params)
-        self.verify_soft_ap_associate_and_pass_traffic(
-            self.primary_client, soft_ap_params
-        )
-
-    def test_soft_ap_2g_wpa_unrestricted(self):
-        soft_ap_params = {
-            "ssid": utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G),
-            "security_type": SECURITY_WPA,
-            "password": generate_random_password(),
-            "connectivity_mode": CONNECTIVITY_MODE_UNRESTRICTED,
-            "operating_band": OPERATING_BAND_2G,
-        }
-        self.start_soft_ap(soft_ap_params)
-        self.verify_soft_ap_associate_and_pass_traffic(
-            self.primary_client, soft_ap_params
-        )
-
-    def test_soft_ap_5g_wpa_unrestricted(self):
-        soft_ap_params = {
-            "ssid": utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_5G),
-            "security_type": SECURITY_WPA,
-            "password": generate_random_password(),
-            "connectivity_mode": CONNECTIVITY_MODE_UNRESTRICTED,
-            "operating_band": OPERATING_BAND_5G,
-        }
-        self.start_soft_ap(soft_ap_params)
-        self.verify_soft_ap_associate_and_pass_traffic(
-            self.primary_client, soft_ap_params
-        )
-
-    def test_soft_ap_any_wpa_unrestricted(self):
-        soft_ap_params = {
-            "ssid": utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_5G),
-            "security_type": SECURITY_WPA,
-            "password": generate_random_password(),
-            "connectivity_mode": CONNECTIVITY_MODE_UNRESTRICTED,
-            "operating_band": OPERATING_BAND_ANY,
-        }
-        self.start_soft_ap(soft_ap_params)
-        self.verify_soft_ap_associate_and_pass_traffic(
-            self.primary_client, soft_ap_params
-        )
-
-    def test_soft_ap_2g_wpa2_unrestricted(self):
-        soft_ap_params = {
-            "ssid": utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G),
-            "security_type": SECURITY_WPA2,
-            "password": generate_random_password(),
-            "connectivity_mode": CONNECTIVITY_MODE_UNRESTRICTED,
-            "operating_band": OPERATING_BAND_2G,
-        }
-        self.start_soft_ap(soft_ap_params)
-        self.verify_soft_ap_associate_and_pass_traffic(
-            self.primary_client, soft_ap_params
-        )
-
-    def test_soft_ap_5g_wpa2_unrestricted(self):
-        soft_ap_params = {
-            "ssid": utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_5G),
-            "security_type": SECURITY_WPA2,
-            "password": generate_random_password(),
-            "connectivity_mode": CONNECTIVITY_MODE_UNRESTRICTED,
-            "operating_band": OPERATING_BAND_5G,
-        }
-        self.start_soft_ap(soft_ap_params)
-        self.verify_soft_ap_associate_and_pass_traffic(
-            self.primary_client, soft_ap_params
-        )
-
-    def test_soft_ap_any_wpa2_unrestricted(self):
-        soft_ap_params = {
-            "ssid": utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_5G),
-            "security_type": SECURITY_WPA2,
-            "password": generate_random_password(),
-            "connectivity_mode": CONNECTIVITY_MODE_UNRESTRICTED,
-            "operating_band": OPERATING_BAND_ANY,
-        }
-        self.start_soft_ap(soft_ap_params)
-        self.verify_soft_ap_associate_and_pass_traffic(
-            self.primary_client, soft_ap_params
-        )
-
-    def test_soft_ap_2g_wpa3_unrestricted(self):
-        soft_ap_params = {
-            "ssid": utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G),
-            "security_type": SECURITY_WPA3,
-            "password": generate_random_password(),
-            "connectivity_mode": CONNECTIVITY_MODE_UNRESTRICTED,
-            "operating_band": OPERATING_BAND_2G,
-        }
-        self.start_soft_ap(soft_ap_params)
-        self.verify_soft_ap_associate_and_pass_traffic(
-            self.primary_client, soft_ap_params
-        )
-
-    def test_soft_ap_5g_wpa3_unrestricted(self):
-        soft_ap_params = {
-            "ssid": utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_5G),
-            "security_type": SECURITY_WPA3,
-            "password": generate_random_password(),
-            "connectivity_mode": CONNECTIVITY_MODE_UNRESTRICTED,
-            "operating_band": OPERATING_BAND_ANY,
-        }
-        self.start_soft_ap(soft_ap_params)
-        self.verify_soft_ap_associate_and_pass_traffic(
-            self.primary_client, soft_ap_params
-        )
-
-    def test_soft_ap_any_wpa3_unrestricted(self):
-        soft_ap_params = {
-            "ssid": utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_5G),
-            "security_type": SECURITY_WPA3,
-            "password": generate_random_password(),
-            "connectivity_mode": CONNECTIVITY_MODE_UNRESTRICTED,
-            "operating_band": OPERATING_BAND_ANY,
-        }
-        self.start_soft_ap(soft_ap_params)
-        self.verify_soft_ap_associate_and_pass_traffic(
-            self.primary_client, soft_ap_params
-        )
-
-    def test_multi_client(self):
-        """Tests multi-client association with a single soft AP network.
-
-        This tests associates a variable length list of clients, verfying it can
-        can ping the SoftAP and pass traffic, and then verfies all previously
-        associated clients can still ping and pass traffic.
-
-        The same occurs in reverse for disassocations.
-
-        SoftAP parameters can be changed from default via ACTS config:
-        Example Config
-        "soft_ap_test_params" : {
-            "multi_client_test_params": {
-                "ssid": "testssid",
-                "security_type": "wpa2",
-                "password": "password",
-                "connectivity_mode": "local_only",
-                "operating_band": "only_2_4_ghz"
-            }
-        }
-        """
-        asserts.skip_if(len(self.clients) < 2, "Test requires at least 2 SoftAPClients")
-
-        test_params = self.soft_ap_test_params.get("multi_client_test_params", {})
-        soft_ap_params = get_soft_ap_params_from_config_or_default(
-            test_params.get("soft_ap_params", {})
-        )
-
-        self.start_soft_ap(soft_ap_params)
-
-        associated = []
-
-        for client in self.clients:
-            # Associate new client
-            self.verify_soft_ap_associate_and_ping(client, soft_ap_params)
-
-            # Verify previously associated clients still behave as expected
-            for associated_client in associated:
-                self.log.info(
-                    "Verifying previously associated client %s still functions correctly."
-                    % associated_client["device"].identifier
-                )
-                if not self.device_is_connected_to_ap(
-                    associated_client["device"], self.dut, check_traffic=True
-                ):
-                    asserts.fail(
-                        "Previously associated client %s failed checks after "
-                        "client %s associated."
-                        % (associated_client["device"].identifier, client.identifier)
-                    )
-
-            client_interface = self.get_device_test_interface(client)
-            client_ipv4 = self.wait_for_ipv4_address(client, client_interface)
-            associated.append({"device": client, "address": client_ipv4})
-
-        self.log.info("All devices successfully associated.")
-
-        self.log.info("Verifying all associated clients can ping eachother.")
-        for transmitter in associated:
-            for receiver in associated:
-                if transmitter != receiver:
-                    if not transmitter["device"].can_ping(receiver["address"]):
-                        asserts.fail(
-                            "Could not ping from one associated client (%s) to another (%s)."
-                            % (transmitter["address"], receiver["address"])
-                        )
-                    else:
-                        self.log.info(
-                            "Successfully pinged from associated client (%s) to another (%s)"
-                            % (transmitter["address"], receiver["address"])
-                        )
-
-        self.log.info(
-            "All associated clients can ping eachother. Beginning disassociations."
-        )
-
-        while len(associated) > 0:
-            # Disassociate client
-            client = associated.pop()["device"]
-            self.disconnect_from_soft_ap(client)
-
-            # Verify still connected clients still behave as expected
-            for associated_client in associated:
-                self.log.info(
-                    "Verifying still associated client %s still functions "
-                    "correctly." % associated_client["device"].identifier
-                )
-                if not self.device_is_connected_to_ap(
-                    associated_client["device"], self.dut, check_traffic=True
-                ):
-                    asserts.fail(
-                        "Previously associated client %s failed checks after"
-                        " client %s disassociated."
-                        % (associated_client["device"].identifier, client.identifier)
-                    )
-
-        self.log.info("All disassociations occurred smoothly.")
-
-    def test_simultaneous_soft_ap_and_client(self):
-        """Tests FuchsiaDevice DUT can act as a client and a SoftAP
-        simultaneously.
-
-        Raises:
-            ConnectionError: if DUT fails to connect as client
-            RuntimeError: if parallel processes fail to join
-            TestFailure: if DUT fails to pass traffic as either a client or an
-                AP
-        """
-        asserts.skip_if(not self.access_point, "No access point provided.")
-
-        self.log.info("Setting up AP using hostapd.")
-        test_params = self.soft_ap_test_params.get("soft_ap_and_client_test_params", {})
-
-        # Configure AP
-        ap_params = get_ap_params_from_config_or_default(
-            test_params.get("ap_params", {})
-        )
-
-        # Setup AP and associate DUT
-        ap_profile = ap_params.pop("profile")
-        setup_ap(access_point=self.access_point, profile_name=ap_profile, **ap_params)
-        try:
-            self.start_client_mode_and_verify_connected(ap_params)
-        except Exception as err:
-            asserts.fail("Failed to set up client mode. Err: %s" % err)
-
-        # Setup SoftAP
-        soft_ap_params = get_soft_ap_params_from_config_or_default(
-            test_params.get("soft_ap_params", {})
-        )
-        self.start_soft_ap_and_verify_connected(self.primary_client, soft_ap_params)
-
-        # Get FuchsiaDevice test interfaces
-        dut_ap_interface = self.get_device_test_interface(
-            self.dut, role=INTERFACE_ROLE_AP
-        )
-        dut_client_interface = self.get_device_test_interface(
-            self.dut, role=INTERFACE_ROLE_CLIENT
-        )
-
-        # Get FuchsiaDevice addresses
-        dut_ap_ipv4 = self.wait_for_ipv4_address(self.dut, dut_ap_interface)
-        dut_client_ipv4 = self.wait_for_ipv4_address(self.dut, dut_client_interface)
-
-        # Set up secondary iperf server of FuchsiaDevice
-        self.log.info("Setting up second iperf server on FuchsiaDevice DUT.")
-        secondary_iperf_server = iperf_server.IPerfServerOverSsh(
-            self.iperf_server_settings, DEFAULT_IPERF_PORT + 1, use_killall=True
-        )
-        secondary_iperf_server.start()
-
-        # Set up iperf client on AP
-        self.log.info("Setting up iperf client on AP.")
-        ap_iperf_client = iperf_client.IPerfClientOverSsh(
-            self.access_point.ssh_settings
-        )
-
-        # Setup iperf processes:
-        #     Primary client <-> SoftAP interface on FuchsiaDevice
-        #     AP <-> Client interface on FuchsiaDevice
-        process_errors = mp.Queue()
-        iperf_soft_ap = mp.Process(
-            target=self.run_iperf_traffic_parallel_process,
-            args=[
-                self.iperf_clients_map[self.primary_client],
-                dut_ap_ipv4,
-                process_errors,
-            ],
-        )
-
-        iperf_fuchsia_client = mp.Process(
-            target=self.run_iperf_traffic_parallel_process,
-            args=[ap_iperf_client, dut_client_ipv4, process_errors],
-            kwargs={"server_port": 5202},
-        )
-
-        # Run iperf processes simultaneously
-        self.log.info(
-            "Running simultaneous iperf traffic: between AP and DUT "
-            "client interface, and DUT AP interface and client."
-        )
-
-        iperf_soft_ap.start()
-        iperf_fuchsia_client.start()
-
-        # Block until processes can join or timeout
-        for proc in [iperf_soft_ap, iperf_fuchsia_client]:
-            proc.join(timeout=DEFAULT_IPERF_TIMEOUT)
-            if proc.is_alive():
-                proc.terminate()
-                proc.join()
-                raise RuntimeError("Failed to join process %s" % proc)
-
-        # Stop iperf server (also stopped in teardown class as failsafe)
-        secondary_iperf_server.stop()
-
-        # Check errors from parallel processes
-        if process_errors.empty():
-            asserts.explicit_pass(
-                "FuchsiaDevice was successfully able to pass traffic as a "
-                "client and an AP simultaneously."
-            )
-        else:
-            while not process_errors.empty():
-                self.log.error("Error in iperf process: %s" % process_errors.get())
-            asserts.fail(
-                "FuchsiaDevice failed to pass traffic as a client and an AP "
-                "simultaneously."
-            )
-
-    def test_soft_ap_association_stress(self):
-        """Sets up a single AP and repeatedly associate/disassociate
-        a client, verifying connection every time
-
-        Each test creates 1 SoftAP and repeatedly associates/disassociates
-        client.
-
-        Example Config
-        "soft_ap_test_params" : {
-            "soft_ap_association_stress_tests": [
-                {
-                    "ssid": "test_network",
-                    "security_type": "wpa2",
-                    "password": "password",
-                    "connectivity_mode": "local_only",
-                    "operating_band": "only_2_4_ghz",
-                    "iterations": 10
-                }
-            ]
-        }
-        """
-        tests = self.soft_ap_test_params.get(
-            "test_soft_ap_association_stress",
-            [dict(test_name="test_soft_ap_association_stress_default")],
-        )
-
-        test_settings_list = []
-        for config_settings in tests:
-            soft_ap_params = get_soft_ap_params_from_config_or_default(
-                config_settings.get("soft_ap_params", {})
-            )
-            test_type = config_settings.get("test_type", "associate_and_pass_traffic")
-            iterations = config_settings.get(
-                "iterations", DEFAULT_STRESS_TEST_ITERATIONS
-            )
-            test_settings = {
-                "test_name": config_settings.get(
-                    "test_name",
-                    "test_soft_ap_association_stress_%s_iterations" % iterations,
-                ),
-                "client": self.primary_client,
-                "soft_ap_params": soft_ap_params,
-                "test_type": test_type,
-                "iterations": iterations,
-            }
-            test_settings_list.append(test_settings)
-
-        self.run_generated_testcases(
-            self.run_soft_ap_association_stress_test,
-            test_settings_list,
-            name_func=get_test_name_from_settings,
-        )
-
-    def test_soft_ap_and_client_mode_alternating_stress(self):
-        """Runs tests that alternate between SoftAP and Client modes.
-
-        Each tests sets up an AP. Then, for each iteration:
-            - DUT starts up SoftAP, client associates with SoftAP,
-                connection is verified, then disassociates
-            - DUT associates to the AP, connection is verified, then
-                disassociates
-
-        Example Config:
-        "soft_ap_test_params": {
-            "toggle_soft_ap_and_client_tests": [
-                {
-                    "test_name": "test_wpa2_client_ap_toggle",
-                    "ap_params": {
-                        "channel": 6,
-                        "ssid": "test-ap-network",
-                        "security_mode": "wpa2",
-                        "password": "password"
-                    },
-                    "soft_ap_params": {
-                        "ssid": "test-soft-ap-network",
-                        "security_type": "wpa2",
-                        "password": "other-password",
-                        "connectivity_mode": "local_only",
-                        "operating_band": "only_2_4_ghz"
-                    },
-                    "iterations": 5
-                }
-            ]
-        }
-        """
-        asserts.skip_if(not self.access_point, "No access point provided.")
-        tests = self.soft_ap_test_params.get(
-            "test_soft_ap_and_client_mode_alternating_stress",
-            [dict(test_name="test_soft_ap_and_client_mode_alternating_stress_default")],
-        )
-
-        test_settings_list = []
-        for config_settings in tests:
-            ap_params = get_ap_params_from_config_or_default(
-                config_settings.get("ap_params", {})
-            )
-            soft_ap_params = get_soft_ap_params_from_config_or_default(
-                config_settings.get("soft_ap_params", {})
-            )
-            iterations = config_settings.get(
-                "iterations", DEFAULT_STRESS_TEST_ITERATIONS
-            )
-
-            test_settings = {
-                "test_name": config_settings.get(
-                    "test_name",
-                    "test_soft_ap_and_client_mode_alternating_stress_%s_iterations"
-                    % iterations,
-                ),
-                "iterations": iterations,
-                "soft_ap_params": soft_ap_params,
-                "ap_params": ap_params,
-            }
-
-            test_settings_list.append(test_settings)
-        self.run_generated_testcases(
-            test_func=self.run_soft_ap_and_client_mode_alternating_test,
-            settings=test_settings_list,
-            name_func=get_test_name_from_settings,
-        )
-
-    def test_soft_ap_toggle_stress(self):
-        """Runs SoftAP toggling stress test.
-
-        Each iteration toggles SoftAP to the opposite state (up or down).
-
-        If toggled up, a client is associated and connection is verified
-        If toggled down, test verifies client is not connected
-
-        Will run with default params, but custom tests can be provided in the
-        ACTS config.
-
-        Example Config
-        "soft_ap_test_params" : {
-            "test_soft_ap_toggle_stress": [
-                "soft_ap_params": {
-                    "security_type": "wpa2",
-                    "password": "password",
-                    "connectivity_mode": "local_only",
-                    "operating_band": "only_2_4_ghz",
-                },
-                "iterations": 10
-            ]
-        }
-        """
-        tests = self.soft_ap_test_params.get(
-            "test_soft_ap_toggle_stress",
-            [dict(test_name="test_soft_ap_toggle_stress_default")],
-        )
-
-        test_settings_list = []
-        for config_settings in tests:
-            soft_ap_params = get_soft_ap_params_from_config_or_default(
-                config_settings.get("soft_ap_params", {})
-            )
-            iterations = config_settings.get(
-                "iterations", DEFAULT_STRESS_TEST_ITERATIONS
-            )
-            test_settings = {
-                "test_name": config_settings.get(
-                    "test_name", "test_soft_ap_toggle_stress_%s_iterations" % iterations
-                ),
-                "test_runner_func": self.soft_ap_toggle_test_iteration,
-                "soft_ap_params": soft_ap_params,
-                "iterations": iterations,
-            }
-            test_settings_list.append(test_settings)
-
-        self.run_generated_testcases(
-            self.run_toggle_stress_test,
-            test_settings_list,
-            name_func=get_test_name_from_settings,
-        )
-
-    def test_client_mode_toggle_stress(self):
-        """Runs client mode toggling stress test.
-
-        Each iteration toggles client mode to the opposite state (up or down).
-
-        If toggled up, DUT associates to AP, and connection is verified
-        If toggled down, test verifies DUT is not connected to AP
-
-        Will run with default params, but custom tests can be provided in the
-        ACTS config.
-
-        Example Config
-        "soft_ap_test_params" : {
-            "test_client_mode_toggle_stress": [
-                "soft_ap_params": {
-                    'ssid': ssid,
-                    'channel': channel,
-                    'security_mode': security,
-                    'password': password
-                },
-                "iterations": 10
-            ]
-        }
-        """
-        asserts.skip_if(not self.access_point, "No access point provided.")
-        tests = self.soft_ap_test_params.get(
-            "test_client_mode_toggle_stress",
-            [dict(test_name="test_client_mode_toggle_stress_default")],
-        )
-
-        test_settings_list = []
-        for config_settings in tests:
-            ap_params = get_ap_params_from_config_or_default(
-                config_settings.get("ap_params", {})
-            )
-            iterations = config_settings.get(
-                "iterations", DEFAULT_STRESS_TEST_ITERATIONS
-            )
-            test_settings = {
-                "test_name": config_settings.get(
-                    "test_name",
-                    "test_client_mode_toggle_stress_%s_iterations" % iterations,
-                ),
-                "test_runner_func": self.client_mode_toggle_test_iteration,
-                "pre_test_func": self.client_mode_toggle_pre_test,
-                "ap_params": ap_params,
-                "iterations": iterations,
-            }
-            test_settings_list.append(test_settings)
-        self.run_generated_testcases(
-            self.run_toggle_stress_test,
-            test_settings_list,
-            name_func=get_test_name_from_settings,
-        )
-
-    def test_soft_ap_toggle_stress_with_client_mode(self):
-        """Same as test_soft_ap_toggle_stress, but client mode is set up
-        at test start and verified after every toggle."""
-        asserts.skip_if(not self.access_point, "No access point provided.")
-        tests = self.soft_ap_test_params.get(
-            "test_soft_ap_toggle_stress_with_client_mode",
-            [dict(test_name="test_soft_ap_toggle_stress_with_client_mode_default")],
-        )
-
-        test_settings_list = []
-        for config_settings in tests:
-            soft_ap_params = get_soft_ap_params_from_config_or_default(
-                config_settings.get("soft_ap_params", {})
-            )
-            ap_params = get_ap_params_from_config_or_default(
-                config_settings.get("ap_params", {})
-            )
-            iterations = config_settings.get(
-                "iterations", DEFAULT_STRESS_TEST_ITERATIONS
-            )
-            test_settings = {
-                "test_name": config_settings.get(
-                    "test_name",
-                    "test_soft_ap_toggle_stress_with_client_mode_%s_iterations"
-                    % iterations,
-                ),
-                "test_runner_func": self.soft_ap_toggle_with_client_mode_iteration,
-                "pre_test_func": self.soft_ap_toggle_with_client_mode_pre_test,
-                "soft_ap_params": soft_ap_params,
-                "ap_params": ap_params,
-                "iterations": iterations,
-            }
-            test_settings_list.append(test_settings)
-        self.run_generated_testcases(
-            self.run_toggle_stress_test,
-            test_settings_list,
-            name_func=get_test_name_from_settings,
-        )
-
-    def test_client_mode_toggle_stress_with_soft_ap(self):
-        """Same as test_client_mode_toggle_stress, but softap is set up at
-        test start and verified after every toggle."""
-        asserts.skip_if(not self.access_point, "No access point provided.")
-        tests = self.soft_ap_test_params.get(
-            "test_client_mode_toggle_stress_with_soft_ap",
-            [dict(test_name="test_client_mode_toggle_stress_with_soft_ap_default")],
-        )
-
-        test_settings_list = []
-        for config_settings in tests:
-            soft_ap_params = get_soft_ap_params_from_config_or_default(
-                config_settings.get("soft_ap_params", {})
-            )
-            ap_params = get_ap_params_from_config_or_default(
-                config_settings.get("ap_params", {})
-            )
-            iterations = config_settings.get(
-                "iterations", DEFAULT_STRESS_TEST_ITERATIONS
-            )
-            test_settings = {
-                "test_name": config_settings.get(
-                    "test_name",
-                    "test_client_mode_toggle_stress_with_soft_ap_%s_iterations"
-                    % iterations,
-                ),
-                "test_runner_func": self.client_mode_toggle_with_soft_ap_iteration,
-                "pre_test_func": self.client_mode_toggle_with_soft_ap_pre_test,
-                "soft_ap_params": soft_ap_params,
-                "ap_params": ap_params,
-                "iterations": iterations,
-            }
-            test_settings_list.append(test_settings)
-        self.run_generated_testcases(
-            self.run_toggle_stress_test,
-            test_settings_list,
-            name_func=get_test_name_from_settings,
-        )
-
-    def test_soft_ap_and_client_mode_random_toggle_stress(self):
-        """Same as above toggle stres tests, but each iteration, either softap,
-        client mode, or both are toggled, then states are verified."""
-        asserts.skip_if(not self.access_point, "No access point provided.")
-        tests = self.soft_ap_test_params.get(
-            "test_soft_ap_and_client_mode_random_toggle_stress",
-            [
-                dict(
-                    test_name="test_soft_ap_and_client_mode_random_toggle_stress_default"
-                )
-            ],
-        )
-
-        test_settings_list = []
-        for config_settings in tests:
-            soft_ap_params = get_soft_ap_params_from_config_or_default(
-                config_settings.get("soft_ap_params", {})
-            )
-            ap_params = get_ap_params_from_config_or_default(
-                config_settings.get("ap_params", {})
-            )
-            iterations = config_settings.get(
-                "iterations", DEFAULT_STRESS_TEST_ITERATIONS
-            )
-            test_settings = {
-                "test_name": config_settings.get(
-                    "test_name",
-                    "test_soft_ap_and_client_mode_random_toggle_stress_%s_iterations"
-                    % iterations,
-                ),
-                "soft_ap_params": soft_ap_params,
-                "ap_params": ap_params,
-                "iterations": iterations,
-            }
-            test_settings_list.append(test_settings)
-        self.run_generated_testcases(
-            self.run_soft_ap_and_client_mode_random_toggle_stress_test,
-            test_settings_list,
-            name_func=get_test_name_from_settings,
-        )
-
-
-if __name__ == "__main__":
-    test_runner.main()
diff --git a/src/antlion/tests/wlan/functional/WlanRebootTest.py b/src/antlion/tests/wlan/functional/WlanRebootTest.py
deleted file mode 100644
index 5c8406b..0000000
--- a/src/antlion/tests/wlan/functional/WlanRebootTest.py
+++ /dev/null
@@ -1,824 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import itertools
-import os
-import time
-from multiprocessing import Process
-
-from antlion import context, utils
-from antlion.controllers import iperf_client, iperf_server
-from antlion.controllers.access_point import AccessPoint, setup_ap
-from antlion.controllers.ap_lib import hostapd_constants
-from antlion.controllers.ap_lib.hostapd_security import Security
-from antlion.controllers.ap_lib.hostapd_utils import generate_random_password
-from antlion.net import wait_for_port
-from antlion.test_utils.abstract_devices.wlan_device import create_wlan_device
-from antlion.test_utils.wifi import base_test
-
-from mobly import asserts, test_runner
-
-# Constants, for readibility
-AP = "ap"
-DUT = "dut"
-DEVICES = [AP, DUT]
-
-SOFT = "soft"
-HARD = "hard"
-REBOOT_TYPES = [SOFT, HARD]
-
-BAND_2G = "2g"
-BAND_5G = "5g"
-BANDS = [BAND_2G, BAND_5G]
-
-IPV4 = "ipv4"
-IPV6 = "ipv6"
-DUAL_IPV4_IPV6 = {IPV4: True, IPV6: True}
-IPV4_ONLY = {IPV4: True, IPV6: False}
-IPV6_ONLY = {IPV4: False, IPV6: True}
-IP_VERSIONS = [IPV4_ONLY, IPV6_ONLY, DUAL_IPV4_IPV6]
-
-INTERRUPTS = [True, False]
-OPEN_ENCRYPTION_STRING = "open"
-SECURITY_MODES = [
-    OPEN_ENCRYPTION_STRING,
-    hostapd_constants.WPA2_STRING,
-    hostapd_constants.WPA3_STRING,
-]
-
-DEFAULT_IPERF_TIMEOUT = 30
-
-DUT_NETWORK_CONNECTION_TIMEOUT = 60
-DUT_IP_ADDRESS_TIMEOUT = 30  # max time for DAD to complete
-
-# Constants for Custom Reboot Tests
-ALL = "all"
-BOTH = "both"
-
-CUSTOM_TEST_REBOOT_DEVICES = {AP: [AP], DUT: [DUT], ALL: [AP, DUT]}
-CUSTOM_TEST_REBOOT_TYPES = {SOFT: [SOFT], HARD: [HARD], ALL: [SOFT, HARD]}
-CUSTOM_TEST_BANDS = {BAND_2G: [BAND_2G], BAND_5G: [BAND_5G], ALL: [BAND_2G, BAND_5G]}
-CUSTOM_TEST_IP_VERSIONS = {
-    IPV4: [IPV4_ONLY],
-    IPV6: [IPV6_ONLY],
-    BOTH: [DUAL_IPV4_IPV6],
-    ALL: [IPV4_ONLY, IPV6_ONLY, DUAL_IPV4_IPV6],
-}
-CUSTOM_TEST_INTERRUPTS = {"true": [True], "false": [False], ALL: [True, False]}
-
-
-class WlanRebootTest(base_test.WifiBaseTest):
-    """Tests wlan reconnects in different reboot scenarios.
-
-    Testbed Requirement:
-    * One ACTS compatible device (dut)
-    * One Whirlwind Access Point (will also serve as iperf server)
-    * One PduDevice
-    """
-
-    def __init__(self, controllers):
-        super().__init__(controllers)
-
-    def setup_generated_tests(self):
-        self._read_wlan_reboot_test_params()
-        self.generate_tests(
-            test_logic=self.run_reboot_test,
-            name_func=self.generate_test_name,
-            arg_sets=self.generate_test_args(),
-        )
-
-    def setup_class(self):
-        super().setup_class()
-        self.android_devices = getattr(self, "android_devices", [])
-        self.fuchsia_devices = getattr(self, "fuchsia_devices", [])
-        self.pdu_devices = getattr(self, "pdu_devices", [])
-
-        device_type = self.user_params.get("dut", "fuchsia_devices")
-        if device_type == "fuchsia_devices":
-            self.dut = create_wlan_device(self.fuchsia_devices[0])
-        elif device_type == "android_devices":
-            self.dut = create_wlan_device(self.android_devices[0])
-        else:
-            raise ValueError(
-                f'Invalid "dut" type specified in config: "{device_type}".'
-                'Expected "fuchsia_devices" or "android_devices".'
-            )
-
-        self.access_point: AccessPoint = self.access_points[0]
-
-        self.iperf_server_on_ap = None
-        self.iperf_client_on_dut = None
-        if not self.skip_iperf:
-            if hasattr(self, "iperf_clients") and self.iperf_clients:
-                self.iperf_client_on_dut = self.iperf_clients[0]
-            else:
-                self.iperf_client_on_dut = self.dut.create_iperf_client()
-        else:
-            self.log.info(
-                "Skipping iperf throughput validation as requested by ACTS " "config"
-            )
-
-    def setup_test(self):
-        self.access_point.stop_all_aps()
-        self.dut.wifi_toggle_state(True)
-        for ad in self.android_devices:
-            ad.droid.wakeLockAcquireBright()
-            ad.droid.wakeUpNow()
-        self.dut.disconnect()
-        self.dut.device.configure_wlan()
-        self.ssid = utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G)
-
-    def teardown_test(self):
-        # TODO(b/273923552): We take a snapshot here and before rebooting the
-        # DUT for every test because the persistence component does not make the
-        # inspect logs available for 120 seconds. This helps for debugging
-        # issues where we need previous state.
-        self.dut.device.take_bug_report()
-        self.download_ap_logs()
-        self.access_point.stop_all_aps()
-        self.dut.disconnect()
-        for ad in self.android_devices:
-            ad.droid.wakeLockRelease()
-            ad.droid.goToSleepNow()
-        self.dut.turn_location_off_and_scan_toggle_off()
-        self.dut.reset_wifi()
-
-    def setup_ap(
-        self, ssid, band, ipv4=True, ipv6=False, security_mode=None, password=None
-    ):
-        """Setup ap with basic config.
-
-        Args:
-            ssid: string, ssid to setup on ap
-            band: string ('2g' or '5g') of band to setup.
-            ipv4: True if using ipv4 (dhcp), else False.
-            ipv6: True if using ipv6 (radvd), else False.
-        """
-        # TODO(fxb/63719): Add varying AP parameters
-        security_profile = None
-        if security_mode:
-            security_profile = Security(security_mode=security_mode, password=password)
-        if band == BAND_2G:
-            setup_ap(
-                access_point=self.access_point,
-                profile_name="whirlwind",
-                channel=11,
-                ssid=ssid,
-                security=security_profile,
-                is_ipv6_enabled=ipv6,
-            )
-        elif band == BAND_5G:
-            setup_ap(
-                access_point=self.access_point,
-                profile_name="whirlwind",
-                channel=36,
-                ssid=ssid,
-                security=security_profile,
-                is_ipv6_enabled=ipv6,
-            )
-
-        if not ipv4:
-            self.access_point.stop_dhcp()
-
-        self.log.info("Network (SSID: %s) is up." % ssid)
-
-    def setup_iperf_server_on_ap(self, band) -> iperf_server.IPerfServerOverSsh:
-        """Configures iperf server based on the tests band.
-
-        Args:
-            band: string ('2g' or '5g') of band to setup.
-        """
-        if band == BAND_2G:
-            return iperf_server.IPerfServerOverSsh(
-                self.access_point.ssh_settings,
-                5201,
-                test_interface=self.access_point.wlan_2g,
-            )
-        elif band == BAND_5G:
-            return iperf_server.IPerfServerOverSsh(
-                self.access_point.ssh_settings,
-                5201,
-                test_interface=self.access_point.wlan_5g,
-            )
-
-    def get_iperf_server_address(self, iperf_server_on_ap, ip_version):
-        """Retrieves the ip address of the iperf server.
-
-        Args:
-            iperf_server_on_ap: IPerfServer object, linked to AP
-            ip_version: string, the ip version (ipv4 or ipv6)
-
-        Returns:
-            String, the ip address of the iperf_server
-        """
-        iperf_server_addresses = iperf_server_on_ap.get_interface_ip_addresses(
-            iperf_server_on_ap.test_interface
-        )
-        if ip_version == IPV4:
-            iperf_server_ip_address = iperf_server_addresses["ipv4_private"][0]
-        elif ip_version == IPV6:
-            if iperf_server_addresses["ipv6_private_local"]:
-                iperf_server_ip_address = iperf_server_addresses["ipv6_private_local"][
-                    0
-                ]
-            else:
-                iperf_server_ip_address = "%s%%%s" % (
-                    iperf_server_addresses["ipv6_link_local"][0],
-                    self.iperf_client_on_dut.test_interface,
-                )
-        else:
-            raise ValueError("Invalid IP version: %s" % ip_version)
-
-        return iperf_server_ip_address
-
-    def verify_traffic_between_dut_and_ap(
-        self, iperf_server_on_ap, iperf_client_on_dut, ip_version=IPV4
-    ):
-        """Runs IPerf traffic from the iperf client (dut) and the iperf
-        server (and vice versa) and verifies traffic was able to pass
-        successfully.
-
-        Args:
-            iperf_server_on_ap: IPerfServer object, linked to AP
-            iperf_client_on_dut: IPerfClient object, linked to DUT
-            ip_version: string, the ip version (ipv4 or ipv6)
-
-        Raises:
-            ValueError, if invalid ip_version is passed.
-            ConnectionError, if traffic is not passed successfully in both
-                directions.
-        """
-        dut_ip_addresses = self.dut.device.get_interface_ip_addresses(
-            iperf_client_on_dut.test_interface
-        )
-
-        iperf_server_ip_address = self.get_iperf_server_address(
-            iperf_server_on_ap, ip_version
-        )
-
-        self.log.info(
-            "Attempting to pass traffic from DUT to IPerf server (%s)."
-            % iperf_server_ip_address
-        )
-        tx_file = iperf_client_on_dut.start(
-            iperf_server_ip_address,
-            "-i 1 -t 3 -J",
-            "reboot_tx",
-            timeout=DEFAULT_IPERF_TIMEOUT,
-        )
-        tx_results = iperf_server.IPerfResult(tx_file)
-        if not tx_results.avg_receive_rate or tx_results.avg_receive_rate == 0:
-            raise ConnectionError(
-                "Failed to pass IPerf traffic from DUT to server (%s). TX "
-                "Average Receive Rate: %s"
-                % (iperf_server_ip_address, tx_results.avg_receive_rate)
-            )
-        else:
-            self.log.info(
-                "Success: Traffic passed from DUT to IPerf server (%s)."
-                % iperf_server_ip_address
-            )
-        self.log.info(
-            "Attempting to pass traffic from IPerf server (%s) to DUT."
-            % iperf_server_ip_address
-        )
-        rx_file = iperf_client_on_dut.start(
-            iperf_server_ip_address,
-            "-i 1 -t 3 -R -J",
-            "reboot_rx",
-            timeout=DEFAULT_IPERF_TIMEOUT,
-        )
-        rx_results = iperf_server.IPerfResult(rx_file)
-        if not rx_results.avg_receive_rate or rx_results.avg_receive_rate == 0:
-            raise ConnectionError(
-                "Failed to pass IPerf traffic from server (%s) to DUT. RX "
-                "Average Receive Rate: %s"
-                % (iperf_server_ip_address, rx_results.avg_receive_rate)
-            )
-        else:
-            self.log.info(
-                "Success: Traffic passed from IPerf server (%s) to DUT."
-                % iperf_server_ip_address
-            )
-
-    def start_dut_ping_process(self, iperf_server_on_ap, ip_version=IPV4):
-        """Creates a  process that pings the AP from the DUT.
-
-        Runs in parallel for 15 seconds, so it can be interrupted by a reboot.
-        Sleeps for a few seconds to ensure pings have started.
-
-        Args:
-            iperf_server_on_ap: IPerfServer object, linked to AP
-            ip_version: string, the ip version (ipv4 or ipv6)
-        """
-        ap_address = self.get_iperf_server_address(iperf_server_on_ap, ip_version)
-        if ap_address:
-            self.log.info(
-                "Starting ping process to %s in parallel. Logs from this "
-                "process will be suppressed, since it will be intentionally "
-                "interrupted." % ap_address
-            )
-            ping_proc = Process(
-                target=self.dut.ping, args=[ap_address], kwargs={"count": 15}
-            )
-            with utils.SuppressLogOutput():
-                ping_proc.start()
-            # Allow for a few seconds of pinging before allowing it to be
-            # interrupted.
-            time.sleep(3)
-        else:
-            raise ConnectionError("Failed to retrieve APs iperf address.")
-
-    def prepare_dut_for_reconnection(self):
-        """Perform any actions to ready DUT for reconnection.
-
-        These actions will vary depending on the DUT. eg. android devices may
-        need to be woken up, ambient devices should not require any interaction,
-        etc.
-        """
-        self.dut.wifi_toggle_state(True)
-        for ad in self.android_devices:
-            ad.droid.wakeUpNow()
-
-    def wait_for_dut_network_connection(self, ssid):
-        """Checks if device is connected to given network. Sleeps 1 second
-        between retries.
-
-        Args:
-            ssid: string of ssid
-        Raises:
-            ConnectionError, if DUT is not connected after all timeout.
-        """
-        self.log.info(
-            "Checking if DUT is connected to %s network. Will retry for %s "
-            "seconds." % (ssid, self.dut_network_connection_timeout)
-        )
-        timeout = time.time() + self.dut_network_connection_timeout
-        while time.time() < timeout:
-            try:
-                is_connected = self.dut.is_connected(ssid=ssid)
-            except Exception as err:
-                self.log.debug("SL4* call failed. Retrying in 1 second.")
-                is_connected = False
-            finally:
-                if is_connected:
-                    self.log.info("Success: DUT has connected.")
-                    break
-                else:
-                    self.log.debug(
-                        "DUT not connected to network %s...retrying in 1 second." % ssid
-                    )
-                    time.sleep(1)
-        else:
-            raise ConnectionError("DUT failed to connect to the network.")
-
-    def write_csv_time_to_reconnect(self, test_name, time_to_reconnect):
-        """Writes the time to reconnect to a csv file.
-        Args:
-            test_name: the name of the test case
-            time_to_reconnect: the time from when the rebooted device came back
-                up to when it reassociated (or 'FAIL'), if it failed to
-                reconnect.
-        """
-        log_context = context.get_current_context()
-        log_path = os.path.join(log_context.get_base_output_path(), "WlanRebootTest/")
-        csv_file_name = "%stime_to_reconnect.csv" % log_path
-        self.log.info("Writing to %s" % csv_file_name)
-        with open(csv_file_name, "a") as csv_file:
-            csv_file.write("%s,%s\n" % (test_name, time_to_reconnect))
-
-    def log_and_continue(self, run, time_to_reconnect=None, error=None):
-        """Writes the time to reconnect to the csv file before continuing, used
-        in stress tests runs.
-
-        Args:
-            time_to_reconnect: the time from when the rebooted device came back
-                ip to when reassociation occurred.
-            run: the run number in a looped stress tested.,
-            error: string, error message to log before continuing with the test
-        """
-        if error:
-            self.log.info(
-                "Device failed to reconnect to network %s on run %s. Error: %s"
-                % (self.ssid, run, error)
-            )
-            self.write_csv_time_to_reconnect(
-                "%s_run_%s" % (self.test_name, run), "FAIL"
-            )
-
-        else:
-            self.log.info(
-                "Device successfully reconnected to network %s after %s seconds"
-                " on run %s." % (self.ssid, time_to_reconnect, run)
-            )
-            self.write_csv_time_to_reconnect(
-                "%s_run_%s" % (self.test_name, run), time_to_reconnect
-            )
-
-    def run_reboot_test(self, settings):
-        """Runs a reboot test based on a given config.
-            1. Setups up a network, associates the dut, and saves the network.
-            2. Verifies the dut receives ip address(es).
-            3. Verifies traffic between DUT and AP (IPerf client and server).
-            4. Reboots (hard or soft) the device (dut or ap).
-                - If the ap was rebooted, setup the same network again.
-            5. Wait for reassociation or timeout.
-            6. If reassocation occurs:
-                - Verifies the dut receives ip address(es).
-                - Verifies traffic between DUT and AP (IPerf client and server).
-            7. Logs time to reconnect (or failure to reconnect)
-            8. If stress testing, repeats steps 4 - 7 for N iterations.
-
-        Args:
-            settings: dictionary containing the following values:
-                reboot_device: string ('dut' or 'ap') of the device to reboot.
-                reboot_type: string ('soft' or 'hard') of how to reboot the
-                    reboot_device.
-                band: string ('2g' or '5g') of band to setup.
-                ipv4: True if using ipv4 (dhcp), else False.
-                ipv6: True if using ipv6 (radvd), else False.
-
-                Optional:
-                    interrupt: if True, the DUT will be pinging the AP in a
-                        parallel process when the reboot occurs. This is used to
-                        compare reconnect times when idle to active.
-                    test_name: name of the test, used when stress testing.
-                    iterations: number of times to perform test, used when stress
-                        testing.
-
-        Raises:
-            ValueError, if ipv4 and ipv6 are both False
-            ValueError, if band is not '2g' or '5g'
-            ValueError, if reboot_device is not 'dut' or 'ap'
-            ValueError, if reboot_type is not 'soft' or 'hard'
-
-        """
-        iterations = settings.get("iterations", 1)
-        passed_count = 0
-        ipv4 = settings.get("ipv4", None)
-        ipv6 = settings.get("ipv6", None)
-        reboot_device = settings["reboot_device"]
-        reboot_type = settings["reboot_type"]
-        band = settings["band"]
-        security_mode = settings.get("security_mode", None)
-        password = settings.get("password", None)
-        if security_mode:
-            if security_mode.lower() == "open":
-                security_mode = None
-            elif not password:
-                password = generate_random_password(security_mode=security_mode)
-        interrupt = settings.get("interrupt", None)
-        # Skip hard reboots if no PDU present
-        asserts.skip_if(
-            reboot_type == HARD and len(self.pdu_devices) == 0,
-            "Hard reboots require a PDU device.",
-        )
-        # Skip DUT reboot w/ interrupt tests, since they are not more helpful
-        # and may cause threading issues.
-        asserts.skip_if(
-            (reboot_device == DUT) and interrupt,
-            "Stream interrupts for DUT reboots are prone to threading issues "
-            "and are not supported.",
-        )
-
-        # Validate test settings.
-        if not ipv4 and not ipv6:
-            raise ValueError("Either ipv4, ipv6, or both must be True.")
-        if reboot_device != DUT and reboot_device != AP:
-            raise ValueError("Invalid reboot device: %s" % reboot_device)
-        if reboot_type != SOFT and reboot_type != HARD:
-            raise ValueError("Invalid reboot type: %s" % reboot_type)
-        if band != BAND_2G and band != BAND_5G:
-            raise ValueError("Invalid band: %s" % band)
-
-        self.setup_ap(self.ssid, band, ipv4, ipv6, security_mode, password)
-        if not self.dut.associate(
-            self.ssid,
-            target_security=hostapd_constants.SECURITY_STRING_TO_DEFAULT_TARGET_SECURITY.get(
-                security_mode
-            ),
-            target_pwd=password,
-        ):
-            raise EnvironmentError("Initial network connection failed.")
-
-        if not self.skip_iperf:
-            dut_test_interface = self.iperf_client_on_dut.test_interface
-            if ipv4:
-                self.dut.device.wait_for_ipv4_addr(dut_test_interface)
-            if ipv6:
-                self.dut.device.wait_for_ipv6_addr(dut_test_interface)
-
-            self.iperf_server_on_ap = self.setup_iperf_server_on_ap(band)
-            self.iperf_server_on_ap.start()
-            wait_for_port(self.iperf_server_on_ap.ssh_settings.hostname, 5201)
-
-            ip_version = IPV6 if ipv6 else IPV4
-            self.verify_traffic_between_dut_and_ap(
-                self.iperf_server_on_ap, self.iperf_client_on_dut, ip_version=ip_version
-            )
-
-        # Looping reboots for stress testing
-        for run in range(iterations):
-            run += 1
-            self.log.info("Starting run %s of %s." % (run, iterations))
-
-            # Ping from DUT to AP during AP reboot
-            if interrupt:
-                if ipv4:
-                    self.start_dut_ping_process(self.iperf_server_on_ap)
-                if ipv6:
-                    self.start_dut_ping_process(
-                        self.iperf_server_on_ap, ip_version=IPV6
-                    )
-
-            # TODO(b/273923552): We take a snapshot here and during test
-            # teardown for every test because the persistence component does not
-            # make the inspect logs available for 120 seconds. This helps for
-            # debugging issues where we need previous state.
-            self.dut.device.take_bug_report()
-
-            # DUT reboots
-            if reboot_device == DUT:
-                if (
-                    not self.skip_iperf
-                    and type(self.iperf_client_on_dut)
-                    == iperf_client.IPerfClientOverSsh
-                ):
-                    self.iperf_client_on_dut.close_ssh()
-                if reboot_type == SOFT:
-                    self.dut.device.reboot()
-                elif reboot_type == HARD:
-                    self.dut.hard_power_cycle(self.pdu_devices)
-
-            # AP reboots
-            elif reboot_device == AP:
-                if reboot_type == SOFT:
-                    self.log.info("Cleanly stopping ap.")
-                    self.access_point.stop_all_aps()
-                elif reboot_type == HARD:
-                    if not self.skip_iperf:
-                        self.iperf_server_on_ap.close_ssh()
-                    self.access_point.hard_power_cycle(self.pdu_devices)
-                self.setup_ap(self.ssid, band, ipv4, ipv6, security_mode, password)
-
-            self.prepare_dut_for_reconnection()
-            uptime = time.time()
-            try:
-                self.wait_for_dut_network_connection(self.ssid)
-                time_to_reconnect = time.time() - uptime
-
-                if not self.skip_iperf:
-                    if ipv4:
-                        self.dut.device.wait_for_ipv4_addr(dut_test_interface)
-                    if ipv6:
-                        self.dut.device.wait_for_ipv6_addr(dut_test_interface)
-
-                    self.iperf_server_on_ap.start()
-
-                    if ipv4:
-                        self.verify_traffic_between_dut_and_ap(
-                            self.iperf_server_on_ap, self.iperf_client_on_dut
-                        )
-                    if ipv6:
-                        self.verify_traffic_between_dut_and_ap(
-                            self.iperf_server_on_ap,
-                            self.iperf_client_on_dut,
-                            ip_version=IPV6,
-                        )
-
-            except ConnectionError as err:
-                self.log_and_continue(run, error=err)
-            else:
-                passed_count += 1
-                self.log_and_continue(run, time_to_reconnect=time_to_reconnect)
-
-        if passed_count == iterations:
-            asserts.explicit_pass(
-                "Test Summary: device successfully reconnected to network %s "
-                "%s/%s times." % (self.ssid, passed_count, iterations)
-            )
-
-        else:
-            asserts.fail(
-                "Test Summary: device failed reconnection test. Reconnected to "
-                "network %s %s/%s times." % (self.ssid, passed_count, iterations)
-            )
-
-    def generate_test_name(self, settings):
-        """Generates a test case name based on the reboot settings passed.
-
-        Args:
-            settings: A dictionary of settings related to reboot test.
-
-        Returns:
-            A string that represents a test case name.
-        """
-        test_name = (
-            "test_{reboot_type}_reboot_{reboot_device}_{band}_{security_mode}".format(
-                **settings
-            )
-        )
-
-        if settings.get(IPV4):
-            test_name += "_ipv4"
-
-        if settings.get(IPV6):
-            test_name += "_ipv6"
-
-        if settings.get("interrupt"):
-            test_name += "_interrupt"
-
-        if settings.get("iterations"):
-            test_name += f"_with_{settings['iterations']}_iterations"
-
-        return test_name
-
-    def generate_test_args(self):
-        # If custom reboot tests present in ACTS config then run just those
-        test_args = self._generate_custom_reboots_test_args()
-        if test_args:
-            return test_args
-
-        # Interrupt tests requires using iperf. So do not run interrupt tests
-        # when skip_iperf is True
-        if self.skip_iperf is True:
-            interrupts = [False]
-        else:
-            interrupts = INTERRUPTS
-
-        for (
-            reboot_device,
-            reboot_type,
-            band,
-            ip_version,
-            interrupt,
-            security_mode,
-        ) in itertools.product(
-            DEVICES, REBOOT_TYPES, BANDS, IP_VERSIONS, interrupts, SECURITY_MODES
-        ):
-            settings = {
-                "reboot_device": reboot_device,
-                "reboot_type": reboot_type,
-                "band": band,
-                "security_mode": security_mode,
-                "ipv4": ip_version["ipv4"],
-                "ipv6": ip_version["ipv6"],
-                "interrupt": interrupt,
-            }
-            test_args.append((settings,))
-
-        return test_args
-
-    def _generate_custom_reboots_test_args(self):
-        """Used to create custom reboot tests from antlion config. Can be
-        individual tests or permutation sets (i.e. setting "all" for a
-        test param will run a test with every permutation).
-
-        Parameters:
-            reboot_device: string - "ap", "dut", or "all"
-            reboot_type: string - "soft", "hard", or "all"
-            band: string, "2g" - "5g", "all"
-            ip_version: string - "ipv4", "ipv6", "both", or "all"
-            interrupt: bool - whether to have traffic flowing at reboot
-            security_modes: optional, string or list - "open", "wep", "wpa",
-                "wpa2", "wpa/wpa2", "wpa3", "wpa2/wpa3"
-            iterations: int - number of iterations for each test
-
-        Example:
-        "wlan_reboot_test_params": {
-            "test_custom_reboots": [
-                {
-                    "reboot_device": "dut",
-                    "reboot_type": "soft",
-                    "band": "2g",
-                    "ip_version": "both"
-                },
-                {
-                    "reboot_device": "all",
-                    "reboot_type": "hard",
-                    "band": "all",
-                    "ip_version": ipv4",
-                    "security_modes": "wpa2",
-                    "iterations": 10
-                },
-                {
-                    "reboot_device": "dut",
-                    "reboot_type": "hard",
-                    "band": "5g",
-                    "ip_version": "ipv4",
-                    "security_modes": ["open", "wpa3"]
-                }
-            ]
-        }
-
-        The first example runs a single DUT soft reboot test with a 2.4GHz
-        network and dual ipv4/ipv6.
-
-        The second example runs 4 tests, each with 10 iterations. It runs hard
-        reboots with ipv4 for the permutations of DUT/AP and 2.4GHz/5GHz.
-
-        The third example runs two tests, both hard reboots of the DUT with 5g
-        and ipv4 only, one with open security and one with WPA3.
-        """
-        if "test_custom_reboots" not in self.wlan_reboot_test_params:
-            self.log.info("No custom reboots provided in ACTS config.")
-            return []
-
-        test_args = []
-        for test in self.wlan_reboot_test_params["test_custom_reboots"]:
-            # Ensure required params are present
-            try:
-                reboot_device = test["reboot_device"].lower()
-                reboot_type = test["reboot_type"].lower()
-                band = test["band"].lower()
-                ip_version = test["ip_version"].lower()
-            except KeyError as err:
-                raise AttributeError(
-                    "Must provide reboot_type, reboot_device, ip_version, and "
-                    "band (optionally interrupt and iterations) in custom test "
-                    "config. See test_custom_reboots docstring for details. "
-                    "Err: %s" % err
-                )
-            security_modes = test.get("security_modes", "open")
-            interrupt = str(test.get("interrupt", False)).lower()
-            iterations = test.get("iterations", 1)
-
-            if interrupt == "true" and self.skip_iperf:
-                raise AttributeError(
-                    "Interrupt can't be set to True when iperf is disabled. "
-                    "Update 'skip_iperf' to 'false' in ACTS config and run again"
-                )
-
-            # Validate parameters and convert to lists (for permutations)
-            try:
-                reboot_devices = CUSTOM_TEST_REBOOT_DEVICES[reboot_device]
-                reboot_types = CUSTOM_TEST_REBOOT_TYPES[reboot_type]
-                bands = CUSTOM_TEST_BANDS[band]
-                ip_versions = CUSTOM_TEST_IP_VERSIONS[ip_version]
-                interrupts = CUSTOM_TEST_INTERRUPTS[interrupt]
-                if isinstance(security_modes, str):
-                    security_modes = [security_modes]
-            except KeyError as err:
-                raise AttributeError(
-                    "Invalid custom test parameter provided. Err: %s" % err
-                )
-
-            for (
-                reboot_device,
-                reboot_type,
-                band,
-                ip_version,
-                interrupt,
-                security_mode,
-            ) in itertools.product(
-                reboot_devices,
-                reboot_types,
-                bands,
-                ip_versions,
-                interrupts,
-                security_modes,
-            ):
-                settings = {
-                    "reboot_device": reboot_device,
-                    "reboot_type": reboot_type,
-                    "band": band,
-                    "security_mode": security_mode,
-                    "ipv4": ip_version[IPV4],
-                    "ipv6": ip_version[IPV6],
-                    "interrupt": interrupt,
-                    "iterations": iterations,
-                }
-
-                test_args.append((settings,))
-        return test_args
-
-    def _read_wlan_reboot_test_params(self):
-        self.wlan_reboot_test_params = self.user_params.get(
-            "wlan_reboot_test_params", {}
-        )
-        self.skip_iperf = self.wlan_reboot_test_params.get("skip_iperf", False)
-        # Times (in seconds) to wait for DUT network connection and assigning an
-        # ip address to the wlan interface.
-        self.dut_network_connection_timeout = self.wlan_reboot_test_params.get(
-            "dut_network_connection_timeout", DUT_NETWORK_CONNECTION_TIMEOUT
-        )
-        self.dut_ip_address_timeout = self.wlan_reboot_test_params.get(
-            "dut_ip_address_timeout", DUT_IP_ADDRESS_TIMEOUT
-        )
-
-
-if __name__ == "__main__":
-    test_runner.main()
diff --git a/src/antlion/tests/wlan/functional/WlanScanTest.py b/src/antlion/tests/wlan/functional/WlanScanTest.py
deleted file mode 100644
index ba6961a..0000000
--- a/src/antlion/tests/wlan/functional/WlanScanTest.py
+++ /dev/null
@@ -1,265 +0,0 @@
-#!/usr/bin/env python3.4
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-"""
-This test exercises basic scanning functionality to confirm expected behavior
-related to wlan scanning
-"""
-
-from datetime import datetime
-
-from antlion.controllers.ap_lib import hostapd_ap_preset
-from antlion.controllers.ap_lib import hostapd_bss_settings
-from antlion.controllers.ap_lib import hostapd_constants
-from antlion.controllers.ap_lib import hostapd_security
-from antlion.test_utils.wifi import base_test
-
-from mobly import signals, test_runner
-
-
-class WlanScanTest(base_test.WifiBaseTest):
-    """WLAN scan test class.
-
-    Test Bed Requirement:
-    * One or more Fuchsia devices
-    * Several Wi-Fi networks visible to the device, including an open Wi-Fi
-      network or a onHub/GoogleWifi
-    """
-
-    def setup_class(self):
-        super().setup_class()
-
-        self.access_point = self.access_points[0]
-        self.start_access_point = False
-        for fd in self.fuchsia_devices:
-            fd.configure_wlan(association_mechanism="drivers")
-        if self.access_point:
-            # This section sets up the config that could be sent to the AP if
-            # the AP is needed. The reasoning is since ACTS already connects
-            # to the AP if it is in the config, generating the config in memory
-            # has no over head is used if need by the test if one of the ssids
-            # needed for the test is not included in the config.  The logic
-            # here creates 2 ssids on each radio, 5ghz and 2.4ghz, with an
-            # open, no security network and one that is wpa2, for a total of 4
-            # networks.  However, if all of the ssids are specified in the
-            # the config will never be written to the AP and the AP will not be
-            # brought up.  For more information about how to configure the
-            # hostapd config info, see the hostapd libraries, which have more
-            # documentation.
-            bss_settings_2g = []
-            bss_settings_5g = []
-            open_network = self.get_open_network(False, [])
-            self.open_network_2g = open_network["2g"]
-            self.open_network_5g = open_network["5g"]
-            wpa2_settings = self.get_psk_network(False, [])
-            self.wpa2_network_2g = wpa2_settings["2g"]
-            self.wpa2_network_5g = wpa2_settings["5g"]
-            bss_settings_2g.append(
-                hostapd_bss_settings.BssSettings(
-                    name=self.wpa2_network_2g["SSID"],
-                    ssid=self.wpa2_network_2g["SSID"],
-                    security=hostapd_security.Security(
-                        security_mode=self.wpa2_network_2g["security"],
-                        password=self.wpa2_network_2g["password"],
-                    ),
-                )
-            )
-            bss_settings_5g.append(
-                hostapd_bss_settings.BssSettings(
-                    name=self.wpa2_network_5g["SSID"],
-                    ssid=self.wpa2_network_5g["SSID"],
-                    security=hostapd_security.Security(
-                        security_mode=self.wpa2_network_5g["security"],
-                        password=self.wpa2_network_5g["password"],
-                    ),
-                )
-            )
-            self.ap_2g = hostapd_ap_preset.create_ap_preset(
-                iface_wlan_2g=self.access_point.wlan_2g,
-                iface_wlan_5g=self.access_point.wlan_5g,
-                channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-                ssid=self.open_network_2g["SSID"],
-                bss_settings=bss_settings_2g,
-            )
-            self.ap_5g = hostapd_ap_preset.create_ap_preset(
-                iface_wlan_2g=self.access_point.wlan_2g,
-                iface_wlan_5g=self.access_point.wlan_5g,
-                channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                ssid=self.open_network_5g["SSID"],
-                bss_settings=bss_settings_5g,
-            )
-
-        if "wlan_open_network_2g" in self.user_params:
-            self.open_network_2g = self.user_params.get("wlan_open_network_2g")
-        elif self.access_point:
-            self.start_access_point_2g = True
-        else:
-            raise Exception("Missing parameter in config " "(wlan_open_network_2g)")
-
-        if "wlan_open_network_5g" in self.user_params:
-            self.open_network_5g = self.user_params.get("wlan_open_network_5g")
-        elif self.access_point:
-            self.start_access_point_5g = True
-        else:
-            raise Exception("Missing parameter in config " "(wlan_open_network_5g)")
-
-        if "wlan_wpa2_network_2g" in self.user_params:
-            self.wpa2_network_2g = self.user_params.get("wlan_wpa2_network_2g")
-        elif self.access_point:
-            self.start_access_point_2g = True
-        else:
-            raise Exception("Missing parameter in config " "(wlan_wpa2_network_2g)")
-
-        if "wlan_wpa2_network_5g" in self.user_params:
-            self.wpa2_network_5g = self.user_params.get("wlan_wpa2_network_5g")
-        elif self.access_point:
-            self.start_access_point_5g = True
-        else:
-            raise Exception("Missing parameter in config " "(wlan_wpa2_network_5g)")
-
-        # Only bring up the APs that are needed for the test.  Each ssid is
-        # randomly generated so there is no chance of re associating to a
-        # previously saved ssid on the device.
-        if self.start_access_point_2g:
-            self.start_access_point = True
-            self.access_point.start_ap(hostapd_config=self.ap_2g)
-        if self.start_access_point_5g:
-            self.start_access_point = True
-            self.access_point.start_ap(hostapd_config=self.ap_5g)
-
-    def setup_test(self):
-        for fd in self.fuchsia_devices:
-            # stub for setting up all the fuchsia devices in the testbed.
-            pass
-
-    def teardown_test(self):
-        for fd in self.fuchsia_devices:
-            fd.sl4f.wlan_lib.wlanDisconnect()
-
-    def teardown_class(self):
-        if self.start_access_point:
-            self.download_ap_logs()
-            self.access_point.stop_all_aps()
-
-    def on_fail(self, test_name, begin_time):
-        for fd in self.fuchsia_devices:
-            super().on_device_fail(fd, test_name, begin_time)
-            fd.configure_wlan(association_mechanism="drivers")
-
-    """Helper Functions"""
-
-    def check_connect_response(self, connection_response):
-        """Checks the result of connecting to a wlan.
-        Args:
-            connection_response: The response from SL4F after attempting
-                to connect to a wlan.
-        """
-        if connection_response.get("error") is None:
-            # the command did not get an error response - go ahead and
-            # check the result
-            connection_result = connection_response.get("result")
-            if connection_result:
-                self.log.info("connection to network successful")
-            else:
-                # ideally, we would have the actual error...  but logging
-                # here to cover that error case
-                raise signals.TestFailure("Connect call failed, aborting test")
-        else:
-            # the response indicates an error - log and raise failure
-            raise signals.TestFailure(
-                "Aborting test - Connect call failed "
-                "with error: %s" % connection_response.get("error")
-            )
-
-    def scan_while_connected(self, wlan_network_params, fd):
-        """Connects to as specified network and initiates a scan
-        Args:
-            wlan_network_params: A dictionary containing wlan
-                infomation.
-            fd: The fuchsia device to connect to the wlan.
-        """
-        target_ssid = wlan_network_params["SSID"]
-        self.log.info("got the ssid! %s", target_ssid)
-        target_pwd = None
-        if "password" in wlan_network_params:
-            target_pwd = wlan_network_params["password"]
-
-        bss_scan_response = fd.sl4f.wlan_lib.wlanScanForBSSInfo().get("result")
-        connection_response = fd.sl4f.wlan_lib.wlanConnectToNetwork(
-            target_ssid, bss_scan_response[target_ssid][0], target_pwd=target_pwd
-        )
-        self.check_connect_response(connection_response)
-        self.basic_scan_request(fd)
-
-    def basic_scan_request(self, fd):
-        """Initiates a basic scan on a Fuchsia device
-        Args:
-            fd: A fuchsia device
-        """
-        start_time = datetime.now()
-
-        scan_response = fd.sl4f.wlan_lib.wlanStartScan()
-
-        # first check if we received an error
-        if scan_response.get("error") is None:
-            # the scan command did not get an error response - go ahead
-            # and check for scan results
-            scan_results = scan_response["result"]
-        else:
-            # the response indicates an error - log and raise failure
-            raise signals.TestFailure(
-                "Aborting test - scan failed with "
-                "error: %s" % scan_response.get("error")
-            )
-
-        self.log.info("scan contained %d results", len(scan_results))
-
-        total_time_ms = (datetime.now() - start_time).total_seconds() * 1000
-        self.log.info("scan time: %d ms", total_time_ms)
-
-        if len(scan_results) > 0:
-            raise signals.TestPass(
-                details="", extras={"Scan time": "%d" % total_time_ms}
-            )
-        else:
-            raise signals.TestFailure("Scan failed or did not " "find any networks")
-
-    """Tests"""
-
-    def test_basic_scan_request(self):
-        """Verify a general scan trigger returns at least one result"""
-        for fd in self.fuchsia_devices:
-            self.basic_scan_request(fd)
-
-    def test_scan_while_connected_open_network_2g(self):
-        for fd in self.fuchsia_devices:
-            self.scan_while_connected(self.open_network_2g, fd)
-
-    def test_scan_while_connected_wpa2_network_2g(self):
-        for fd in self.fuchsia_devices:
-            self.scan_while_connected(self.wpa2_network_2g, fd)
-
-    def test_scan_while_connected_open_network_5g(self):
-        for fd in self.fuchsia_devices:
-            self.scan_while_connected(self.open_network_5g, fd)
-
-    def test_scan_while_connected_wpa2_network_5g(self):
-        for fd in self.fuchsia_devices:
-            self.scan_while_connected(self.wpa2_network_5g, fd)
-
-
-if __name__ == "__main__":
-    test_runner.main()
diff --git a/src/antlion/tests/wlan/functional/WlanTargetSecurityTest.py b/src/antlion/tests/wlan/functional/WlanTargetSecurityTest.py
deleted file mode 100644
index 6cfdc0a..0000000
--- a/src/antlion/tests/wlan/functional/WlanTargetSecurityTest.py
+++ /dev/null
@@ -1,390 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion import utils
-from antlion.controllers.access_point import setup_ap
-from antlion.controllers.ap_lib import hostapd_constants
-from antlion.controllers.ap_lib.hostapd_security import Security
-from antlion.test_utils.wifi import base_test
-from antlion.test_utils.abstract_devices.wlan_device import create_wlan_device
-
-from mobly import asserts, test_runner
-
-
-# TODO(fxb/68956): Add security protocol check to mixed mode tests when info is
-# available.
-class WlanTargetSecurityTest(base_test.WifiBaseTest):
-    """Tests Fuchsia's target security concept and security upgrading
-
-    Testbed Requirements:
-    * One Fuchsia device
-    * One Whirlwind Access Point
-    """
-
-    def setup_class(self):
-        if "dut" in self.user_params and self.user_params["dut"] != "fuchsia_devices":
-            raise AttributeError(
-                "WlanTargetSecurityTest is only relevant for Fuchsia devices."
-            )
-
-        self.dut = create_wlan_device(self.fuchsia_devices[0])
-        if self.dut.device.association_mechanism != "policy":
-            raise AttributeError("Must use WLAN policy layer to test target security.")
-
-        self.access_point = self.access_points[0]
-
-    def teardown_class(self):
-        self.dut.disconnect()
-        self.access_point.stop_all_aps()
-
-    def teardown_test(self):
-        self.dut.disconnect()
-        self.download_ap_logs()
-        self.access_point.stop_all_aps()
-
-    def on_fail(self, test_name, begin_time):
-        super().on_fail(test_name, begin_time)
-        self.access_point.stop_all_aps()
-
-    def on_exception(self, test_name, begin_time):
-        super().on_exception(test_name, begin_time)
-        self.dut.disconnect()
-        self.access_point.stop_all_aps()
-
-    def setup_ap(self, security_mode=None):
-        """Sets up an AP using the provided security mode.
-
-        Args:
-            security_mode: string, security mode for AP
-        Returns:
-            Tuple, (ssid, password). Returns a password even if for open
-                security, since non-open target securities require a credential
-                to attempt a connection.
-        """
-        ssid = utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_5G)
-        # Length 13, so it can be used for WEP or WPA
-        password = utils.rand_ascii_str(13)
-        security_profile = None
-
-        if security_mode:
-            security_profile = Security(security_mode=security_mode, password=password)
-
-        setup_ap(
-            access_point=self.access_point,
-            profile_name="whirlwind",
-            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-            ssid=ssid,
-            security=security_profile,
-        )
-
-        return (ssid, password)
-
-    # Open Security on AP
-    def test_associate_open_ap_with_open_target_security(self):
-        ssid, _ = self.setup_ap()
-        asserts.assert_true(self.dut.associate(ssid), "Failed to associate.")
-
-    def test_reject_open_ap_with_wep_target_security(self):
-        ssid, password = self.setup_ap()
-        asserts.assert_false(
-            self.dut.associate(
-                ssid, target_security=hostapd_constants.WEP_STRING, target_pwd=password
-            ),
-            "Should not have associated.",
-        )
-
-    def test_reject_open_ap_with_wpa_target_security(self):
-        ssid, password = self.setup_ap()
-        asserts.assert_false(
-            self.dut.associate(
-                ssid, target_security=hostapd_constants.WPA_STRING, target_pwd=password
-            ),
-            "Should not have associated.",
-        )
-
-    def test_reject_open_ap_with_wpa2_target_security(self):
-        ssid, password = self.setup_ap()
-        asserts.assert_false(
-            self.dut.associate(
-                ssid, target_security=hostapd_constants.WPA2_STRING, target_pwd=password
-            ),
-            "Should not have associated.",
-        )
-
-    def test_reject_open_ap_with_wpa3_target_security(self):
-        ssid, password = self.setup_ap()
-        asserts.assert_false(
-            self.dut.associate(
-                ssid, target_security=hostapd_constants.WPA3_STRING, target_pwd=password
-            ),
-            "Should not have associated.",
-        )
-
-    # WEP Security on AP
-    def test_reject_wep_ap_with_open_target_security(self):
-        ssid, _ = self.setup_ap(hostapd_constants.WEP_STRING)
-        asserts.assert_false(self.dut.associate(ssid), "Should not have associated.")
-
-    def test_associate_wep_ap_with_wep_target_security(self):
-        ssid, password = self.setup_ap(hostapd_constants.WEP_STRING)
-        asserts.assert_true(
-            self.dut.associate(
-                ssid, target_security=hostapd_constants.WEP_STRING, target_pwd=password
-            ),
-            "Failed to associate.",
-        )
-
-    def test_reject_wep_ap_with_wpa_target_security(self):
-        ssid, password = self.setup_ap(hostapd_constants.WEP_STRING)
-        asserts.assert_false(
-            self.dut.associate(
-                ssid, target_security=hostapd_constants.WPA_STRING, target_pwd=password
-            ),
-            "Should not have associated.",
-        )
-
-    def test_reject_wep_ap_with_wpa2_target_security(self):
-        ssid, password = self.setup_ap(hostapd_constants.WEP_STRING)
-        asserts.assert_false(
-            self.dut.associate(
-                ssid, target_security=hostapd_constants.WPA2_STRING, target_pwd=password
-            ),
-            "Should not have associated.",
-        )
-
-    def test_reject_wep_ap_with_wpa3_target_security(self):
-        ssid, password = self.setup_ap(hostapd_constants.WEP_STRING)
-        asserts.assert_false(
-            self.dut.associate(
-                ssid, target_security=hostapd_constants.WPA3_STRING, target_pwd=password
-            ),
-            "Should not have associated.",
-        )
-
-    # WPA Security on AP
-    def test_reject_wpa_ap_with_open_target_security(self):
-        ssid, _ = self.setup_ap(hostapd_constants.WPA_STRING)
-        asserts.assert_false(self.dut.associate(ssid), "Should not have associated.")
-
-    def test_reject_wpa_ap_with_wep_target_security(self):
-        ssid, password = self.setup_ap(hostapd_constants.WPA_STRING)
-        asserts.assert_false(
-            self.dut.associate(
-                ssid, target_security=hostapd_constants.WEP_STRING, target_pwd=password
-            ),
-            "Should not have associated.",
-        )
-
-    def test_associate_wpa_ap_with_wpa_target_security(self):
-        ssid, password = self.setup_ap(hostapd_constants.WPA_STRING)
-        asserts.assert_true(
-            self.dut.associate(
-                ssid, target_security=hostapd_constants.WPA_STRING, target_pwd=password
-            ),
-            "Failed to associate.",
-        )
-
-    def test_reject_wpa_ap_with_wpa2_target_security(self):
-        ssid, password = self.setup_ap(hostapd_constants.WPA_STRING)
-        asserts.assert_false(
-            self.dut.associate(
-                ssid, target_security=hostapd_constants.WPA2_STRING, target_pwd=password
-            ),
-            "Should not have associated.",
-        )
-
-    def test_reject_wpa_ap_with_wpa3_target_security(self):
-        ssid, password = self.setup_ap(hostapd_constants.WPA_STRING)
-        asserts.assert_false(
-            self.dut.associate(
-                ssid, target_security=hostapd_constants.WPA3_STRING, target_pwd=password
-            ),
-            "Should not have associated.",
-        )
-
-    # WPA2 Security on AP
-    def test_reject_wpa2_ap_with_open_target_security(self):
-        ssid, _ = self.setup_ap(hostapd_constants.WPA2_STRING)
-        asserts.assert_false(self.dut.associate(ssid), "Should not have associated.")
-
-    def test_reject_wpa2_ap_with_wep_target_security(self):
-        ssid, password = self.setup_ap(hostapd_constants.WPA2_STRING)
-        asserts.assert_false(
-            self.dut.associate(
-                ssid, target_security=hostapd_constants.WEP_STRING, target_pwd=password
-            ),
-            "Should not have associated.",
-        )
-
-    def test_associate_wpa2_ap_with_wpa_target_security(self):
-        ssid, password = self.setup_ap(hostapd_constants.WPA2_STRING)
-        asserts.assert_true(
-            self.dut.associate(
-                ssid, target_security=hostapd_constants.WPA_STRING, target_pwd=password
-            ),
-            "Failed to associate.",
-        )
-
-    def test_associate_wpa2_ap_with_wpa2_target_security(self):
-        ssid, password = self.setup_ap(hostapd_constants.WPA2_STRING)
-        asserts.assert_true(
-            self.dut.associate(
-                ssid, target_security=hostapd_constants.WPA2_STRING, target_pwd=password
-            ),
-            "Failed to associate.",
-        )
-
-    def test_reject_wpa2_ap_with_wpa3_target_security(self):
-        ssid, password = self.setup_ap(hostapd_constants.WPA2_STRING)
-        asserts.assert_false(
-            self.dut.associate(
-                ssid, target_security=hostapd_constants.WPA3_STRING, target_pwd=password
-            ),
-            "Should not have associated.",
-        )
-
-    # WPA/WPA2 Security on AP
-    def test_reject_wpa_wpa2_ap_with_open_target_security(self):
-        ssid, _ = self.setup_ap(hostapd_constants.WPA_MIXED_STRING)
-        asserts.assert_false(self.dut.associate(ssid), "Should not have associated.")
-
-    def test_reject_wpa_wpa2_ap_with_wep_target_security(self):
-        ssid, password = self.setup_ap(hostapd_constants.WPA_MIXED_STRING)
-        asserts.assert_false(
-            self.dut.associate(
-                ssid, target_security=hostapd_constants.WEP_STRING, target_pwd=password
-            ),
-            "Should not have associated.",
-        )
-
-    def test_associate_wpa_wpa2_ap_with_wpa_target_security(self):
-        ssid, password = self.setup_ap(hostapd_constants.WPA_MIXED_STRING)
-        asserts.assert_true(
-            self.dut.associate(
-                ssid, target_security=hostapd_constants.WPA_STRING, target_pwd=password
-            ),
-            "Failed to associate.",
-        )
-
-    def test_associate_wpa_wpa2_ap_with_wpa2_target_security(self):
-        ssid, password = self.setup_ap(hostapd_constants.WPA_MIXED_STRING)
-        asserts.assert_true(
-            self.dut.associate(
-                ssid, target_security=hostapd_constants.WPA2_STRING, target_pwd=password
-            ),
-            "Failed to associate.",
-        )
-
-    def test_reject_wpa_wpa2_ap_with_wpa3_target_security(self):
-        ssid, password = self.setup_ap(hostapd_constants.WPA_MIXED_STRING)
-        asserts.assert_false(
-            self.dut.associate(
-                ssid, target_security=hostapd_constants.WPA3_STRING, target_pwd=password
-            ),
-            "Should not have associated.",
-        )
-
-    # WPA3 Security on AP
-    def test_reject_wpa3_ap_with_open_target_security(self):
-        ssid, _ = self.setup_ap(hostapd_constants.WPA3_STRING)
-        asserts.assert_false(self.dut.associate(ssid), "Should not have associated.")
-
-    def test_reject_wpa3_ap_with_wep_target_security(self):
-        ssid, password = self.setup_ap(hostapd_constants.WPA3_STRING)
-        asserts.assert_false(
-            self.dut.associate(
-                ssid, target_security=hostapd_constants.WEP_STRING, target_pwd=password
-            ),
-            "Should not have associated.",
-        )
-
-    def test_associate_wpa3_ap_with_wpa_target_security(self):
-        ssid, password = self.setup_ap(hostapd_constants.WPA3_STRING)
-        asserts.assert_false(
-            self.dut.associate(
-                ssid, target_security=hostapd_constants.WPA_STRING, target_pwd=password
-            ),
-            "Expected failure to associate. WPA credentials for WPA3 was "
-            "temporarily disabled, see https://fxbug.dev/85817 for context. "
-            "If this feature was reenabled, please update this test's "
-            "expectation.",
-        )
-
-    def test_associate_wpa3_ap_with_wpa2_target_security(self):
-        ssid, password = self.setup_ap(hostapd_constants.WPA3_STRING)
-        asserts.assert_true(
-            self.dut.associate(
-                ssid, target_security=hostapd_constants.WPA2_STRING, target_pwd=password
-            ),
-            "Failed to associate.",
-        )
-
-    def test_associate_wpa3_ap_with_wpa3_target_security(self):
-        ssid, password = self.setup_ap(hostapd_constants.WPA3_STRING)
-        asserts.assert_true(
-            self.dut.associate(
-                ssid, target_security=hostapd_constants.WPA3_STRING, target_pwd=password
-            ),
-            "Failed to associate.",
-        )
-
-    # WPA2/WPA3 Security on AP
-    def test_reject_wpa2_wpa3_ap_with_open_target_security(self):
-        ssid, _ = self.setup_ap(hostapd_constants.WPA2_WPA3_MIXED_STRING)
-        asserts.assert_false(self.dut.associate(ssid), "Should not have associated.")
-
-    def test_reject_wpa2_wpa3_ap_with_wep_target_security(self):
-        ssid, password = self.setup_ap(hostapd_constants.WPA2_WPA3_MIXED_STRING)
-        asserts.assert_false(
-            self.dut.associate(
-                ssid, target_security=hostapd_constants.WEP_STRING, target_pwd=password
-            ),
-            "Should not have associated.",
-        )
-
-    def test_associate_wpa2_wpa3_ap_with_wpa_target_security(self):
-        ssid, password = self.setup_ap(hostapd_constants.WPA2_WPA3_MIXED_STRING)
-        asserts.assert_false(
-            self.dut.associate(
-                ssid, target_security=hostapd_constants.WPA_STRING, target_pwd=password
-            ),
-            "Expected failure to associate. WPA credentials for WPA3 was "
-            "temporarily disabled, see https://fxbug.dev/85817 for context. "
-            "If this feature was reenabled, please update this test's "
-            "expectation.",
-        )
-
-    def test_associate_wpa2_wpa3_ap_with_wpa2_target_security(self):
-        ssid, password = self.setup_ap(hostapd_constants.WPA2_WPA3_MIXED_STRING)
-        asserts.assert_true(
-            self.dut.associate(
-                ssid, target_security=hostapd_constants.WPA2_STRING, target_pwd=password
-            ),
-            "Failed to associate.",
-        )
-
-    def test_associate_wpa2_wpa3_ap_with_wpa3_target_security(self):
-        ssid, password = self.setup_ap(hostapd_constants.WPA2_WPA3_MIXED_STRING)
-        asserts.assert_true(
-            self.dut.associate(
-                ssid, target_security=hostapd_constants.WPA3_STRING, target_pwd=password
-            ),
-            "Failed to associate.",
-        )
-
-
-if __name__ == "__main__":
-    test_runner.main()
diff --git a/src/antlion/tests/wlan/functional/WlanWirelessNetworkManagementTest.py b/src/antlion/tests/wlan/functional/WlanWirelessNetworkManagementTest.py
deleted file mode 100644
index 4b0e9b0..0000000
--- a/src/antlion/tests/wlan/functional/WlanWirelessNetworkManagementTest.py
+++ /dev/null
@@ -1,421 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import time
-
-from datetime import datetime, timedelta, timezone
-from typing import FrozenSet, Optional
-
-from antlion import utils
-from antlion.controllers.access_point import setup_ap
-from antlion.controllers.ap_lib import hostapd_constants
-from antlion.controllers.ap_lib.hostapd_security import Security
-from antlion.controllers.ap_lib.radio_measurement import (
-    BssidInformation,
-    BssidInformationCapabilities,
-    NeighborReportElement,
-    PhyType,
-)
-from antlion.controllers.ap_lib.wireless_network_management import (
-    BssTransitionManagementRequest,
-)
-from antlion.test_utils.abstract_devices.wlan_device import create_wlan_device
-from antlion.test_utils.wifi import base_test
-
-from mobly import asserts, signals, test_runner
-
-
-# Antlion can see (via the wlan_features config directive) whether WNM features
-# are enabled, and runs or skips tests depending on presence of WNM features.
-class WlanWirelessNetworkManagementTest(base_test.WifiBaseTest):
-    """Tests Fuchsia's Wireless Network Management (AKA 802.11v) support.
-
-    Testbed Requirements:
-    * One Fuchsia device
-    * One Whirlwind access point
-
-    Existing Fuchsia drivers do not yet support WNM features out-of-the-box, so these
-    tests check that WNM features are not enabled.
-    """
-
-    def setup_class(self):
-        if "dut" in self.user_params and self.user_params["dut"] != "fuchsia_devices":
-            raise AttributeError(
-                "WlanWirelessNetworkManagementTest is only relevant for Fuchsia devices."
-            )
-
-        self.dut = create_wlan_device(self.fuchsia_devices[0])
-        if self.dut.device.association_mechanism != "policy":
-            raise AttributeError("Must use WLAN policy layer to test WNM.")
-        self.access_point = self.access_points[0]
-
-    def teardown_class(self):
-        self.dut.disconnect()
-        self.access_point.stop_all_aps()
-
-    def teardown_test(self):
-        self.dut.disconnect()
-        self.download_ap_logs()
-        self.access_point.stop_all_aps()
-
-    def on_fail(self, test_name: str, begin_time: str):
-        super().on_fail(test_name, begin_time)
-        self.access_point.stop_all_aps()
-
-    def on_exception(self, test_name: str, begin_time: str):
-        super().on_exception(test_name, begin_time)
-        self.dut.disconnect()
-        self.access_point.stop_all_aps()
-
-    def setup_ap(
-        self,
-        ssid: str,
-        security_mode: Optional[str] = None,
-        additional_ap_parameters: Optional[dict] = None,
-        channel: int = hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-        wnm_features: FrozenSet[hostapd_constants.WnmFeature] = frozenset(),
-    ):
-        """Sets up an AP using the provided parameters.
-
-        Args:
-            ssid: SSID for the AP.
-            security_mode: expressed as string (e.g. WPA2, default is None
-                which indicates open security).
-            additional_ap_parameters: A dictionary of parameters that can sent
-                directly into the hostapd config file.
-            channel: which channel number to set the AP to (default is
-                AP_DEFAULT_CHANNEL_2G).
-            wnm_features: Wireless Network Management features to enable
-                (default is no WNM features).
-        """
-        setup_ap(
-            access_point=self.access_point,
-            profile_name="whirlwind",
-            channel=channel,
-            ssid=ssid,
-            security=Security(security_mode),
-            additional_ap_parameters=additional_ap_parameters,
-            wnm_features=wnm_features,
-        )
-
-    def _get_client_mac(self) -> str:
-        """Get the MAC address of the DUT client interface.
-
-        Returns:
-            str, MAC address of the DUT client interface.
-        Raises:
-            ValueError if there is no DUT client interface.
-            ConnectionError if the DUT interface query fails.
-        """
-        wlan_ifaces = self.dut.device.sl4f.wlan_lib.wlanGetIfaceIdList()
-        if wlan_ifaces.get("error"):
-            raise ConnectionError(
-                "Failed to get wlan interface IDs: %s" % wlan_ifaces["error"]
-            )
-
-        for wlan_iface in wlan_ifaces["result"]:
-            iface_info = self.dut.device.sl4f.wlan_lib.wlanQueryInterface(wlan_iface)
-            if iface_info.get("error"):
-                raise ConnectionError(
-                    "Failed to query wlan iface: %s" % iface_info["error"]
-                )
-
-            if iface_info["result"]["role"] == "Client":
-                return utils.mac_address_list_to_str(iface_info["result"]["sta_addr"])
-        raise ValueError(
-            "Failed to get client interface mac address. No client interface found."
-        )
-
-    def test_bss_transition_is_not_advertised_when_ap_supported_dut_unsupported(self):
-        if self.dut.feature_is_present("BSS_TRANSITION_MANAGEMENT"):
-            raise signals.TestSkip("skipping test because BTM feature is present")
-
-        ssid = utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G)
-        wnm_features = frozenset(
-            [hostapd_constants.WnmFeature.BSS_TRANSITION_MANAGEMENT]
-        )
-        self.setup_ap(ssid, wnm_features=wnm_features)
-        asserts.assert_true(self.dut.associate(ssid), "Failed to associate.")
-        asserts.assert_true(self.dut.is_connected(), "Failed to connect.")
-        client_mac = self._get_client_mac()
-
-        ext_capabilities = self.access_point.get_sta_extended_capabilities(
-            self.access_point.wlan_2g, client_mac
-        )
-        asserts.assert_false(
-            ext_capabilities.bss_transition,
-            "DUT is incorrectly advertising BSS Transition Management support",
-        )
-
-    def test_bss_transition_is_advertised_when_ap_supported_dut_supported(self):
-        if not self.dut.feature_is_present("BSS_TRANSITION_MANAGEMENT"):
-            raise signals.TestSkip("skipping test because BTM feature is not present")
-
-        ssid = utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G)
-        wnm_features = frozenset(
-            [hostapd_constants.WnmFeature.BSS_TRANSITION_MANAGEMENT]
-        )
-        self.setup_ap(ssid, wnm_features=wnm_features)
-        asserts.assert_true(self.dut.associate(ssid), "Failed to associate.")
-        asserts.assert_true(self.dut.is_connected(), "Failed to connect.")
-        client_mac = self._get_client_mac()
-
-        ext_capabilities = self.access_point.get_sta_extended_capabilities(
-            self.access_point.wlan_2g, client_mac
-        )
-        asserts.assert_true(
-            ext_capabilities.bss_transition,
-            "DUT is not advertising BSS Transition Management support",
-        )
-
-    def test_wnm_sleep_mode_is_not_advertised_when_ap_supported_dut_unsupported(self):
-        ssid = utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G)
-        wnm_features = frozenset([hostapd_constants.WnmFeature.WNM_SLEEP_MODE])
-        self.setup_ap(ssid, wnm_features=wnm_features)
-        asserts.assert_true(self.dut.associate(ssid), "Failed to associate.")
-        asserts.assert_true(self.dut.is_connected(), "Failed to connect.")
-        client_mac = self._get_client_mac()
-
-        ext_capabilities = self.access_point.get_sta_extended_capabilities(
-            self.access_point.wlan_2g, client_mac
-        )
-        asserts.assert_false(
-            ext_capabilities.wnm_sleep_mode,
-            "DUT is incorrectly advertising WNM Sleep Mode support",
-        )
-
-    def test_roam_on_btm_req(self):
-        if not self.dut.feature_is_present("BSS_TRANSITION_MANAGEMENT"):
-            raise signals.TestSkip("skipping test because BTM feature is not present")
-
-        ssid = utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G)
-        wnm_features = frozenset(
-            [hostapd_constants.WnmFeature.BSS_TRANSITION_MANAGEMENT]
-        )
-        # Setup 2.4 GHz AP.
-        self.setup_ap(
-            ssid,
-            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-            wnm_features=wnm_features,
-        )
-
-        asserts.assert_true(self.dut.associate(ssid), "Failed to associate.")
-        # Verify that DUT is actually associated (as seen from AP).
-        client_mac = self._get_client_mac()
-        asserts.assert_true(
-            client_mac in self.access_point.get_stas(self.access_point.wlan_2g),
-            "Client MAC not included in list of associated STAs on the 2.4GHz band",
-        )
-
-        # Setup 5 GHz AP with same SSID.
-        self.setup_ap(
-            ssid,
-            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-            wnm_features=wnm_features,
-        )
-
-        # Construct a BTM request.
-        dest_bssid = self.access_point.get_bssid_from_ssid(
-            ssid, self.access_point.wlan_5g
-        )
-        dest_bssid_info = BssidInformation(
-            security=True, capabilities=BssidInformationCapabilities()
-        )
-        neighbor_5g_ap = NeighborReportElement(
-            dest_bssid,
-            dest_bssid_info,
-            operating_class=126,
-            channel_number=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-            phy_type=PhyType.VHT,
-        )
-        btm_req = BssTransitionManagementRequest(
-            preferred_candidate_list_included=True,
-            disassociation_imminent=True,
-            candidate_list=[neighbor_5g_ap],
-        )
-
-        # Sleep to avoid concurrent scan during reassociation, necessary due to a firmware bug.
-        # TODO(fxbug.dev/117517) Remove when fixed, or when non-firmware BTM support is merged.
-        time.sleep(5)
-
-        # Send BTM request from 2.4 GHz AP to DUT
-        self.access_point.send_bss_transition_management_req(
-            self.access_point.wlan_2g, client_mac, btm_req
-        )
-
-        # Check that DUT has reassociated.
-        REASSOC_DEADLINE = datetime.now(timezone.utc) + timedelta(seconds=2)
-        while datetime.now(timezone.utc) < REASSOC_DEADLINE:
-            if client_mac in self.access_point.get_stas(self.access_point.wlan_5g):
-                break
-            else:
-                time.sleep(0.25)
-
-        # Verify that DUT roamed (as seen from AP).
-        asserts.assert_true(
-            client_mac in self.access_point.get_stas(self.access_point.wlan_5g),
-            "Client MAC not included in list of associated STAs on the 5GHz band",
-        )
-
-    def test_btm_req_ignored_dut_unsupported(self):
-        if self.dut.feature_is_present("BSS_TRANSITION_MANAGEMENT"):
-            raise signals.TestSkip("skipping test because BTM feature is present")
-
-        ssid = utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G)
-        wnm_features = frozenset(
-            [hostapd_constants.WnmFeature.BSS_TRANSITION_MANAGEMENT]
-        )
-        # Setup 2.4 GHz AP.
-        self.setup_ap(
-            ssid,
-            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-            wnm_features=wnm_features,
-        )
-
-        asserts.assert_true(self.dut.associate(ssid), "Failed to associate.")
-        # Verify that DUT is actually associated (as seen from AP).
-        client_mac = self._get_client_mac()
-        asserts.assert_true(
-            client_mac in self.access_point.get_stas(self.access_point.wlan_2g),
-            "Client MAC not included in list of associated STAs on the 2.4GHz band",
-        )
-
-        # Setup 5 GHz AP with same SSID.
-        self.setup_ap(
-            ssid,
-            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-            wnm_features=wnm_features,
-        )
-
-        # Construct a BTM request.
-        dest_bssid = self.access_point.get_bssid_from_ssid(
-            ssid, self.access_point.wlan_5g
-        )
-        dest_bssid_info = BssidInformation(
-            security=True, capabilities=BssidInformationCapabilities()
-        )
-        neighbor_5g_ap = NeighborReportElement(
-            dest_bssid,
-            dest_bssid_info,
-            operating_class=126,
-            channel_number=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-            phy_type=PhyType.VHT,
-        )
-        btm_req = BssTransitionManagementRequest(
-            disassociation_imminent=True, candidate_list=[neighbor_5g_ap]
-        )
-
-        # Send BTM request from 2.4 GHz AP to DUT
-        self.access_point.send_bss_transition_management_req(
-            self.access_point.wlan_2g, client_mac, btm_req
-        )
-
-        # Check that DUT has not reassociated.
-        REASSOC_DEADLINE = datetime.now(timezone.utc) + timedelta(seconds=2)
-        while datetime.now(timezone.utc) < REASSOC_DEADLINE:
-            # Fail if DUT has reassociated to 5 GHz AP (as seen from AP).
-            if client_mac in self.access_point.get_stas(self.access_point.wlan_5g):
-                raise signals.TestFailure(
-                    "DUT unexpectedly roamed to target BSS after BTM request"
-                )
-            else:
-                time.sleep(0.25)
-
-        # DUT should have stayed associated to original AP.
-        asserts.assert_true(
-            client_mac in self.access_point.get_stas(self.access_point.wlan_2g),
-            "DUT lost association on the 2.4GHz band after BTM request",
-        )
-
-    def test_btm_req_target_ap_rejects_reassoc(self):
-        if not self.dut.feature_is_present("BSS_TRANSITION_MANAGEMENT"):
-            raise signals.TestSkip("skipping test because BTM feature is not present")
-
-        ssid = utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G)
-        wnm_features = frozenset(
-            [hostapd_constants.WnmFeature.BSS_TRANSITION_MANAGEMENT]
-        )
-        # Setup 2.4 GHz AP.
-        self.setup_ap(
-            ssid,
-            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-            wnm_features=wnm_features,
-        )
-
-        asserts.assert_true(self.dut.associate(ssid), "Failed to associate.")
-        # Verify that DUT is actually associated (as seen from AP).
-        client_mac = self._get_client_mac()
-        asserts.assert_true(
-            client_mac in self.access_point.get_stas(self.access_point.wlan_2g),
-            "Client MAC not included in list of associated STAs on the 2.4GHz band",
-        )
-
-        # Setup 5 GHz AP with same SSID, but reject all STAs.
-        reject_all_sta_param = {"max_num_sta": 0}
-        self.setup_ap(
-            ssid,
-            additional_ap_parameters=reject_all_sta_param,
-            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-            wnm_features=wnm_features,
-        )
-
-        # Construct a BTM request.
-        dest_bssid = self.access_point.get_bssid_from_ssid(
-            ssid, self.access_point.wlan_5g
-        )
-        dest_bssid_info = BssidInformation(
-            security=True, capabilities=BssidInformationCapabilities()
-        )
-        neighbor_5g_ap = NeighborReportElement(
-            dest_bssid,
-            dest_bssid_info,
-            operating_class=126,
-            channel_number=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-            phy_type=PhyType.VHT,
-        )
-        btm_req = BssTransitionManagementRequest(
-            disassociation_imminent=True, candidate_list=[neighbor_5g_ap]
-        )
-
-        # Sleep to avoid concurrent scan during reassociation, necessary due to a firmware bug.
-        # TODO(fxbug.dev/117517) Remove when fixed, or when non-firmware BTM support is merged.
-        time.sleep(5)
-
-        # Send BTM request from 2.4 GHz AP to DUT
-        self.access_point.send_bss_transition_management_req(
-            self.access_point.wlan_2g, client_mac, btm_req
-        )
-
-        # Check that DUT has not reassociated.
-        REASSOC_DEADLINE = datetime.now(timezone.utc) + timedelta(seconds=2)
-        while datetime.now(timezone.utc) < REASSOC_DEADLINE:
-            # Fail if DUT has reassociated to 5 GHz AP (as seen from AP).
-            if client_mac in self.access_point.get_stas(self.access_point.wlan_5g):
-                raise signals.TestFailure(
-                    "DUT unexpectedly roamed to target BSS after BTM request"
-                )
-            else:
-                time.sleep(0.25)
-
-        # DUT should have stayed associated to original AP.
-        asserts.assert_true(
-            client_mac in self.access_point.get_stas(self.access_point.wlan_2g),
-            "DUT lost association on the 2.4GHz band after BTM request",
-        )
-
-
-if __name__ == "__main__":
-    test_runner.main()
diff --git a/src/antlion/tests/wlan/functional/wlan_reboot_test_params.yaml b/src/antlion/tests/wlan/functional/wlan_reboot_test_params.yaml
deleted file mode 100644
index c559bbc..0000000
--- a/src/antlion/tests/wlan/functional/wlan_reboot_test_params.yaml
+++ /dev/null
@@ -1,2 +0,0 @@
-wlan_reboot_test_params:
-  skip_iperf: false
diff --git a/src/antlion/tests/wlan/misc/WlanInterfaceTest.py b/src/antlion/tests/wlan/misc/WlanInterfaceTest.py
deleted file mode 100644
index 0614901..0000000
--- a/src/antlion/tests/wlan/misc/WlanInterfaceTest.py
+++ /dev/null
@@ -1,64 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion.test_utils.wifi import base_test
-from antlion.test_utils.abstract_devices.wlan_device import create_wlan_device
-
-from mobly import signals, test_runner
-
-
-class WlanInterfaceTest(base_test.WifiBaseTest):
-    def setup_class(self):
-        super().setup_class()
-
-        device_type = self.user_params.get("dut", "fuchsia_devices")
-        if device_type == "fuchsia_devices":
-            self.dut = create_wlan_device(self.fuchsia_devices[0])
-        elif device_type == "android_devices":
-            self.dut = create_wlan_device(self.android_devices[0])
-        else:
-            raise ValueError(
-                f'Invalid "dut" type specified in config: "{device_type}".'
-                'Expected "fuchsia_devices" or "android_devices".'
-            )
-
-    def test_destroy_iface(self):
-        """Test that we don't error out when destroying the WLAN interface.
-
-        Steps:
-        1. Find a wlan interface
-        2. Destroy it
-
-        Expected Result:
-        Verify there are no errors in destroying the wlan interface.
-
-        Returns:
-          signals.TestPass if no errors
-          signals.TestFailure if there are any errors during the test.
-
-        TAGS: WLAN
-        Priority: 1
-        """
-        wlan_interfaces = self.dut.get_wlan_interface_id_list()
-        if len(wlan_interfaces) < 1:
-            raise signals.TestFailure("Not enough wlan interfaces for test")
-        if not self.dut.destroy_wlan_interface(wlan_interfaces[0]):
-            raise signals.TestFailure("Failed to destroy WLAN interface")
-        raise signals.TestPass("Success")
-
-
-if __name__ == "__main__":
-    test_runner.main()
diff --git a/src/antlion/tests/wlan/performance/BUILD.gn b/src/antlion/tests/wlan/performance/BUILD.gn
deleted file mode 100644
index 0fb75d9..0000000
--- a/src/antlion/tests/wlan/performance/BUILD.gn
+++ /dev/null
@@ -1,46 +0,0 @@
-# Copyright 2023 The Fuchsia Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import("//third_party/antlion/antlion_host_test.gni")
-import("//third_party/antlion/environments.gni")
-
-assert(is_host, "antlion tests only supported for host testing")
-
-antlion_host_test("channel_sweep_test") {
-  main_source = "ChannelSweepTest.py"
-  test_params = "channel_sweep_test_params.yaml"
-  environments = display_ap_iperf_envs
-}
-
-antlion_host_test("wlan_rvr_test") {
-  main_source = "WlanRvrTest.py"
-  test_params = "rvr_settings.yaml"
-  environments = display_ap_iperf_attenuator_envs
-}
-
-antlion_host_test("wlan_wmm_test") {
-  main_source = "WmmTest.py"
-
-  # Requires a second station and custom configuration. There are no available
-  # testbeds to support this toplogy. This will remain an at-desk test until an
-  # infra-hosted testbed matching this topology is supported.
-  environments = []
-}
-
-group("e2e_tests") {
-  testonly = true
-  public_deps = [
-    ":wlan_rvr_test($host_toolchain)",
-  ]
-}
-
-group("e2e_tests_manual") {
-  testonly = true
-  public_deps = [
-    # Running ChannelSweepTest is usually only necessary when verifying new WLAN
-    # firmware patches. Take it out of automation; it takes too long otherwise.
-    ":channel_sweep_test($host_toolchain)",
-    ":wlan_wmm_test($host_toolchain)",
-  ]
-}
diff --git a/src/antlion/tests/wlan/performance/ChannelSweepTest.py b/src/antlion/tests/wlan/performance/ChannelSweepTest.py
deleted file mode 100644
index db148e9..0000000
--- a/src/antlion/tests/wlan/performance/ChannelSweepTest.py
+++ /dev/null
@@ -1,1254 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import os
-import time
-
-from statistics import pstdev
-
-from antlion import context, utils
-from antlion.controllers.access_point import setup_ap
-from antlion.controllers.ap_lib import hostapd_config, hostapd_constants
-from antlion.controllers.ap_lib.hostapd_security import Security
-from antlion.controllers.iperf_server import IPerfResult
-from antlion.test_utils.wifi import base_test
-from antlion.test_utils.abstract_devices.wlan_device import create_wlan_device
-
-from mobly import asserts, test_runner
-
-N_CAPABILITIES_DEFAULT = [
-    hostapd_constants.N_CAPABILITY_LDPC,
-    hostapd_constants.N_CAPABILITY_SGI20,
-    hostapd_constants.N_CAPABILITY_SGI40,
-    hostapd_constants.N_CAPABILITY_TX_STBC,
-    hostapd_constants.N_CAPABILITY_RX_STBC1,
-]
-
-AC_CAPABILITIES_DEFAULT = [
-    hostapd_constants.AC_CAPABILITY_MAX_MPDU_11454,
-    hostapd_constants.AC_CAPABILITY_RXLDPC,
-    hostapd_constants.AC_CAPABILITY_SHORT_GI_80,
-    hostapd_constants.AC_CAPABILITY_TX_STBC_2BY1,
-    hostapd_constants.AC_CAPABILITY_RX_STBC_1,
-    hostapd_constants.AC_CAPABILITY_MAX_A_MPDU_LEN_EXP7,
-    hostapd_constants.AC_CAPABILITY_RX_ANTENNA_PATTERN,
-    hostapd_constants.AC_CAPABILITY_TX_ANTENNA_PATTERN,
-]
-
-DEFAULT_MIN_THROUGHPUT = 0
-DEFAULT_MAX_STD_DEV = 1
-DEFAULT_IPERF_TIMEOUT = 30
-
-DEFAULT_TIME_TO_WAIT_FOR_IP_ADDR = 30
-GRAPH_CIRCLE_SIZE = 10
-IPERF_NO_THROUGHPUT_VALUE = 0
-MAX_2_4_CHANNEL = 14
-TIME_TO_SLEEP_BETWEEN_RETRIES = 1
-TIME_TO_WAIT_FOR_COUNTRY_CODE = 10
-WEP_HEX_STRING_LENGTH = 10
-
-MEGABITS_PER_SECOND = "Mbps"
-
-
-def get_test_name(settings):
-    """Retrieves the test_name value from test_settings"""
-    return settings.get("test_name")
-
-
-class ChannelSweepTest(base_test.WifiBaseTest):
-    """Tests channel performance and regulatory compliance..
-
-    Testbed Requirement:
-    * One ACTS compatible device (dut)
-    * One Access Point
-    * One Linux Machine used as IPerfServer if running performance tests
-    Note: Performance tests should be done in isolated testbed.
-    """
-
-    def __init__(self, controllers):
-        super().__init__(controllers)
-        if "channel_sweep_test_params" in self.user_params:
-            self.time_to_wait_for_ip_addr = self.user_params[
-                "channel_sweep_test_params"
-            ].get("time_to_wait_for_ip_addr", DEFAULT_TIME_TO_WAIT_FOR_IP_ADDR)
-        else:
-            self.time_to_wait_for_ip_addr = DEFAULT_TIME_TO_WAIT_FOR_IP_ADDR
-
-    def setup_class(self):
-        super().setup_class()
-
-        device_type = self.user_params.get("dut", "fuchsia_devices")
-        if device_type == "fuchsia_devices":
-            self.dut = create_wlan_device(self.fuchsia_devices[0])
-        elif device_type == "android_devices":
-            self.dut = create_wlan_device(self.android_devices[0])
-        else:
-            raise ValueError(
-                f'Invalid "dut" type specified in config: "{device_type}".'
-                'Expected "fuchsia_devices" or "android_devices".'
-            )
-
-        self.android_devices = getattr(self, "android_devices", [])
-
-        self.access_point = self.access_points[0]
-        self.access_point.stop_all_aps()
-
-        self.iperf_server = None
-        self.iperf_client = None
-
-        self.channel_sweep_test_params = self.user_params.get(
-            "channel_sweep_test_params", {}
-        )
-        # Allows users to skip the iperf throughput measurements, just verifying
-        # association.
-        if not self.channel_sweep_test_params.get("skip_performance"):
-            try:
-                self.iperf_server = self.iperf_servers[0]
-                self.iperf_server.start()
-            except AttributeError:
-                self.log.warn(
-                    "Missing iperf config. Throughput cannot be measured, so only "
-                    "association will be tested."
-                )
-
-            if hasattr(self, "iperf_clients") and self.iperf_clients:
-                self.iperf_client = self.iperf_clients[0]
-            else:
-                self.iperf_client = self.dut.create_iperf_client()
-
-        self.regulatory_results = "====CountryCode,Channel,Frequency,ChannelBandwith,Connected/Not-Connected====\n"
-
-    def teardown_class(self):
-        super().teardown_class()
-        output_path = context.get_current_context().get_base_output_path()
-        regulatory_save_path = "%s/ChannelSweepTest/%s" % (
-            output_path,
-            "regulatory_results.txt",
-        )
-        f = open(regulatory_save_path, "w")
-        f.write(self.regulatory_results)
-        f.close()
-
-    def setup_test(self):
-        # TODO(fxb/46417): Uncomment when wlanClearCountry is implemented up any
-        # country code changes.
-        # for fd in self.fuchsia_devices:
-        #     phy_ids_response = fd.wlan_lib.wlanPhyIdList()
-        #     if phy_ids_response.get('error'):
-        #         raise ConnectionError(
-        #             'Failed to retrieve phy ids from FuchsiaDevice (%s). '
-        #             'Error: %s' % (fd.ip, phy_ids_response['error']))
-        #     for id in phy_ids_response['result']:
-        #         clear_country_response = fd.wlan_lib.wlanClearCountry(id)
-        #         if clear_country_response.get('error'):
-        #             raise EnvironmentError(
-        #                 'Failed to reset country code on FuchsiaDevice (%s). '
-        #                 'Error: %s' % (fd.ip, clear_country_response['error'])
-        #                 )
-        self.access_point.stop_all_aps()
-        for ad in self.android_devices:
-            ad.droid.wakeLockAcquireBright()
-            ad.droid.wakeUpNow()
-        self.dut.wifi_toggle_state(True)
-        self.dut.disconnect()
-
-    def teardown_test(self):
-        for ad in self.android_devices:
-            ad.droid.wakeLockRelease()
-            ad.droid.goToSleepNow()
-        self.dut.turn_location_off_and_scan_toggle_off()
-        self.dut.disconnect()
-        self.download_ap_logs()
-        self.access_point.stop_all_aps()
-
-    def set_dut_country_code(self, country_code):
-        """Set the country code on the DUT. Then verify that the country
-        code was set successfully
-
-        Args:
-            country_code: string, the 2 character country code to set
-        """
-        self.log.info("Setting DUT country code to %s" % country_code)
-        country_code_response = self.dut.device.sl4f.regulatory_region_lib.setRegion(
-            country_code
-        )
-        if country_code_response.get("error"):
-            raise EnvironmentError(
-                "Failed to set country code (%s) on DUT. Error: %s"
-                % (country_code, country_code_response["error"])
-            )
-
-        self.log.info(
-            "Verifying DUT country code was correctly set to %s." % country_code
-        )
-        phy_ids_response = self.dut.device.sl4f.wlan_lib.wlanPhyIdList()
-        if phy_ids_response.get("error"):
-            raise ConnectionError(
-                "Failed to get phy ids from DUT. Error: %s"
-                % (country_code, phy_ids_response["error"])
-            )
-
-        end_time = time.time() + TIME_TO_WAIT_FOR_COUNTRY_CODE
-        while time.time() < end_time:
-            for id in phy_ids_response["result"]:
-                get_country_response = self.dut.device.sl4f.wlan_lib.wlanGetCountry(id)
-                if get_country_response.get("error"):
-                    raise ConnectionError(
-                        "Failed to query PHY ID (%s) for country. Error: %s"
-                        % (id, get_country_response["error"])
-                    )
-
-                set_code = "".join(
-                    [chr(ascii_char) for ascii_char in get_country_response["result"]]
-                )
-                if set_code != country_code:
-                    self.log.debug(
-                        "PHY (id: %s) has incorrect country code set. "
-                        "Expected: %s, Got: %s" % (id, country_code, set_code)
-                    )
-                    break
-            else:
-                self.log.info("All PHYs have expected country code (%s)" % country_code)
-                break
-            time.sleep(TIME_TO_SLEEP_BETWEEN_RETRIES)
-        else:
-            raise EnvironmentError(
-                "Failed to set DUT country code to %s." % country_code
-            )
-
-    def setup_ap(self, channel, channel_bandwidth, security_profile=None):
-        """Start network on AP with basic configuration.
-
-        Args:
-            channel: int, channel to use for network
-            channel_bandwidth: int, channel bandwidth in mhz to use for network,
-            security_profile: Security object, or None if open
-
-        Returns:
-            string, ssid of network running
-
-        Raises:
-            ConnectionError if network is not started successfully.
-        """
-        if channel > MAX_2_4_CHANNEL:
-            vht_bandwidth = channel_bandwidth
-        else:
-            vht_bandwidth = None
-
-        if channel_bandwidth == hostapd_constants.CHANNEL_BANDWIDTH_20MHZ:
-            n_capabilities = N_CAPABILITIES_DEFAULT + [
-                hostapd_constants.N_CAPABILITY_HT20
-            ]
-        elif (
-            channel_bandwidth == hostapd_constants.CHANNEL_BANDWIDTH_40MHZ
-            or channel_bandwidth == hostapd_constants.CHANNEL_BANDWIDTH_80MHZ
-        ):
-            if hostapd_config.ht40_plus_allowed(channel):
-                extended_channel = [hostapd_constants.N_CAPABILITY_HT40_PLUS]
-            elif hostapd_config.ht40_minus_allowed(channel):
-                extended_channel = [hostapd_constants.N_CAPABILITY_HT40_MINUS]
-            else:
-                raise ValueError("Invalid Channel: %s" % channel)
-            n_capabilities = N_CAPABILITIES_DEFAULT + extended_channel
-        else:
-            raise ValueError("Invalid Bandwidth: %s" % channel_bandwidth)
-        ssid = utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G)
-        try:
-            setup_ap(
-                access_point=self.access_point,
-                profile_name="whirlwind",
-                channel=channel,
-                security=security_profile,
-                n_capabilities=n_capabilities,
-                ac_capabilities=None,
-                force_wmm=True,
-                ssid=ssid,
-                vht_bandwidth=vht_bandwidth,
-                setup_bridge=True,
-            )
-        except Exception as err:
-            raise ConnectionError(
-                "Failed to setup ap on channel: %s, channel bandwidth: %smhz. "
-                "Error: %s" % (channel, channel_bandwidth, err)
-            )
-        else:
-            self.log.info(
-                "Network (ssid: %s) up on channel %s w/ channel bandwidth %smhz"
-                % (ssid, channel, channel_bandwidth)
-            )
-
-        return ssid
-
-    def get_and_verify_iperf_address(self, channel, device, interface=None):
-        """Get ip address from a devices interface and verify it belongs to
-        expected subnet based on APs DHCP config.
-
-        Args:
-            channel: int, channel network is running on, to determine subnet
-            device: device to get ip address for
-            interface (default: None): interface on device to get ip address.
-                If None, uses device.test_interface.
-
-        Returns:
-            String, ip address of device on given interface (or test_interface)
-
-        Raises:
-            ConnectionError, if device does not have a valid ip address after
-                all retries.
-        """
-        if channel <= MAX_2_4_CHANNEL:
-            subnet = self.access_point._AP_2G_SUBNET_STR
-        else:
-            subnet = self.access_point._AP_5G_SUBNET_STR
-        end_time = time.time() + self.time_to_wait_for_ip_addr
-        while time.time() < end_time:
-            if interface:
-                device_addresses = device.get_interface_ip_addresses(interface)
-            else:
-                device_addresses = device.get_interface_ip_addresses(
-                    device.test_interface
-                )
-
-            if device_addresses["ipv4_private"]:
-                for ip_addr in device_addresses["ipv4_private"]:
-                    if utils.ip_in_subnet(ip_addr, subnet):
-                        return ip_addr
-                    else:
-                        self.log.debug(
-                            "Device has an ip address (%s), but it is not in "
-                            "subnet %s" % (ip_addr, subnet)
-                        )
-            else:
-                self.log.debug("Device does not have a valid ip address. Retrying.")
-            time.sleep(TIME_TO_SLEEP_BETWEEN_RETRIES)
-        raise ConnectionError("Device failed to get an ip address.")
-
-    def get_iperf_throughput(
-        self, iperf_server_address, iperf_client_address, reverse=False
-    ):
-        """Run iperf between client and server and get the throughput.
-
-        Args:
-            iperf_server_address: string, ip address of running iperf server
-            iperf_client_address: string, ip address of iperf client (dut)
-            reverse (default: False): If True, run traffic in reverse direction,
-                from server to client.
-
-        Returns:
-            int, iperf throughput OR IPERF_NO_THROUGHPUT_VALUE, if iperf fails
-        """
-        if reverse:
-            self.log.info(
-                "Running IPerf traffic from server (%s) to dut (%s)."
-                % (iperf_server_address, iperf_client_address)
-            )
-            iperf_results_file = self.iperf_client.start(
-                iperf_server_address,
-                "-i 1 -t 10 -R -J",
-                "channel_sweep_rx",
-                timeout=DEFAULT_IPERF_TIMEOUT,
-            )
-        else:
-            self.log.info(
-                "Running IPerf traffic from dut (%s) to server (%s)."
-                % (iperf_client_address, iperf_server_address)
-            )
-            iperf_results_file = self.iperf_client.start(
-                iperf_server_address,
-                "-i 1 -t 10 -J",
-                "channel_sweep_tx",
-                timeout=DEFAULT_IPERF_TIMEOUT,
-            )
-        if iperf_results_file:
-            iperf_results = IPerfResult(
-                iperf_results_file, reporting_speed_units=MEGABITS_PER_SECOND
-            )
-            return iperf_results.avg_send_rate
-        else:
-            return IPERF_NO_THROUGHPUT_VALUE
-
-    def log_to_file_and_throughput_data(
-        self, channel, channel_bandwidth, tx_throughput, rx_throughput
-    ):
-        """Write performance info to csv file and to throughput data.
-
-        Args:
-            channel: int, channel that test was run on
-            channel_bandwidth: int, channel bandwidth the test used
-            tx_throughput: float, throughput value from dut to iperf server
-            rx_throughput: float, throughput value from iperf server to dut
-        """
-        test_name = self.throughput_data["test"]
-        output_path = context.get_current_context().get_base_output_path()
-        log_path = "%s/ChannelSweepTest/%s" % (output_path, test_name)
-        if not os.path.exists(log_path):
-            os.makedirs(log_path)
-        log_file = "%s/%s_%smhz.csv" % (log_path, test_name, channel_bandwidth)
-        self.log.info("Writing IPerf results for %s to %s" % (test_name, log_file))
-        with open(log_file, "a") as csv_file:
-            csv_file.write("%s,%s,%s\n" % (channel, tx_throughput, rx_throughput))
-        self.throughput_data["results"][str(channel)] = {
-            "tx_throughput": tx_throughput,
-            "rx_throughput": rx_throughput,
-        }
-
-    def write_graph(self):
-        """Create graph html files from throughput data, plotting channel vs
-        tx_throughput and channel vs rx_throughput.
-        """
-        # If performance measurement is skipped
-        if not self.iperf_server:
-            return
-
-        try:
-            from bokeh.plotting import ColumnDataSource
-            from bokeh.plotting import figure
-            from bokeh.plotting import output_file
-            from bokeh.plotting import save
-        except ImportError as e:
-            self.log.warn(
-                "bokeh is not installed: skipping creation of graphs. "
-                "Note CSV files are still available. If graphs are "
-                'desired, install antlion with the "bokeh" feature.'
-            )
-            return
-
-        output_path = context.get_current_context().get_base_output_path()
-        test_name = self.throughput_data["test"]
-        channel_bandwidth = self.throughput_data["channel_bandwidth"]
-        output_file_name = "%s/ChannelSweepTest/%s/%s_%smhz.html" % (
-            output_path,
-            test_name,
-            test_name,
-            channel_bandwidth,
-        )
-        output_file(output_file_name)
-        channels = []
-        tx_throughputs = []
-        rx_throughputs = []
-        for channel in self.throughput_data["results"]:
-            channels.append(str(channel))
-            tx_throughputs.append(
-                self.throughput_data["results"][channel]["tx_throughput"]
-            )
-            rx_throughputs.append(
-                self.throughput_data["results"][channel]["rx_throughput"]
-            )
-        channel_vs_throughput_data = ColumnDataSource(
-            data=dict(
-                channels=channels,
-                tx_throughput=tx_throughputs,
-                rx_throughput=rx_throughputs,
-            )
-        )
-        TOOLTIPS = [
-            ("Channel", "@channels"),
-            ("TX_Throughput", "@tx_throughput"),
-            ("RX_Throughput", "@rx_throughput"),
-        ]
-        channel_vs_throughput_graph = figure(
-            title="Channels vs. Throughput",
-            x_axis_label="Channels",
-            x_range=channels,
-            y_axis_label="Throughput",
-            tooltips=TOOLTIPS,
-        )
-        channel_vs_throughput_graph.sizing_mode = "stretch_both"
-        channel_vs_throughput_graph.title.align = "center"
-        channel_vs_throughput_graph.line(
-            "channels",
-            "tx_throughput",
-            source=channel_vs_throughput_data,
-            line_width=2,
-            line_color="blue",
-            legend_label="TX_Throughput",
-        )
-        channel_vs_throughput_graph.circle(
-            "channels",
-            "tx_throughput",
-            source=channel_vs_throughput_data,
-            size=GRAPH_CIRCLE_SIZE,
-            color="blue",
-        )
-        channel_vs_throughput_graph.line(
-            "channels",
-            "rx_throughput",
-            source=channel_vs_throughput_data,
-            line_width=2,
-            line_color="red",
-            legend_label="RX_Throughput",
-        )
-        channel_vs_throughput_graph.circle(
-            "channels",
-            "rx_throughput",
-            source=channel_vs_throughput_data,
-            size=GRAPH_CIRCLE_SIZE,
-            color="red",
-        )
-
-        channel_vs_throughput_graph.legend.location = "top_left"
-        graph_file = save([channel_vs_throughput_graph])
-        self.log.info("Saved graph to %s" % graph_file)
-
-    def verify_standard_deviation(self, max_std_dev):
-        """Verifies the standard deviation of the throughput across the channels
-        does not exceed the max_std_dev value.
-
-        Args:
-            max_std_dev: float, max standard deviation of throughput for a test
-                to pass (in Mb/s)
-
-        Raises:
-            TestFailure, if standard deviation of throughput exceeds max_std_dev
-        """
-        # If performance measurement is skipped
-        if not self.iperf_server:
-            return
-        self.log.info(
-            "Verifying standard deviation across channels does not "
-            "exceed max standard deviation of %s Mb/s" % max_std_dev
-        )
-        tx_values = []
-        rx_values = []
-        for channel in self.throughput_data["results"]:
-            if self.throughput_data["results"][channel]["tx_throughput"] is not None:
-                tx_values.append(
-                    self.throughput_data["results"][channel]["tx_throughput"]
-                )
-            if self.throughput_data["results"][channel]["rx_throughput"] is not None:
-                rx_values.append(
-                    self.throughput_data["results"][channel]["rx_throughput"]
-                )
-        tx_std_dev = pstdev(tx_values)
-        rx_std_dev = pstdev(rx_values)
-        if tx_std_dev > max_std_dev or rx_std_dev > max_std_dev:
-            asserts.fail(
-                "With %smhz channel bandwidth, throughput standard "
-                "deviation (tx: %s Mb/s, rx: %s Mb/s) exceeds max standard "
-                "deviation (%s Mb/s)."
-                % (
-                    self.throughput_data["channel_bandwidth"],
-                    tx_std_dev,
-                    rx_std_dev,
-                    max_std_dev,
-                )
-            )
-        else:
-            asserts.explicit_pass(
-                "Throughput standard deviation (tx: %s Mb/s, rx: %s Mb/s) "
-                "with %smhz channel bandwidth does not exceed maximum (%s Mb/s)."
-                % (
-                    tx_std_dev,
-                    rx_std_dev,
-                    self.throughput_data["channel_bandwidth"],
-                    max_std_dev,
-                )
-            )
-
-    def run_channel_performance_tests(self, settings):
-        """Test function for running channel performance tests. Used by both
-        explicit test cases and debug test cases from config. Runs a performance
-        test for each channel in test_channels with test_channel_bandwidth, then
-        writes a graph and csv file of the channel vs throughput.
-
-        Args:
-            settings: dict, containing the following test settings
-                test_channels: list of channels to test.
-                test_channel_bandwidth: int, channel bandwidth to use for test.
-                test_security (optional): string, security type to use for test.
-                min_tx_throughput (optional, default: 0): float, minimum tx
-                    throughput threshold to pass individual channel tests
-                    (in Mb/s).
-                min_rx_throughput (optional, default: 0): float, minimum rx
-                    throughput threshold to pass individual channel tests
-                    (in Mb/s).
-                max_std_dev (optional, default: 1): float, maximum standard
-                    deviation of throughput across all test channels to pass
-                    test (in Mb/s).
-                base_test_name (optional): string, test name prefix to use with
-                    generated subtests.
-                country_name (optional): string, country name from
-                    hostapd_constants to set on device.
-                country_code (optional): string, two-char country code to set on
-                    the DUT. Takes priority over country_name.
-                test_name (debug tests only): string, the test name for this
-                    parent test case from the config file. In explicit tests,
-                    this is not necessary.
-
-        Writes:
-            CSV file: channel, tx_throughput, rx_throughput
-                for every test channel.
-            Graph: channel vs tx_throughput & channel vs rx_throughput
-
-        Raises:
-            TestFailure, if throughput standard deviation across channels
-                exceeds max_std_dev
-
-            Example Explicit Test (see EOF for debug JSON example):
-            def test_us_2g_20mhz_wpa2(self):
-                self.run_channel_performance_tests(
-                        dict(
-                        test_channels=hostapd_constants.US_CHANNELS_2G,
-                        test_channel_bandwidth=20,
-                        test_security=hostapd_constants.WPA2_STRING,
-                        min_tx_throughput=2,
-                        min_rx_throughput=4,
-                        max_std_dev=0.75,
-                        country_code='US',
-                        base_test_name='test_us'))
-        """
-        test_channels = settings["test_channels"]
-        test_channel_bandwidth = settings["test_channel_bandwidth"]
-        test_security = settings.get("test_security", None)
-        test_name = settings.get("test_name", self.test_name)
-        base_test_name = settings.get("base_test_name", "test")
-        min_tx_throughput = settings.get("min_tx_throughput", DEFAULT_MIN_THROUGHPUT)
-        min_rx_throughput = settings.get("min_rx_throughput", DEFAULT_MIN_THROUGHPUT)
-        max_std_dev = settings.get("max_std_dev", DEFAULT_MAX_STD_DEV)
-        country_code = settings.get("country_code")
-        country_name = settings.get("country_name")
-        country_label = None
-
-        if country_code:
-            country_label = country_code
-            self.set_dut_country_code(country_code)
-        elif country_name:
-            country_label = country_name
-            code = hostapd_constants.COUNTRY_CODE[country_name]["country_code"]
-            self.set_dut_country_code(code)
-
-        self.throughput_data = {
-            "test": test_name,
-            "channel_bandwidth": test_channel_bandwidth,
-            "results": {},
-        }
-        test_list = []
-        for channel in test_channels:
-            sub_test_name = "test_%schannel_%s_%smhz_%s_performance" % (
-                "%s_" % country_label if country_label else "",
-                channel,
-                test_channel_bandwidth,
-                test_security if test_security else "open",
-            )
-            test_list.append(
-                {
-                    "test_name": sub_test_name,
-                    "channel": int(channel),
-                    "channel_bandwidth": int(test_channel_bandwidth),
-                    "security": test_security,
-                    "min_tx_throughput": min_tx_throughput,
-                    "min_rx_throughput": min_rx_throughput,
-                }
-            )
-        self.run_generated_testcases(
-            self.get_channel_performance, settings=test_list, name_func=get_test_name
-        )
-        self.log.info("Channel tests completed.")
-        self.write_graph()
-        self.verify_standard_deviation(max_std_dev)
-
-    def get_channel_performance(self, settings):
-        """Run a single channel performance test and logs results to csv file
-        and throughput data. Run with generated sub test cases in
-        run_channel_performance_tests.
-
-        1. Sets up network with test settings
-        2. Associates DUT
-        3. Runs traffic between DUT and iperf server (both directions)
-        4. Logs channel, tx_throughput (Mb/s), and rx_throughput (Mb/s) to
-           log file and throughput data.
-        5. Checks throughput values against minimum throughput thresholds.
-
-        Args:
-            settings: see run_channel_performance_tests
-
-        Raises:
-            TestFailure, if throughput (either direction) is less than
-                the directions given minimum throughput threshold.
-        """
-        channel = settings["channel"]
-        channel_bandwidth = settings["channel_bandwidth"]
-        security = settings["security"]
-        test_name = settings["test_name"]
-        min_tx_throughput = settings["min_tx_throughput"]
-        min_rx_throughput = settings["min_rx_throughput"]
-        if security:
-            if security == hostapd_constants.WEP_STRING:
-                password = utils.rand_hex_str(WEP_HEX_STRING_LENGTH)
-            else:
-                password = utils.rand_ascii_str(hostapd_constants.MIN_WPA_PSK_LENGTH)
-            security_profile = Security(security_mode=security, password=password)
-            target_security = (
-                hostapd_constants.SECURITY_STRING_TO_DEFAULT_TARGET_SECURITY.get(
-                    security
-                )
-            )
-        else:
-            password = None
-            security_profile = None
-            target_security = None
-        ssid = self.setup_ap(channel, channel_bandwidth, security_profile)
-        associated = self.dut.associate(
-            ssid, target_pwd=password, target_security=target_security
-        )
-        if not associated:
-            if self.iperf_server:
-                self.log_to_file_and_throughput_data(
-                    channel, channel_bandwidth, None, None
-                )
-            asserts.fail("Device failed to associate with network %s" % ssid)
-        self.log.info("DUT (%s) connected to network %s." % (self.dut.device.ip, ssid))
-        if self.iperf_server:
-            self.iperf_server.renew_test_interface_ip_address()
-            self.log.info(
-                "Getting ip address for iperf server. Will retry for %s seconds."
-                % self.time_to_wait_for_ip_addr
-            )
-            iperf_server_address = self.get_and_verify_iperf_address(
-                channel, self.iperf_server
-            )
-            self.log.info(
-                "Getting ip address for DUT. Will retry for %s seconds."
-                % self.time_to_wait_for_ip_addr
-            )
-            iperf_client_address = self.get_and_verify_iperf_address(
-                channel, self.dut.device, self.iperf_client.test_interface
-            )
-            tx_throughput = self.get_iperf_throughput(
-                iperf_server_address, iperf_client_address
-            )
-            rx_throughput = self.get_iperf_throughput(
-                iperf_server_address, iperf_client_address, reverse=True
-            )
-            self.log_to_file_and_throughput_data(
-                channel, channel_bandwidth, tx_throughput, rx_throughput
-            )
-            self.log.info(
-                "Throughput (tx, rx): (%s Mb/s, %s Mb/s), "
-                "Minimum threshold (tx, rx): (%s Mb/s, %s Mb/s)"
-                % (tx_throughput, rx_throughput, min_tx_throughput, min_rx_throughput)
-            )
-            base_message = (
-                "Actual throughput (on channel: %s, channel bandwidth: "
-                "%s, security: %s)" % (channel, channel_bandwidth, security)
-            )
-            if (
-                not tx_throughput
-                or not rx_throughput
-                or tx_throughput < min_tx_throughput
-                or rx_throughput < min_rx_throughput
-            ):
-                asserts.fail("%s below the minimum threshold." % base_message)
-            asserts.explicit_pass("%s above the minimum threshold." % base_message)
-        else:
-            asserts.explicit_pass(
-                "Association test pass. No throughput measurement taken."
-            )
-
-    def verify_regulatory_compliance(self, settings):
-        """Test function for regulatory compliance tests. Verify device complies
-        with provided regulatory requirements.
-
-        Args:
-            settings: dict, containing the following test settings
-                test_channels: dict, mapping channels to a set of the channel
-                    bandwidths to test (see example for using JSON). Defaults
-                    to hostapd_constants.ALL_CHANNELS.
-                country_code: string, two-char country code to set on device
-                    (prioritized over country_name)
-                country_name: string, country name from hostapd_constants to set
-                    on device.
-                base_test_name (optional): string, test name prefix to use with
-                    generatedsubtests.
-                test_name: string, the test name for this
-                    parent test case from the config file. In explicit tests,
-                    this is not necessary.
-        """
-        country_name = settings.get("country_name")
-        country_code = settings.get("country_code")
-        if not (country_code or country_name):
-            raise ValueError("No country code or name provided.")
-
-        test_channels = settings.get("test_channels", hostapd_constants.ALL_CHANNELS)
-        allowed_channels = settings["allowed_channels"]
-
-        base_test_name = settings.get("base_test_name", "test_compliance")
-
-        if country_code:
-            code = country_code
-        else:
-            code = hostapd_constants.COUNTRY_CODE[country_name]["country_code"]
-
-        self.set_dut_country_code(code)
-
-        test_list = []
-        for channel in test_channels:
-            for channel_bandwidth in test_channels[channel]:
-                sub_test_name = "%s_channel_%s_%smhz" % (
-                    base_test_name,
-                    channel,
-                    channel_bandwidth,
-                )
-                should_associate = (
-                    channel in allowed_channels
-                    and channel_bandwidth in allowed_channels[channel]
-                )
-                # Note: these int conversions because when these tests are
-                # imported via JSON, they may be strings since the channels
-                # will be keys. This makes the json/list test_channels param
-                # behave exactly like the in code dict/set test_channels.
-                test_list.append(
-                    {
-                        "country_code": code,
-                        "channel": int(channel),
-                        "channel_bandwidth": int(channel_bandwidth),
-                        "should_associate": should_associate,
-                        "test_name": sub_test_name,
-                    }
-                )
-        self.run_generated_testcases(
-            test_func=self.verify_channel_compliance,
-            settings=test_list,
-            name_func=get_test_name,
-        )
-
-    def verify_channel_compliance(self, settings):
-        """Verify device complies with provided regulatory requirements for a
-                specific channel and channel bandwidth. Run with generated test cases
-                in the verify_regulatory_compliance parent test.
-        _
-                Args:
-                    settings: see verify_regulatory_compliance`
-        """
-        channel = settings["channel"]
-        channel_bandwidth = settings["channel_bandwidth"]
-        code = settings["country_code"]
-        should_associate = settings["should_associate"]
-
-        ssid = self.setup_ap(channel, channel_bandwidth)
-
-        self.log.info(
-            "Attempting to associate with network (%s) on channel %s @ %smhz. "
-            "Expected behavior: %s"
-            % (
-                ssid,
-                channel,
-                channel_bandwidth,
-                "Device should associate"
-                if should_associate
-                else "Device should NOT associate.",
-            )
-        )
-
-        associated = self.dut.associate(ssid)
-
-        regulatory_result_marker = "REGTRACKER: %s,%s,%s,%s,%s" % (
-            code,
-            channel,
-            "2.4" if channel < 36 else "5",
-            channel_bandwidth,
-            "c" if associated else "nc",
-        )
-        self.regulatory_results += regulatory_result_marker + "\n"
-        self.log.info(regulatory_result_marker)
-
-        if associated == should_associate:
-            asserts.explicit_pass(
-                "Device complied with %s regulatory requirement for channel %s "
-                " with channel bandwidth %smhz. %s"
-                % (
-                    code,
-                    channel,
-                    channel_bandwidth,
-                    "Associated." if associated else "Refused to associate.",
-                )
-            )
-        else:
-            asserts.fail(
-                "Device failed compliance with regulatory domain %s for "
-                "channel %s with channel bandwidth %smhz. Expected: %s, Got: %s"
-                % (
-                    code,
-                    channel,
-                    channel_bandwidth,
-                    "Should associate" if should_associate else "Should not associate",
-                    "Associated" if associated else "Did not associate",
-                )
-            )
-
-    # Helper functions to allow explicit tests throughput and standard deviation
-    # thresholds to be passed in via config.
-    def _get_min_tx_throughput(self, test_name):
-        return (
-            self.user_params.get("channel_sweep_test_params", {})
-            .get(test_name, {})
-            .get("min_tx_throughput", DEFAULT_MIN_THROUGHPUT)
-        )
-
-    def _get_min_rx_throughput(self, test_name):
-        return (
-            self.user_params.get("channel_sweep_test_params", {})
-            .get(test_name, {})
-            .get("min_rx_throughput", DEFAULT_MIN_THROUGHPUT)
-        )
-
-    def _get_max_std_dev(self, test_name):
-        return (
-            self.user_params.get("channel_sweep_test_params", {})
-            .get(test_name, {})
-            .get("min_std_dev", DEFAULT_MAX_STD_DEV)
-        )
-
-    # Channel Performance of US Channels: 570 Test Cases
-    # 36 Test Cases
-    def test_us_20mhz_open_channel_performance(self):
-        self.run_channel_performance_tests(
-            dict(
-                test_channels=hostapd_constants.US_CHANNELS_2G
-                + hostapd_constants.US_CHANNELS_5G,
-                test_channel_bandwidth=hostapd_constants.CHANNEL_BANDWIDTH_20MHZ,
-                base_test_name=self.test_name,
-                min_tx_throughput=self._get_min_tx_throughput(self.test_name),
-                min_rx_throughput=self._get_min_rx_throughput(self.test_name),
-                max_std_dev=self._get_max_std_dev(self.test_name),
-            )
-        )
-
-    # 35 Test Cases
-    def test_us_40mhz_open_channel_performance(self):
-        self.run_channel_performance_tests(
-            dict(
-                test_channels=hostapd_constants.US_CHANNELS_2G
-                + hostapd_constants.US_CHANNELS_5G[:-1],
-                test_channel_bandwidth=hostapd_constants.CHANNEL_BANDWIDTH_40MHZ,
-                base_test_name=self.test_name,
-                min_tx_throughput=self._get_min_tx_throughput(self.test_name),
-                min_rx_throughput=self._get_min_rx_throughput(self.test_name),
-                max_std_dev=self._get_max_std_dev(self.test_name),
-            )
-        )
-
-    # 24 Test Cases
-    def test_us_80mhz_open_channel_performance(self):
-        self.run_channel_performance_tests(
-            dict(
-                test_channels=hostapd_constants.US_CHANNELS_5G[:-1],
-                test_channel_bandwidth=hostapd_constants.CHANNEL_BANDWIDTH_80MHZ,
-                base_test_name=self.test_name,
-                min_tx_throughput=self._get_min_tx_throughput(self.test_name),
-                min_rx_throughput=self._get_min_rx_throughput(self.test_name),
-                max_std_dev=self._get_max_std_dev(self.test_name),
-            )
-        )
-
-    # 36 Test Cases
-    def test_us_20mhz_wep_channel_performance(self):
-        self.run_channel_performance_tests(
-            dict(
-                test_channels=hostapd_constants.US_CHANNELS_2G
-                + hostapd_constants.US_CHANNELS_5G,
-                test_channel_bandwidth=hostapd_constants.CHANNEL_BANDWIDTH_20MHZ,
-                test_security=hostapd_constants.WEP_STRING,
-                base_test_name=self.test_name,
-                min_tx_throughput=self._get_min_tx_throughput(self.test_name),
-                min_rx_throughput=self._get_min_rx_throughput(self.test_name),
-                max_std_dev=self._get_max_std_dev(self.test_name),
-            )
-        )
-
-    # 35 Test Cases
-    def test_us_40mhz_wep_channel_performance(self):
-        self.run_channel_performance_tests(
-            dict(
-                test_channels=hostapd_constants.US_CHANNELS_2G
-                + hostapd_constants.US_CHANNELS_5G[:-1],
-                test_channel_bandwidth=hostapd_constants.CHANNEL_BANDWIDTH_40MHZ,
-                test_security=hostapd_constants.WEP_STRING,
-                base_test_name=self.test_name,
-                min_tx_throughput=self._get_min_tx_throughput(self.test_name),
-                min_rx_throughput=self._get_min_rx_throughput(self.test_name),
-                max_std_dev=self._get_max_std_dev(self.test_name),
-            )
-        )
-
-    # 24 Test Cases
-    def test_us_80mhz_wep_channel_performance(self):
-        self.run_channel_performance_tests(
-            dict(
-                test_channels=hostapd_constants.US_CHANNELS_5G[:-1],
-                test_channel_bandwidth=hostapd_constants.CHANNEL_BANDWIDTH_80MHZ,
-                test_security=hostapd_constants.WEP_STRING,
-                base_test_name=self.test_name,
-                min_tx_throughput=self._get_min_tx_throughput(self.test_name),
-                min_rx_throughput=self._get_min_rx_throughput(self.test_name),
-                max_std_dev=self._get_max_std_dev(self.test_name),
-            )
-        )
-
-    # 36 Test Cases
-    def test_us_20mhz_wpa_channel_performance(self):
-        self.run_channel_performance_tests(
-            dict(
-                test_channels=hostapd_constants.US_CHANNELS_2G
-                + hostapd_constants.US_CHANNELS_5G,
-                test_channel_bandwidth=hostapd_constants.CHANNEL_BANDWIDTH_20MHZ,
-                test_security=hostapd_constants.WPA_STRING,
-                base_test_name=self.test_name,
-                min_tx_throughput=self._get_min_tx_throughput(self.test_name),
-                min_rx_throughput=self._get_min_rx_throughput(self.test_name),
-                max_std_dev=self._get_max_std_dev(self.test_name),
-            )
-        )
-
-    # 35 Test Cases
-    def test_us_40mhz_wpa_channel_performance(self):
-        self.run_channel_performance_tests(
-            dict(
-                test_channels=hostapd_constants.US_CHANNELS_2G
-                + hostapd_constants.US_CHANNELS_5G[:-1],
-                test_channel_bandwidth=hostapd_constants.CHANNEL_BANDWIDTH_40MHZ,
-                test_security=hostapd_constants.WPA_STRING,
-                base_test_name=self.test_name,
-                min_tx_throughput=self._get_min_tx_throughput(self.test_name),
-                min_rx_throughput=self._get_min_rx_throughput(self.test_name),
-                max_std_dev=self._get_max_std_dev(self.test_name),
-            )
-        )
-
-    # 24 Test Cases
-    def test_us_80mhz_wpa_channel_performance(self):
-        self.run_channel_performance_tests(
-            dict(
-                test_channels=hostapd_constants.US_CHANNELS_5G[:-1],
-                test_channel_bandwidth=hostapd_constants.CHANNEL_BANDWIDTH_80MHZ,
-                test_security=hostapd_constants.WPA_STRING,
-                base_test_name=self.test_name,
-                min_tx_throughput=self._get_min_tx_throughput(self.test_name),
-                min_rx_throughput=self._get_min_rx_throughput(self.test_name),
-                max_std_dev=self._get_max_std_dev(self.test_name),
-            )
-        )
-
-    # 36 Test Cases
-    def test_us_20mhz_wpa2_channel_performance(self):
-        self.run_channel_performance_tests(
-            dict(
-                test_channels=hostapd_constants.US_CHANNELS_2G
-                + hostapd_constants.US_CHANNELS_5G,
-                test_channel_bandwidth=hostapd_constants.CHANNEL_BANDWIDTH_20MHZ,
-                test_security=hostapd_constants.WPA2_STRING,
-                base_test_name=self.test_name,
-                min_tx_throughput=self._get_min_tx_throughput(self.test_name),
-                min_rx_throughput=self._get_min_rx_throughput(self.test_name),
-                max_std_dev=self._get_max_std_dev(self.test_name),
-            )
-        )
-
-    # 35 Test Cases
-    def test_us_40mhz_wpa2_channel_performance(self):
-        self.run_channel_performance_tests(
-            dict(
-                test_channels=hostapd_constants.US_CHANNELS_2G
-                + hostapd_constants.US_CHANNELS_5G[:-1],
-                test_channel_bandwidth=hostapd_constants.CHANNEL_BANDWIDTH_40MHZ,
-                test_security=hostapd_constants.WPA2_STRING,
-                base_test_name=self.test_name,
-                min_tx_throughput=self._get_min_tx_throughput(self.test_name),
-                min_rx_throughput=self._get_min_rx_throughput(self.test_name),
-                max_std_dev=self._get_max_std_dev(self.test_name),
-            )
-        )
-
-    # 24 Test Cases
-    def test_us_80mhz_wpa2_channel_performance(self):
-        self.run_channel_performance_tests(
-            dict(
-                test_channels=hostapd_constants.US_CHANNELS_5G[:-1],
-                test_channel_bandwidth=hostapd_constants.CHANNEL_BANDWIDTH_80MHZ,
-                test_security=hostapd_constants.WPA2_STRING,
-                base_test_name=self.test_name,
-                min_tx_throughput=self._get_min_tx_throughput(self.test_name),
-                min_rx_throughput=self._get_min_rx_throughput(self.test_name),
-                max_std_dev=self._get_max_std_dev(self.test_name),
-            )
-        )
-
-    # 36 Test Cases
-    def test_us_20mhz_wpa_wpa2_channel_performance(self):
-        self.run_channel_performance_tests(
-            dict(
-                test_channels=hostapd_constants.US_CHANNELS_2G
-                + hostapd_constants.US_CHANNELS_5G,
-                test_channel_bandwidth=hostapd_constants.CHANNEL_BANDWIDTH_20MHZ,
-                test_security=hostapd_constants.WPA_MIXED_STRING,
-                base_test_name=self.test_name,
-                min_tx_throughput=self._get_min_tx_throughput(self.test_name),
-                min_rx_throughput=self._get_min_rx_throughput(self.test_name),
-                max_std_dev=self._get_max_std_dev(self.test_name),
-            )
-        )
-
-    # 35 Test Cases
-    def test_us_40mhz_wpa_wpa2_channel_performance(self):
-        self.run_channel_performance_tests(
-            dict(
-                test_channels=hostapd_constants.US_CHANNELS_2G
-                + hostapd_constants.US_CHANNELS_5G[:-1],
-                test_channel_bandwidth=hostapd_constants.CHANNEL_BANDWIDTH_40MHZ,
-                test_security=hostapd_constants.WPA_MIXED_STRING,
-                base_test_name=self.test_name,
-                min_tx_throughput=self._get_min_tx_throughput(self.test_name),
-                min_rx_throughput=self._get_min_rx_throughput(self.test_name),
-                max_std_dev=self._get_max_std_dev(self.test_name),
-            )
-        )
-
-    # 24 Test Cases
-    def test_us_80mhz_wpa_wpa2_channel_performance(self):
-        self.run_channel_performance_tests(
-            dict(
-                test_channels=hostapd_constants.US_CHANNELS_5G[:-1],
-                test_channel_bandwidth=hostapd_constants.CHANNEL_BANDWIDTH_80MHZ,
-                test_security=hostapd_constants.WPA_MIXED_STRING,
-                base_test_name=self.test_name,
-                min_tx_throughput=self._get_min_tx_throughput(self.test_name),
-                min_rx_throughput=self._get_min_rx_throughput(self.test_name),
-                max_std_dev=self._get_max_std_dev(self.test_name),
-            )
-        )
-
-    # 36 Test Cases
-    def test_us_20mhz_wpa3_channel_performance(self):
-        self.run_channel_performance_tests(
-            dict(
-                test_channels=hostapd_constants.US_CHANNELS_2G
-                + hostapd_constants.US_CHANNELS_5G,
-                test_channel_bandwidth=hostapd_constants.CHANNEL_BANDWIDTH_20MHZ,
-                test_security=hostapd_constants.WPA3_STRING,
-                base_test_name=self.test_name,
-                min_tx_throughput=self._get_min_tx_throughput(self.test_name),
-                min_rx_throughput=self._get_min_rx_throughput(self.test_name),
-                max_std_dev=self._get_max_std_dev(self.test_name),
-            )
-        )
-
-    # 35 Test Cases
-    def test_us_40mhz_wpa3_channel_performance(self):
-        self.run_channel_performance_tests(
-            dict(
-                test_channels=hostapd_constants.US_CHANNELS_2G
-                + hostapd_constants.US_CHANNELS_5G[:-1],
-                test_channel_bandwidth=hostapd_constants.CHANNEL_BANDWIDTH_40MHZ,
-                test_security=hostapd_constants.WPA3_STRING,
-                base_test_name=self.test_name,
-                min_tx_throughput=self._get_min_tx_throughput(self.test_name),
-                min_rx_throughput=self._get_min_rx_throughput(self.test_name),
-                max_std_dev=self._get_max_std_dev(self.test_name),
-            )
-        )
-
-    # 24 Test Cases
-    def test_us_80mhz_wpa3_channel_performance(self):
-        self.run_channel_performance_tests(
-            dict(
-                test_channels=hostapd_constants.US_CHANNELS_5G[:-1],
-                test_channel_bandwidth=hostapd_constants.CHANNEL_BANDWIDTH_80MHZ,
-                test_security=hostapd_constants.WPA3_STRING,
-                base_test_name=self.test_name,
-                min_tx_throughput=self._get_min_tx_throughput(self.test_name),
-                min_rx_throughput=self._get_min_rx_throughput(self.test_name),
-                max_std_dev=self._get_max_std_dev(self.test_name),
-            )
-        )
-
-    def test_channel_performance_debug(self):
-        """Run channel performance test cases from the ACTS config file.
-
-        Example:
-        "channel_sweep_test_params": {
-            "debug_channel_performance_tests": [
-                {
-                    "test_name": "test_123_20mhz_wpa2_performance"
-                    "test_channels": [1, 2, 3],
-                    "test_channel_bandwidth": 20,
-                    "test_security": "wpa2",
-                    "base_test_name": "test_123_perf",
-                    "min_tx_throughput": 1.1,
-                    "min_rx_throughput": 3,
-                    "max_std_dev": 0.5
-                },
-                ...
-            ]
-        }
-
-        """
-        asserts.skip_if(
-            "debug_channel_performance_tests"
-            not in self.user_params.get("channel_sweep_test_params", {}),
-            "No custom channel performance tests provided in config.",
-        )
-        base_tests = self.user_params["channel_sweep_test_params"][
-            "debug_channel_performance_tests"
-        ]
-        self.run_generated_testcases(
-            self.run_channel_performance_tests,
-            settings=base_tests,
-            name_func=get_test_name,
-        )
-
-    def test_regulatory_compliance(self):
-        """Run regulatory compliance test case from the ACTS config file.
-        Note: only one country_name OR country_code is required.
-
-        Example:
-        "channel_sweep_test_params": {
-            "regulatory_compliance_tests": [
-                {
-                    "test_name": "test_japan_compliance_1_13_36"
-                    "country_name": "JAPAN",
-                    "country_code": "JP",
-                    "test_channels": {
-                        "1": [20, 40], "13": [40], "36": [20, 40, 80]
-                    },
-                    "allowed_channels": {
-                        "1": [20, 40], "36": [20, 40, 80]
-                    },
-                    "base_test_name": "test_japan"
-                },
-                ...
-            ]
-        }
-        """
-        asserts.skip_if(
-            "regulatory_compliance_tests"
-            not in self.user_params.get("channel_sweep_test_params", {}),
-            "No custom regulatory compliance tests provided in config.",
-        )
-
-        # TODO(http://b/280442689): Add "supported_country_codes" and
-        # "unsupported_channels" to test params
-        base_tests = self.user_params["channel_sweep_test_params"][
-            "regulatory_compliance_tests"
-        ]
-        self.run_generated_testcases(
-            self.verify_regulatory_compliance,
-            settings=base_tests,
-            name_func=get_test_name,
-        )
-
-
-if __name__ == "__main__":
-    test_runner.main()
diff --git a/src/antlion/tests/wlan/performance/WlanRvrTest.py b/src/antlion/tests/wlan/performance/WlanRvrTest.py
deleted file mode 100644
index ad97221..0000000
--- a/src/antlion/tests/wlan/performance/WlanRvrTest.py
+++ /dev/null
@@ -1,1137 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import os
-import time
-import logging
-
-from antlion import context
-from antlion.controllers.access_point import setup_ap
-from antlion.controllers.ap_lib import hostapd_constants
-from antlion.controllers.ap_lib.radvd import Radvd
-from antlion.controllers.ap_lib.radvd_config import RadvdConfig
-from antlion.controllers.ap_lib.hostapd_security import Security
-from antlion.controllers.attenuator import get_attenuators_for_device
-from antlion.controllers.iperf_server import IPerfResult
-from antlion.test_utils.abstract_devices.wlan_device import create_wlan_device
-from antlion.test_utils.wifi import base_test
-from antlion.utils import rand_ascii_str
-
-from mobly import asserts, test_runner
-
-AP_11ABG_PROFILE_NAME = "whirlwind_11ag_legacy"
-REPORTING_SPEED_UNITS = "Mbps"
-
-RVR_GRAPH_SUMMARY_FILE = "rvr_summary.html"
-
-DAD_TIMEOUT_SEC = 30
-
-
-def create_rvr_graph(test_name, graph_path, graph_data):
-    """Creates the RvR graphs
-    Args:
-        test_name: The name of test that was run.  This is the title of the
-            graph
-        graph_path: Where to put the graph html file.
-        graph_data: A dictionary of the data to be graphed.
-    Returns:
-        A list of bokeh graph objects.
-    """
-    try:
-        from bokeh.plotting import ColumnDataSource
-        from bokeh.plotting import figure
-        from bokeh.plotting import output_file
-        from bokeh.plotting import save
-    except ImportError as e:
-        logging.warn(
-            "bokeh is not installed: skipping creation of graphs. "
-            "Note CSV files are still available. If graphs are "
-            'desired, install antlion with the "bokeh" feature.'
-        )
-        return []
-
-    output_file(
-        "%srvr_throughput_vs_attn_%s.html" % (graph_path, test_name), title=test_name
-    )
-    throughput_vs_attn_data = ColumnDataSource(
-        data=dict(
-            relative_attn=graph_data["throughput_vs_attn"]["relative_attn"],
-            throughput=graph_data["throughput_vs_attn"]["throughput"],
-        )
-    )
-    TOOLTIPS = [("Attenuation", "@relative_attn"), ("Throughput", "@throughput")]
-    throughput_vs_attn_graph = figure(
-        title="Throughput vs Relative Attenuation (Test Case: %s)" % test_name,
-        x_axis_label=graph_data["throughput_vs_attn"]["x_label"],
-        y_axis_label=graph_data["throughput_vs_attn"]["y_label"],
-        x_range=graph_data["throughput_vs_attn"]["relative_attn"],
-        tooltips=TOOLTIPS,
-    )
-    throughput_vs_attn_graph.sizing_mode = "stretch_width"
-    throughput_vs_attn_graph.title.align = "center"
-    throughput_vs_attn_graph.line(
-        "relative_attn", "throughput", source=throughput_vs_attn_data, line_width=2
-    )
-    throughput_vs_attn_graph.circle(
-        "relative_attn", "throughput", source=throughput_vs_attn_data, size=10
-    )
-    save([throughput_vs_attn_graph])
-    return [throughput_vs_attn_graph]
-
-
-def write_csv_rvr_data(test_name, csv_path, csv_data):
-    """Writes the CSV data for the RvR test
-    Args:
-        test_name: The name of test that was run.
-        csv_path: Where to put the csv file.
-        csv_data: A dictionary of the data to be put in the csv file.
-    """
-    csv_file_name = "%srvr_throughput_vs_attn_%s.csv" % (csv_path, test_name)
-    throughput = csv_data["throughput_vs_attn"]["throughput"]
-    relative_attn = csv_data["throughput_vs_attn"]["relative_attn"]
-    with open(csv_file_name, "w+") as csv_fileId:
-        csv_fileId.write(
-            "%s,%s\n"
-            % (
-                csv_data["throughput_vs_attn"]["x_label"],
-                csv_data["throughput_vs_attn"]["y_label"],
-            )
-        )
-        for csv_loop_counter in range(0, len(relative_attn)):
-            csv_fileId.write(
-                "%s,%s\n"
-                % (int(relative_attn[csv_loop_counter]), throughput[csv_loop_counter])
-            )
-
-
-class WlanRvrTest(base_test.WifiBaseTest):
-    """Tests running WLAN RvR.
-
-    Test Bed Requirement:
-    * One Android device or Fuchsia device
-    * One Access Point
-    * One attenuator
-    * One Linux iPerf Server
-    """
-
-    def __init__(self, controllers):
-        super().__init__(controllers)
-        self.rvr_graph_summary = []
-
-    def setup_class(self):
-        super().setup_class()
-
-        device_type = self.user_params.get("dut", "fuchsia_devices")
-        if device_type == "fuchsia_devices":
-            self.dut = create_wlan_device(self.fuchsia_devices[0])
-        elif device_type == "android_devices":
-            self.dut = create_wlan_device(self.android_devices[0])
-        else:
-            raise ValueError(
-                f'Invalid "dut" type specified in config: "{device_type}".'
-                'Expected "fuchsia_devices" or "android_devices".'
-            )
-
-        self.starting_attn = self.user_params["rvr_settings"].get("starting_attn", 0)
-
-        self.ending_attn = self.user_params["rvr_settings"].get("ending_attn", 95)
-
-        self.step_size_in_db = self.user_params["rvr_settings"].get(
-            "step_size_in_db", 1
-        )
-
-        self.dwell_time_in_secs = self.user_params["rvr_settings"].get(
-            "dwell_time_in_secs", 10
-        )
-
-        self.reverse_rvr_after_forward = bool(
-            (self.user_params["rvr_settings"].get("reverse_rvr_after_forward", None))
-        )
-
-        self.iperf_flags = self.user_params["rvr_settings"].get("iperf_flags", "-i 1")
-
-        self.iperf_flags = "%s -t %s -J" % (self.iperf_flags, self.dwell_time_in_secs)
-
-        self.debug_loop_count = self.user_params["rvr_settings"].get(
-            "debug_loop_count", 1
-        )
-
-        self.debug_pre_traffic_cmd = self.user_params["rvr_settings"].get(
-            "debug_pre_traffic_cmd", None
-        )
-
-        self.debug_post_traffic_cmd = self.user_params["rvr_settings"].get(
-            "debug_post_traffic_cmd", None
-        )
-
-        self.router_adv_daemon = None
-
-        if self.ending_attn == "auto":
-            self.use_auto_end = True
-            self.ending_attn = 100
-            if self.step_size_in_db > 2:
-                asserts.fail(
-                    "When using an ending attenuation of 'auto' "
-                    "please use a value < 2db.  Larger jumps will "
-                    "break the test reporting."
-                )
-
-        self.access_point = self.access_points[0]
-        self.attenuators_2g = get_attenuators_for_device(
-            self.controller_configs["AccessPoint"][0]["Attenuator"],
-            self.attenuators,
-            "attenuator_ports_wifi_2g",
-        )
-        self.attenuators_5g = get_attenuators_for_device(
-            self.controller_configs["AccessPoint"][0]["Attenuator"],
-            self.attenuators,
-            "attenuator_ports_wifi_5g",
-        )
-
-        self.iperf_server = self.iperf_servers[0]
-
-        if hasattr(self, "iperf_clients") and self.iperf_clients:
-            self.dut_iperf_client = self.iperf_clients[0]
-        else:
-            self.dut_iperf_client = self.dut.create_iperf_client()
-
-        self.access_point.stop_all_aps()
-
-    def setup_test(self):
-        if self.iperf_server:
-            self.iperf_server.start()
-        if hasattr(self, "android_devices"):
-            for ad in self.android_devices:
-                ad.droid.wakeLockAcquireBright()
-                ad.droid.wakeUpNow()
-        self.dut.wifi_toggle_state(True)
-
-    def teardown_test(self):
-        self.cleanup_tests()
-
-    def teardown_class(self):
-        if self.router_adv_daemon:
-            self.router_adv_daemon.stop()
-        try:
-            from bokeh.plotting import output_file
-            from bokeh.plotting import save
-
-            output_path = context.get_current_context().get_base_output_path()
-            test_class_name = context.get_current_context().test_class_name
-
-            output_file(
-                f"{output_path}/{test_class_name}/rvr_summary.html", title="RvR Sumamry"
-            )
-            save(list(self.rvr_graph_summary))
-        except ImportError as e:
-            logging.warn(
-                "bokeh is not installed: skipping creation of graphs. "
-                "Note CSV files are still available. If graphs are "
-                'desired, install antlion with the "bokeh" feature.'
-            )
-        except Exception as e:
-            self.log.error(f"Unable to generate RvR summary file: {e}")
-
-        super().teardown_class()
-
-    def on_fail(self, test_name, begin_time):
-        super().on_fail(test_name, begin_time)
-        self.cleanup_tests()
-
-    def cleanup_tests(self):
-        """Cleans up all the dangling pieces of the tests, for example, the
-        iperf server, radvd, all the currently running APs, and the various
-        clients running during the tests.
-        """
-
-        if self.router_adv_daemon:
-            output_path = context.get_current_context().get_base_output_path()
-            full_output_path = os.path.join(output_path, "radvd_log.txt")
-            radvd_log_file = open(full_output_path, "w")
-            radvd_log_file.write(self.router_adv_daemon.pull_logs())
-            radvd_log_file.close()
-            self.router_adv_daemon.stop()
-        if hasattr(self, "android_devices"):
-            for ad in self.android_devices:
-                ad.droid.wakeLockRelease()
-                ad.droid.goToSleepNow()
-        if self.iperf_server:
-            self.iperf_server.stop()
-        self.dut.turn_location_off_and_scan_toggle_off()
-        self.dut.disconnect()
-        self.dut.reset_wifi()
-        self.download_ap_logs()
-        self.access_point.stop_all_aps()
-
-    def _wait_for_ipv4_addrs(self):
-        """Wait for an IPv4 addresses to become available on the DUT and iperf
-        server.
-
-        Returns:
-           A string containing the private IPv4 address of the iperf server.
-
-        Raises:
-            TestFailure: If unable to acquire a IPv4 address.
-        """
-        ip_address_checker_counter = 0
-        ip_address_checker_max_attempts = 3
-        while ip_address_checker_counter < ip_address_checker_max_attempts:
-            self.iperf_server.renew_test_interface_ip_address()
-            iperf_server_ip_addresses = self.iperf_server.get_interface_ip_addresses(
-                self.iperf_server.test_interface
-            )
-            dut_ip_addresses = self.dut.device.get_interface_ip_addresses(
-                self.dut_iperf_client.test_interface
-            )
-
-            self.log.info("IPerf server IP info: {}".format(iperf_server_ip_addresses))
-            self.log.info("DUT IP info: {}".format(dut_ip_addresses))
-
-            if not iperf_server_ip_addresses["ipv4_private"]:
-                self.log.warn(
-                    "Unable to get the iperf server IPv4 " "address. Retrying..."
-                )
-                ip_address_checker_counter += 1
-                time.sleep(1)
-                continue
-
-            if dut_ip_addresses["ipv4_private"]:
-                return iperf_server_ip_addresses["ipv4_private"][0]
-
-            self.log.warn(
-                "Unable to get the DUT IPv4 address starting at "
-                'attenuation "{}". Retrying...'.format(self.starting_attn)
-            )
-            ip_address_checker_counter += 1
-            time.sleep(1)
-
-        asserts.fail(
-            "IPv4 addresses are not available on both the DUT and iperf server."
-        )
-
-    # TODO (b/258264565): Merge with fuchsia_device wait_for_ipv6_addr.
-    def _wait_for_dad(self, device, test_interface):
-        """Wait for Duplicate Address Detection to resolve so that an
-        private-local IPv6 address is available for test.
-
-        Args:
-            device: implementor of get_interface_ip_addresses
-            test_interface: name of interface that DAD is operating on
-
-        Returns:
-            A string containing the private-local IPv6 address of the device.
-
-        Raises:
-            TestFailure: If unable to acquire an IPv6 address.
-        """
-        now = time.time()
-        start = now
-        elapsed = now - start
-
-        while elapsed < DAD_TIMEOUT_SEC:
-            addrs = device.get_interface_ip_addresses(test_interface)
-            now = time.time()
-            elapsed = now - start
-            if addrs["ipv6_private_local"]:
-                # DAD has completed
-                addr = addrs["ipv6_private_local"][0]
-                self.log.info('DAD resolved with "{}" after {}s'.format(addr, elapsed))
-                return addr
-            time.sleep(1)
-        else:
-            asserts.fail(
-                "Unable to acquire a private-local IPv6 address for testing "
-                "after {}s".format(elapsed)
-            )
-
-    def run_rvr(
-        self,
-        ssid,
-        security_mode=None,
-        password=None,
-        band="2g",
-        traffic_dir="tx",
-        ip_version=4,
-    ):
-        """Setups and runs the RvR test
-
-        Args:
-            ssid: The SSID for the client to associate to.
-            password: Password for the network, if necessary.
-            band: 2g or 5g
-            traffic_dir: rx or tx, bi is not supported by iperf3
-            ip_version: 4 or 6
-
-        Returns:
-            The bokeh graph data.
-        """
-        throughput = []
-        relative_attn = []
-        if band == "2g":
-            rvr_attenuators = self.attenuators_2g
-        elif band == "5g":
-            rvr_attenuators = self.attenuators_5g
-        else:
-            raise ValueError("Invalid WLAN band specified: %s" % band)
-        if ip_version == 6:
-            self.router_adv_daemon = Radvd(
-                self.access_point.ssh,
-                self.access_point.interfaces.get_bridge_interface()[0],
-            )
-            radvd_config = RadvdConfig()
-            self.router_adv_daemon.start(radvd_config)
-
-        for _ in range(0, self.debug_loop_count):
-            for rvr_attenuator in rvr_attenuators:
-                rvr_attenuator.set_atten(self.starting_attn)
-
-            associate_counter = 0
-            associate_max_attempts = 3
-            while associate_counter < associate_max_attempts:
-                if self.dut.associate(
-                    ssid,
-                    target_pwd=password,
-                    target_security=hostapd_constants.SECURITY_STRING_TO_DEFAULT_TARGET_SECURITY.get(
-                        security_mode
-                    ),
-                    check_connectivity=False,
-                ):
-                    break
-                else:
-                    associate_counter += 1
-            else:
-                asserts.fail(
-                    "Unable to associate at starting "
-                    "attenuation: %s" % self.starting_attn
-                )
-
-            if ip_version == 4:
-                iperf_server_ip_address = self._wait_for_ipv4_addrs()
-            elif ip_version == 6:
-                self.iperf_server.renew_test_interface_ip_address()
-                self.log.info(
-                    "Waiting for iperf server to complete Duplicate "
-                    "Address Detection..."
-                )
-                iperf_server_ip_address = self._wait_for_dad(
-                    self.iperf_server, self.iperf_server.test_interface
-                )
-
-                self.log.info(
-                    "Waiting for DUT to complete Duplicate Address "
-                    'Detection for "{}"...'.format(self.dut_iperf_client.test_interface)
-                )
-                _ = self._wait_for_dad(
-                    self.dut.device, self.dut_iperf_client.test_interface
-                )
-            else:
-                raise ValueError("Invalid IP version: {}".format(ip_version))
-
-            throughput, relative_attn = self.rvr_loop(
-                traffic_dir,
-                rvr_attenuators,
-                iperf_server_ip_address,
-                ip_version,
-                throughput=throughput,
-                relative_attn=relative_attn,
-            )
-            if self.reverse_rvr_after_forward:
-                throughput, relative_attn = self.rvr_loop(
-                    traffic_dir,
-                    rvr_attenuators,
-                    iperf_server_ip_address,
-                    ip_version,
-                    ssid=ssid,
-                    security_mode=security_mode,
-                    password=password,
-                    reverse=True,
-                    throughput=throughput,
-                    relative_attn=relative_attn,
-                )
-            self.dut.disconnect()
-
-        throughput_vs_attn = {
-            "throughput": throughput,
-            "relative_attn": relative_attn,
-            "x_label": "Attenuation(db)",
-            "y_label": "Throughput(%s)" % REPORTING_SPEED_UNITS,
-        }
-        graph_data = {"throughput_vs_attn": throughput_vs_attn}
-        return graph_data
-
-    def rvr_loop(
-        self,
-        traffic_dir,
-        rvr_attenuators,
-        iperf_server_ip_address,
-        ip_version,
-        ssid=None,
-        security_mode=None,
-        password=None,
-        reverse=False,
-        throughput=None,
-        relative_attn=None,
-    ):
-        """The loop that goes through each attenuation level and runs the iperf
-        throughput pair.
-        Args:
-            traffic_dir: The traffic direction from the perspective of the DUT.
-            rvr_attenuators: A list of attenuators to set.
-            iperf_server_ip_address: The IP address of the iperf server.
-            ssid: The ssid of the wireless network that the should associated
-                to.
-            password: Password of the wireless network.
-            reverse: Whether to run RvR test starting from the highest
-                attenuation and going to the lowest.  This is run after the
-                normal low attenuation to high attenuation RvR test.
-            throughput: The list of throughput data for the test.
-            relative_attn: The list of attenuation data for the test.
-
-        Returns:
-            throughput: The list of throughput data for the test.
-            relative_attn: The list of attenuation data for the test.
-        """
-        iperf_flags = self.iperf_flags
-        if traffic_dir == "rx":
-            iperf_flags = "%s -R" % self.iperf_flags
-        starting_attn = self.starting_attn
-        ending_attn = self.ending_attn
-        step_size_in_db = self.step_size_in_db
-        if reverse:
-            starting_attn = self.ending_attn
-            ending_attn = self.starting_attn
-            step_size_in_db = step_size_in_db * -1
-            self.dut.disconnect()
-        for step in range(starting_attn, ending_attn, step_size_in_db):
-            try:
-                for attenuator in rvr_attenuators:
-                    attenuator.set_atten(step)
-            except ValueError as e:
-                self.log.error(
-                    f"{step} is beyond the max or min of the testbed "
-                    f"attenuator's capability. Stopping. {e}"
-                )
-                break
-            self.log.info("Set relative attenuation to %s db" % step)
-
-            associated = self.dut.is_connected()
-            if associated:
-                self.log.info("DUT is currently associated.")
-            else:
-                self.log.info("DUT is not currently associated.")
-
-            if reverse:
-                if not associated:
-                    self.log.info(
-                        "Trying to associate at relative " "attenuation of %s db" % step
-                    )
-                    if self.dut.associate(
-                        ssid,
-                        target_pwd=password,
-                        target_security=hostapd_constants.SECURITY_STRING_TO_DEFAULT_TARGET_SECURITY.get(
-                            security_mode
-                        ),
-                        check_connectivity=False,
-                    ):
-                        associated = True
-                        self.log.info("Successfully associated.")
-                    else:
-                        associated = False
-                        self.log.info(
-                            "Association failed. Marking a 0 %s for"
-                            " throughput. Skipping running traffic."
-                            % REPORTING_SPEED_UNITS
-                        )
-            attn_value_inserted = False
-            value_to_insert = str(step)
-            while not attn_value_inserted:
-                if value_to_insert in relative_attn:
-                    value_to_insert = "%s " % value_to_insert
-                else:
-                    relative_attn.append(value_to_insert)
-                    attn_value_inserted = True
-
-            dut_ip_addresses = self.dut.device.get_interface_ip_addresses(
-                self.dut_iperf_client.test_interface
-            )
-            if ip_version == 4:
-                if not dut_ip_addresses["ipv4_private"]:
-                    self.log.info(
-                        "DUT does not have an IPv4 address. "
-                        "Traffic attempt to be run if the server "
-                        "is pingable."
-                    )
-                else:
-                    self.log.info(
-                        'DUT has the following IPv4 address: "%s"'
-                        % dut_ip_addresses["ipv4_private"][0]
-                    )
-            elif ip_version == 6:
-                if not dut_ip_addresses["ipv6_private_local"]:
-                    self.log.info(
-                        "DUT does not have an IPv6 address. "
-                        "Traffic attempt to be run if the server "
-                        "is pingable."
-                    )
-                else:
-                    self.log.info(
-                        'DUT has the following IPv6 address: "%s"'
-                        % dut_ip_addresses["ipv6_private_local"][0]
-                    )
-            server_pingable = self.dut.can_ping(iperf_server_ip_address)
-            if not server_pingable:
-                self.log.info(
-                    'Iperf server "%s" is not pingable. Marking '
-                    "a 0 %s for throughput. Skipping running "
-                    "traffic." % (iperf_server_ip_address, REPORTING_SPEED_UNITS)
-                )
-            else:
-                self.log.info(
-                    'Iperf server "%s" is pingable.' % iperf_server_ip_address
-                )
-            if self.debug_pre_traffic_cmd:
-                self.log.info(
-                    "\nDEBUG: Sending command '%s' to DUT" % self.debug_pre_traffic_cmd
-                )
-                self.log.info(
-                    "\n%s" % self.dut.send_command(self.debug_pre_traffic_cmd)
-                )
-            if server_pingable:
-                if traffic_dir == "tx":
-                    self.log.info(
-                        "Running traffic DUT to %s at relative "
-                        "attenuation of %s" % (iperf_server_ip_address, step)
-                    )
-                elif traffic_dir == "rx":
-                    self.log.info(
-                        "Running traffic %s to DUT at relative "
-                        "attenuation of %s" % (iperf_server_ip_address, step)
-                    )
-                else:
-                    raise ValueError("Invalid traffic direction")
-                try:
-                    iperf_tag = "decreasing"
-                    if reverse:
-                        iperf_tag = "increasing"
-                    iperf_results_file = self.dut_iperf_client.start(
-                        iperf_server_ip_address,
-                        iperf_flags,
-                        "%s_%s_%s" % (iperf_tag, traffic_dir, self.starting_attn),
-                        timeout=(self.dwell_time_in_secs * 2),
-                    )
-                except TimeoutError as e:
-                    iperf_results_file = None
-                    self.log.error(
-                        f"Iperf traffic timed out. Marking 0 {REPORTING_SPEED_UNITS} for "
-                        f"throughput. {e}"
-                    )
-
-                if not iperf_results_file:
-                    throughput.append(0)
-                else:
-                    try:
-                        iperf_results = IPerfResult(
-                            iperf_results_file,
-                            reporting_speed_units=REPORTING_SPEED_UNITS,
-                        )
-                        if iperf_results.error:
-                            self.iperf_server.stop()
-                            self.iperf_server.start()
-                            self.log.error(
-                                f"Errors in iperf logs:\n{iperf_results.error}"
-                            )
-                        if not iperf_results.avg_send_rate:
-                            throughput.append(0)
-                        else:
-                            throughput.append(iperf_results.avg_send_rate)
-                    except ValueError as e:
-                        self.iperf_server.stop()
-                        self.iperf_server.start()
-                        self.log.error(
-                            f"No data in iPerf3 file. Marking 0 {REPORTING_SPEED_UNITS} "
-                            f"for throughput: {e}"
-                        )
-                        throughput.append(0)
-                    except Exception as e:
-                        self.iperf_server.stop()
-                        self.iperf_server.start()
-                        self.log.error(
-                            f"Unknown exception. Marking 0 {REPORTING_SPEED_UNITS} for "
-                            f"throughput: {e}"
-                        )
-                        self.log.error(e)
-                        throughput.append(0)
-
-                self.log.info(
-                    "Iperf traffic complete. %s traffic received at "
-                    "%s %s at relative attenuation of %s db"
-                    % (
-                        traffic_dir,
-                        throughput[-1],
-                        REPORTING_SPEED_UNITS,
-                        str(relative_attn[-1]).strip(),
-                    )
-                )
-
-            else:
-                self.log.debug("DUT Associated: %s" % associated)
-                self.log.debug(
-                    "%s pingable: %s" % (iperf_server_ip_address, server_pingable)
-                )
-                throughput.append(0)
-            if self.debug_post_traffic_cmd:
-                self.log.info(
-                    "\nDEBUG: Sending command '%s' to DUT" % self.debug_post_traffic_cmd
-                )
-                self.log.info(
-                    "\n%s" % self.dut.send_command(self.debug_post_traffic_cmd)
-                )
-        return throughput, relative_attn
-
-    def test_rvr_11ac_5g_80mhz_open_tx_ipv4(self):
-        ssid = rand_ascii_str(20)
-        setup_ap(
-            access_point=self.access_point,
-            profile_name="whirlwind",
-            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-            ssid=ssid,
-            setup_bridge=True,
-        )
-        graph_data = self.run_rvr(ssid, band="5g", traffic_dir="tx", ip_version=4)
-        for rvr_graph in create_rvr_graph(
-            self.test_name,
-            context.get_current_context().get_full_output_path(),
-            graph_data,
-        ):
-            self.rvr_graph_summary.append(rvr_graph)
-        write_csv_rvr_data(
-            self.test_name,
-            context.get_current_context().get_full_output_path(),
-            graph_data,
-        )
-
-    def test_rvr_11ac_5g_80mhz_open_rx_ipv4(self):
-        ssid = rand_ascii_str(20)
-        setup_ap(
-            access_point=self.access_point,
-            profile_name="whirlwind",
-            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-            ssid=ssid,
-            setup_bridge=True,
-        )
-        graph_data = self.run_rvr(ssid, band="5g", traffic_dir="rx", ip_version=4)
-        for rvr_graph in create_rvr_graph(
-            self.test_name,
-            context.get_current_context().get_full_output_path(),
-            graph_data,
-        ):
-            self.rvr_graph_summary.append(rvr_graph)
-        write_csv_rvr_data(
-            self.test_name,
-            context.get_current_context().get_full_output_path(),
-            graph_data,
-        )
-
-    def test_rvr_11ac_5g_80mhz_open_tx_ipv6(self):
-        ssid = rand_ascii_str(20)
-        setup_ap(
-            access_point=self.access_point,
-            profile_name="whirlwind",
-            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-            ssid=ssid,
-            setup_bridge=True,
-        )
-        graph_data = self.run_rvr(ssid, band="5g", traffic_dir="tx", ip_version=6)
-        for rvr_graph in create_rvr_graph(
-            self.test_name,
-            context.get_current_context().get_full_output_path(),
-            graph_data,
-        ):
-            self.rvr_graph_summary.append(rvr_graph)
-        write_csv_rvr_data(
-            self.test_name,
-            context.get_current_context().get_full_output_path(),
-            graph_data,
-        )
-
-    def test_rvr_11ac_5g_80mhz_open_rx_ipv6(self):
-        ssid = rand_ascii_str(20)
-        setup_ap(
-            access_point=self.access_point,
-            profile_name="whirlwind",
-            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-            ssid=ssid,
-            setup_bridge=True,
-        )
-        graph_data = self.run_rvr(ssid, band="5g", traffic_dir="rx", ip_version=6)
-        for rvr_graph in create_rvr_graph(
-            self.test_name,
-            context.get_current_context().get_full_output_path(),
-            graph_data,
-        ):
-            self.rvr_graph_summary.append(rvr_graph)
-        write_csv_rvr_data(
-            self.test_name,
-            context.get_current_context().get_full_output_path(),
-            graph_data,
-        )
-
-    def test_rvr_11ac_5g_80mhz_wpa2_tx_ipv4(self):
-        ssid = rand_ascii_str(20)
-        password = rand_ascii_str(20)
-        security_profile = Security(security_mode="wpa2", password=password)
-        setup_ap(
-            access_point=self.access_point,
-            profile_name="whirlwind",
-            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-            ssid=ssid,
-            security=security_profile,
-            setup_bridge=True,
-        )
-        graph_data = self.run_rvr(
-            ssid,
-            security_mode="wpa2",
-            password=password,
-            band="5g",
-            traffic_dir="tx",
-            ip_version=4,
-        )
-        for rvr_graph in create_rvr_graph(
-            self.test_name,
-            context.get_current_context().get_full_output_path(),
-            graph_data,
-        ):
-            self.rvr_graph_summary.append(rvr_graph)
-        write_csv_rvr_data(
-            self.test_name,
-            context.get_current_context().get_full_output_path(),
-            graph_data,
-        )
-
-    def test_rvr_11ac_5g_80mhz_wpa2_rx_ipv4(self):
-        ssid = rand_ascii_str(20)
-        password = rand_ascii_str(20)
-        security_profile = Security(security_mode="wpa2", password=password)
-        setup_ap(
-            access_point=self.access_point,
-            profile_name="whirlwind",
-            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-            ssid=ssid,
-            security=security_profile,
-            setup_bridge=True,
-        )
-        graph_data = self.run_rvr(
-            ssid,
-            security_mode="wpa2",
-            password=password,
-            band="5g",
-            traffic_dir="rx",
-            ip_version=4,
-        )
-        for rvr_graph in create_rvr_graph(
-            self.test_name,
-            context.get_current_context().get_full_output_path(),
-            graph_data,
-        ):
-            self.rvr_graph_summary.append(rvr_graph)
-        write_csv_rvr_data(
-            self.test_name,
-            context.get_current_context().get_full_output_path(),
-            graph_data,
-        )
-
-    def test_rvr_11ac_5g_80mhz_wpa2_tx_ipv6(self):
-        ssid = rand_ascii_str(20)
-        password = rand_ascii_str(20)
-        security_profile = Security(security_mode="wpa2", password=password)
-        setup_ap(
-            access_point=self.access_point,
-            profile_name="whirlwind",
-            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-            ssid=ssid,
-            security=security_profile,
-            setup_bridge=True,
-        )
-        graph_data = self.run_rvr(
-            ssid,
-            security_mode="wpa2",
-            password=password,
-            band="5g",
-            traffic_dir="tx",
-            ip_version=6,
-        )
-        for rvr_graph in create_rvr_graph(
-            self.test_name,
-            context.get_current_context().get_full_output_path(),
-            graph_data,
-        ):
-            self.rvr_graph_summary.append(rvr_graph)
-        write_csv_rvr_data(
-            self.test_name,
-            context.get_current_context().get_full_output_path(),
-            graph_data,
-        )
-
-    def test_rvr_11ac_5g_80mhz_wpa2_rx_ipv6(self):
-        ssid = rand_ascii_str(20)
-        password = rand_ascii_str(20)
-        security_profile = Security(security_mode="wpa2", password=password)
-        setup_ap(
-            access_point=self.access_point,
-            profile_name="whirlwind",
-            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-            ssid=ssid,
-            security=security_profile,
-            setup_bridge=True,
-        )
-        graph_data = self.run_rvr(
-            ssid,
-            security_mode="wpa2",
-            password=password,
-            band="5g",
-            traffic_dir="rx",
-            ip_version=6,
-        )
-        for rvr_graph in create_rvr_graph(
-            self.test_name,
-            context.get_current_context().get_full_output_path(),
-            graph_data,
-        ):
-            self.rvr_graph_summary.append(rvr_graph)
-        write_csv_rvr_data(
-            self.test_name,
-            context.get_current_context().get_full_output_path(),
-            graph_data,
-        )
-
-    def test_rvr_11n_2g_20mhz_open_tx_ipv4(self):
-        ssid = rand_ascii_str(20)
-        setup_ap(
-            access_point=self.access_point,
-            profile_name="whirlwind",
-            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-            ssid=ssid,
-            setup_bridge=True,
-        )
-        graph_data = self.run_rvr(ssid, band="2g", traffic_dir="tx", ip_version=4)
-        for rvr_graph in create_rvr_graph(
-            self.test_name,
-            context.get_current_context().get_full_output_path(),
-            graph_data,
-        ):
-            self.rvr_graph_summary.append(rvr_graph)
-        write_csv_rvr_data(
-            self.test_name,
-            context.get_current_context().get_full_output_path(),
-            graph_data,
-        )
-
-    def test_rvr_11n_2g_20mhz_open_rx_ipv4(self):
-        ssid = rand_ascii_str(20)
-        setup_ap(
-            access_point=self.access_point,
-            profile_name="whirlwind",
-            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-            ssid=ssid,
-            setup_bridge=True,
-        )
-        graph_data = self.run_rvr(ssid, band="2g", traffic_dir="rx", ip_version=4)
-        for rvr_graph in create_rvr_graph(
-            self.test_name,
-            context.get_current_context().get_full_output_path(),
-            graph_data,
-        ):
-            self.rvr_graph_summary.append(rvr_graph)
-        write_csv_rvr_data(
-            self.test_name,
-            context.get_current_context().get_full_output_path(),
-            graph_data,
-        )
-
-    def test_rvr_11n_2g_20mhz_open_tx_ipv6(self):
-        ssid = rand_ascii_str(20)
-        setup_ap(
-            access_point=self.access_point,
-            profile_name="whirlwind",
-            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-            ssid=ssid,
-            setup_bridge=True,
-        )
-        graph_data = self.run_rvr(ssid, band="2g", traffic_dir="tx", ip_version=6)
-        for rvr_graph in create_rvr_graph(
-            self.test_name,
-            context.get_current_context().get_full_output_path(),
-            graph_data,
-        ):
-            self.rvr_graph_summary.append(rvr_graph)
-        write_csv_rvr_data(
-            self.test_name,
-            context.get_current_context().get_full_output_path(),
-            graph_data,
-        )
-
-    def test_rvr_11n_2g_20mhz_open_rx_ipv6(self):
-        ssid = rand_ascii_str(20)
-        setup_ap(
-            access_point=self.access_point,
-            profile_name="whirlwind",
-            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-            ssid=ssid,
-            setup_bridge=True,
-        )
-        graph_data = self.run_rvr(ssid, band="2g", traffic_dir="rx", ip_version=6)
-        for rvr_graph in create_rvr_graph(
-            self.test_name,
-            context.get_current_context().get_full_output_path(),
-            graph_data,
-        ):
-            self.rvr_graph_summary.append(rvr_graph)
-        write_csv_rvr_data(
-            self.test_name,
-            context.get_current_context().get_full_output_path(),
-            graph_data,
-        )
-
-    def test_rvr_11n_2g_20mhz_wpa2_tx_ipv4(self):
-        ssid = rand_ascii_str(20)
-        password = rand_ascii_str(20)
-        security_profile = Security(security_mode="wpa2", password=password)
-        setup_ap(
-            access_point=self.access_point,
-            profile_name="whirlwind",
-            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-            ssid=ssid,
-            security=security_profile,
-            setup_bridge=True,
-        )
-        graph_data = self.run_rvr(
-            ssid,
-            security_mode="wpa2",
-            password=password,
-            band="2g",
-            traffic_dir="tx",
-            ip_version=4,
-        )
-        for rvr_graph in create_rvr_graph(
-            self.test_name,
-            context.get_current_context().get_full_output_path(),
-            graph_data,
-        ):
-            self.rvr_graph_summary.append(rvr_graph)
-        write_csv_rvr_data(
-            self.test_name,
-            context.get_current_context().get_full_output_path(),
-            graph_data,
-        )
-
-    def test_rvr_11n_2g_20mhz_wpa2_rx_ipv4(self):
-        ssid = rand_ascii_str(20)
-        password = rand_ascii_str(20)
-        security_profile = Security(security_mode="wpa2", password=password)
-        setup_ap(
-            access_point=self.access_point,
-            profile_name="whirlwind",
-            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-            ssid=ssid,
-            security=security_profile,
-            setup_bridge=True,
-        )
-        graph_data = self.run_rvr(
-            ssid,
-            security_mode="wpa2",
-            password=password,
-            band="2g",
-            traffic_dir="rx",
-            ip_version=4,
-        )
-        for rvr_graph in create_rvr_graph(
-            self.test_name,
-            context.get_current_context().get_full_output_path(),
-            graph_data,
-        ):
-            self.rvr_graph_summary.append(rvr_graph)
-        write_csv_rvr_data(
-            self.test_name,
-            context.get_current_context().get_full_output_path(),
-            graph_data,
-        )
-
-    def test_rvr_11n_2g_20mhz_wpa2_tx_ipv6(self):
-        ssid = rand_ascii_str(20)
-        password = rand_ascii_str(20)
-        security_profile = Security(security_mode="wpa2", password=password)
-        setup_ap(
-            access_point=self.access_point,
-            profile_name="whirlwind",
-            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-            ssid=ssid,
-            security=security_profile,
-            setup_bridge=True,
-        )
-        graph_data = self.run_rvr(
-            ssid,
-            security_mode="wpa2",
-            password=password,
-            band="2g",
-            traffic_dir="tx",
-            ip_version=6,
-        )
-        for rvr_graph in create_rvr_graph(
-            self.test_name,
-            context.get_current_context().get_full_output_path(),
-            graph_data,
-        ):
-            self.rvr_graph_summary.append(rvr_graph)
-        write_csv_rvr_data(
-            self.test_name,
-            context.get_current_context().get_full_output_path(),
-            graph_data,
-        )
-
-    def test_rvr_11n_2g_20mhz_wpa2_rx_ipv6(self):
-        ssid = rand_ascii_str(20)
-        password = rand_ascii_str(20)
-        security_profile = Security(security_mode="wpa2", password=password)
-        setup_ap(
-            access_point=self.access_point,
-            profile_name="whirlwind",
-            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-            ssid=ssid,
-            security=security_profile,
-            setup_bridge=True,
-        )
-        graph_data = self.run_rvr(
-            ssid,
-            security_mode="wpa2",
-            password=password,
-            band="2g",
-            traffic_dir="rx",
-            ip_version=6,
-        )
-        for rvr_graph in create_rvr_graph(
-            self.test_name,
-            context.get_current_context().get_full_output_path(),
-            graph_data,
-        ):
-            self.rvr_graph_summary.append(rvr_graph)
-        write_csv_rvr_data(
-            self.test_name,
-            context.get_current_context().get_full_output_path(),
-            graph_data,
-        )
-
-
-if __name__ == "__main__":
-    test_runner.main()
diff --git a/src/antlion/tests/wlan/performance/channel_sweep_test_params.yaml b/src/antlion/tests/wlan/performance/channel_sweep_test_params.yaml
deleted file mode 100644
index b70490b..0000000
--- a/src/antlion/tests/wlan/performance/channel_sweep_test_params.yaml
+++ /dev/null
@@ -1,5408 +0,0 @@
-channel_sweep_test_params:
-  skip_performance: false
-  debug_channel_performance_tests:
-    - test_name: test_random_2g_20mhz_channel
-      test_channels:
-        - 8
-      test_channel_bandwidth: 20
-    - test_name: test_random_dfs_5g_80mhz_channel
-      test_channels:
-        - 100
-      test_channel_bandwidth: 80
-    - test_name: test_random_nondfs_5g_80mhz_channel
-      test_channels:
-        - 157
-      test_channel_bandwidth: 80
-    - test_name: test_channel_165
-      test_channels:
-        - 165
-      test_channel_bandwidth: 20
-  regulatory_compliance_tests:
-    - test_name: test_SE_regulatory_compliance
-      country_code: SE
-      test_channels:
-        "1":
-          - 20
-        "2":
-          - 20
-        "3":
-          - 20
-        "4":
-          - 20
-        "5":
-          - 20
-        "6":
-          - 20
-        "7":
-          - 20
-        "8":
-          - 20
-        "9":
-          - 20
-        "10":
-          - 20
-        "11":
-          - 20
-        "12":
-          - 20
-        "13":
-          - 20
-        "14":
-          - 20
-        "36":
-          - 20
-          - 40
-          - 80
-        "40":
-          - 20
-          - 40
-          - 80
-        "44":
-          - 20
-          - 40
-          - 80
-        "48":
-          - 20
-          - 40
-          - 80
-        "52":
-          - 20
-          - 40
-          - 80
-        "56":
-          - 20
-          - 40
-          - 80
-        "60":
-          - 20
-          - 40
-          - 80
-        "64":
-          - 20
-          - 40
-          - 80
-        "100":
-          - 20
-          - 40
-          - 80
-        "104":
-          - 20
-          - 40
-          - 80
-        "108":
-          - 20
-          - 40
-          - 80
-        "112":
-          - 20
-          - 40
-          - 80
-        "116":
-          - 20
-          - 40
-          - 80
-        "120":
-          - 20
-          - 40
-          - 80
-        "124":
-          - 20
-          - 40
-          - 80
-        "128":
-          - 20
-          - 40
-          - 80
-        "132":
-          - 20
-          - 40
-          - 80
-        "136":
-          - 20
-          - 40
-          - 80
-        "140":
-          - 20
-          - 40
-          - 80
-        "144":
-          - 20
-          - 40
-          - 80
-        "149":
-          - 20
-          - 40
-          - 80
-        "153":
-          - 20
-          - 40
-          - 80
-        "157":
-          - 20
-          - 40
-          - 80
-        "161":
-          - 20
-          - 40
-          - 80
-        "165":
-          - 20
-      allowed_channels:
-        "1":
-          - 20
-        "2":
-          - 20
-        "3":
-          - 20
-        "4":
-          - 20
-        "5":
-          - 20
-        "6":
-          - 20
-        "7":
-          - 20
-        "8":
-          - 20
-        "9":
-          - 20
-        "10":
-          - 20
-        "11":
-          - 20
-        "12":
-          - 20
-        "13":
-          - 20
-        "36":
-          - 20
-          - 40
-          - 80
-        "40":
-          - 20
-          - 40
-          - 80
-        "44":
-          - 20
-          - 40
-          - 80
-        "48":
-          - 20
-          - 40
-          - 80
-        "52":
-          - 20
-          - 40
-          - 80
-        "56":
-          - 20
-          - 40
-          - 80
-        "60":
-          - 20
-          - 40
-          - 80
-        "64":
-          - 20
-          - 40
-          - 80
-        "100":
-          - 20
-          - 40
-          - 80
-        "104":
-          - 20
-          - 40
-          - 80
-        "108":
-          - 20
-          - 40
-          - 80
-        "112":
-          - 20
-          - 40
-          - 80
-        "116":
-          - 20
-          - 40
-          - 80
-        "120":
-          - 20
-          - 40
-          - 80
-        "124":
-          - 20
-          - 40
-          - 80
-        "128":
-          - 20
-          - 40
-          - 80
-        "132":
-          - 20
-          - 40
-          - 80
-        "136":
-          - 20
-          - 40
-          - 80
-        "140":
-          - 20
-          - 40
-          - 80
-    - test_name: test_us_regulatory_compliance
-      country_code: US
-      test_channels:
-        "1":
-          - 20
-        "2":
-          - 20
-        "3":
-          - 20
-        "4":
-          - 20
-        "5":
-          - 20
-        "6":
-          - 20
-        "7":
-          - 20
-        "8":
-          - 20
-        "9":
-          - 20
-        "10":
-          - 20
-        "11":
-          - 20
-        "12":
-          - 20
-        "13":
-          - 20
-        "14":
-          - 20
-        "36":
-          - 20
-          - 40
-          - 80
-        "40":
-          - 20
-          - 40
-          - 80
-        "44":
-          - 20
-          - 40
-          - 80
-        "48":
-          - 20
-          - 40
-          - 80
-        "52":
-          - 20
-          - 40
-          - 80
-        "56":
-          - 20
-          - 40
-          - 80
-        "60":
-          - 20
-          - 40
-          - 80
-        "64":
-          - 20
-          - 40
-          - 80
-        "100":
-          - 20
-          - 40
-          - 80
-        "104":
-          - 20
-          - 40
-          - 80
-        "108":
-          - 20
-          - 40
-          - 80
-        "112":
-          - 20
-          - 40
-          - 80
-        "116":
-          - 20
-          - 40
-          - 80
-        "120":
-          - 20
-          - 40
-          - 80
-        "124":
-          - 20
-          - 40
-          - 80
-        "128":
-          - 20
-          - 40
-          - 80
-        "132":
-          - 20
-          - 40
-          - 80
-        "136":
-          - 20
-          - 40
-          - 80
-        "140":
-          - 20
-          - 40
-          - 80
-        "144":
-          - 20
-          - 40
-          - 80
-        "149":
-          - 20
-          - 40
-          - 80
-        "153":
-          - 20
-          - 40
-          - 80
-        "157":
-          - 20
-          - 40
-          - 80
-        "161":
-          - 20
-          - 40
-          - 80
-        "165":
-          - 20
-      allowed_channels:
-        "1":
-          - 20
-        "2":
-          - 20
-        "3":
-          - 20
-        "4":
-          - 20
-        "5":
-          - 20
-        "6":
-          - 20
-        "7":
-          - 20
-        "8":
-          - 20
-        "9":
-          - 20
-        "10":
-          - 20
-        "11":
-          - 20
-        "36":
-          - 20
-          - 40
-          - 80
-        "40":
-          - 20
-          - 40
-          - 80
-        "44":
-          - 20
-          - 40
-          - 80
-        "48":
-          - 20
-          - 40
-          - 80
-        "52":
-          - 20
-          - 40
-          - 80
-        "56":
-          - 20
-          - 40
-          - 80
-        "60":
-          - 20
-          - 40
-          - 80
-        "64":
-          - 20
-          - 40
-          - 80
-        "100":
-          - 20
-          - 40
-          - 80
-        "104":
-          - 20
-          - 40
-          - 80
-        "108":
-          - 20
-          - 40
-          - 80
-        "112":
-          - 20
-          - 40
-          - 80
-        "116":
-          - 20
-          - 40
-          - 80
-        "120":
-          - 20
-          - 40
-          - 80
-        "124":
-          - 20
-          - 40
-          - 80
-        "128":
-          - 20
-          - 40
-          - 80
-        "132":
-          - 20
-          - 40
-          - 80
-        "136":
-          - 20
-          - 40
-          - 80
-        "140":
-          - 20
-          - 40
-          - 80
-        "144":
-          - 20
-          - 40
-          - 80
-        "149":
-          - 20
-          - 40
-          - 80
-        "153":
-          - 20
-          - 40
-          - 80
-        "157":
-          - 20
-          - 40
-          - 80
-        "161":
-          - 20
-          - 40
-          - 80
-        "165":
-          - 20
-    - test_name: test_MX_regulatory_compliance
-      country_code: MX
-      test_channels:
-        "1":
-          - 20
-        "2":
-          - 20
-        "3":
-          - 20
-        "4":
-          - 20
-        "5":
-          - 20
-        "6":
-          - 20
-        "7":
-          - 20
-        "8":
-          - 20
-        "9":
-          - 20
-        "10":
-          - 20
-        "11":
-          - 20
-        "12":
-          - 20
-        "13":
-          - 20
-        "14":
-          - 20
-        "36":
-          - 20
-          - 40
-          - 80
-        "40":
-          - 20
-          - 40
-          - 80
-        "44":
-          - 20
-          - 40
-          - 80
-        "48":
-          - 20
-          - 40
-          - 80
-        "52":
-          - 20
-          - 40
-          - 80
-        "56":
-          - 20
-          - 40
-          - 80
-        "60":
-          - 20
-          - 40
-          - 80
-        "64":
-          - 20
-          - 40
-          - 80
-        "100":
-          - 20
-          - 40
-          - 80
-        "104":
-          - 20
-          - 40
-          - 80
-        "108":
-          - 20
-          - 40
-          - 80
-        "112":
-          - 20
-          - 40
-          - 80
-        "116":
-          - 20
-          - 40
-          - 80
-        "120":
-          - 20
-          - 40
-          - 80
-        "124":
-          - 20
-          - 40
-          - 80
-        "128":
-          - 20
-          - 40
-          - 80
-        "132":
-          - 20
-          - 40
-          - 80
-        "136":
-          - 20
-          - 40
-          - 80
-        "140":
-          - 20
-          - 40
-          - 80
-        "144":
-          - 20
-          - 40
-          - 80
-        "149":
-          - 20
-          - 40
-          - 80
-        "153":
-          - 20
-          - 40
-          - 80
-        "157":
-          - 20
-          - 40
-          - 80
-        "161":
-          - 20
-          - 40
-          - 80
-        "165":
-          - 20
-      allowed_channels:
-        "1":
-          - 20
-        "2":
-          - 20
-        "3":
-          - 20
-        "4":
-          - 20
-        "5":
-          - 20
-        "6":
-          - 20
-        "7":
-          - 20
-        "8":
-          - 20
-        "9":
-          - 20
-        "10":
-          - 20
-        "11":
-          - 20
-        "36":
-          - 20
-          - 40
-          - 80
-        "40":
-          - 20
-          - 40
-          - 80
-        "44":
-          - 20
-          - 40
-          - 80
-        "48":
-          - 20
-          - 40
-          - 80
-        "52":
-          - 20
-          - 40
-          - 80
-        "56":
-          - 20
-          - 40
-          - 80
-        "60":
-          - 20
-          - 40
-          - 80
-        "64":
-          - 20
-          - 40
-          - 80
-        "100":
-          - 20
-          - 40
-          - 80
-        "104":
-          - 20
-          - 40
-          - 80
-        "108":
-          - 20
-          - 40
-          - 80
-        "112":
-          - 20
-          - 40
-          - 80
-        "116":
-          - 20
-          - 40
-          - 80
-        "132":
-          - 20
-          - 40
-          - 80
-        "136":
-          - 20
-          - 40
-          - 80
-        "140":
-          - 20
-          - 40
-          - 80
-        "144":
-          - 20
-          - 40
-          - 80
-        "149":
-          - 20
-          - 40
-          - 80
-        "153":
-          - 20
-          - 40
-          - 80
-        "157":
-          - 20
-          - 40
-          - 80
-        "161":
-          - 20
-          - 40
-          - 80
-        "165":
-          - 20
-    - test_name: test_CA_regulatory_compliance
-      country_code: CA
-      test_channels:
-        "1":
-          - 20
-        "2":
-          - 20
-        "3":
-          - 20
-        "4":
-          - 20
-        "5":
-          - 20
-        "6":
-          - 20
-        "7":
-          - 20
-        "8":
-          - 20
-        "9":
-          - 20
-        "10":
-          - 20
-        "11":
-          - 20
-        "12":
-          - 20
-        "13":
-          - 20
-        "14":
-          - 20
-        "36":
-          - 20
-          - 40
-          - 80
-        "40":
-          - 20
-          - 40
-          - 80
-        "44":
-          - 20
-          - 40
-          - 80
-        "48":
-          - 20
-          - 40
-          - 80
-        "52":
-          - 20
-          - 40
-          - 80
-        "56":
-          - 20
-          - 40
-          - 80
-        "60":
-          - 20
-          - 40
-          - 80
-        "64":
-          - 20
-          - 40
-          - 80
-        "100":
-          - 20
-          - 40
-          - 80
-        "104":
-          - 20
-          - 40
-          - 80
-        "108":
-          - 20
-          - 40
-          - 80
-        "112":
-          - 20
-          - 40
-          - 80
-        "116":
-          - 20
-          - 40
-          - 80
-        "120":
-          - 20
-          - 40
-          - 80
-        "124":
-          - 20
-          - 40
-          - 80
-        "128":
-          - 20
-          - 40
-          - 80
-        "132":
-          - 20
-          - 40
-          - 80
-        "136":
-          - 20
-          - 40
-          - 80
-        "140":
-          - 20
-          - 40
-          - 80
-        "144":
-          - 20
-          - 40
-          - 80
-        "149":
-          - 20
-          - 40
-          - 80
-        "153":
-          - 20
-          - 40
-          - 80
-        "157":
-          - 20
-          - 40
-          - 80
-        "161":
-          - 20
-          - 40
-          - 80
-        "165":
-          - 20
-      allowed_channels:
-        "1":
-          - 20
-        "2":
-          - 20
-        "3":
-          - 20
-        "4":
-          - 20
-        "5":
-          - 20
-        "6":
-          - 20
-        "7":
-          - 20
-        "8":
-          - 20
-        "9":
-          - 20
-        "10":
-          - 20
-        "11":
-          - 20
-        "36":
-          - 20
-          - 40
-          - 80
-        "40":
-          - 20
-          - 40
-          - 80
-        "44":
-          - 20
-          - 40
-          - 80
-        "48":
-          - 20
-          - 40
-          - 80
-        "52":
-          - 20
-          - 40
-          - 80
-        "56":
-          - 20
-          - 40
-          - 80
-        "60":
-          - 20
-          - 40
-          - 80
-        "64":
-          - 20
-          - 40
-          - 80
-        "100":
-          - 20
-          - 40
-          - 80
-        "104":
-          - 20
-          - 40
-          - 80
-        "108":
-          - 20
-          - 40
-          - 80
-        "112":
-          - 20
-          - 40
-          - 80
-        "116":
-          - 20
-          - 40
-          - 80
-        "132":
-          - 20
-          - 40
-          - 80
-        "136":
-          - 20
-          - 40
-          - 80
-        "140":
-          - 20
-          - 40
-          - 80
-        "144":
-          - 20
-          - 40
-          - 80
-        "149":
-          - 20
-          - 40
-          - 80
-        "153":
-          - 20
-          - 40
-          - 80
-        "157":
-          - 20
-          - 40
-          - 80
-        "161":
-          - 20
-          - 40
-          - 80
-        "165":
-          - 20
-    - test_name: test_IN_regulatory_compliance
-      country_code: IN
-      test_channels:
-        "1":
-          - 20
-        "2":
-          - 20
-        "3":
-          - 20
-        "4":
-          - 20
-        "5":
-          - 20
-        "6":
-          - 20
-        "7":
-          - 20
-        "8":
-          - 20
-        "9":
-          - 20
-        "10":
-          - 20
-        "11":
-          - 20
-        "12":
-          - 20
-        "13":
-          - 20
-        "14":
-          - 20
-        "36":
-          - 20
-          - 40
-          - 80
-        "40":
-          - 20
-          - 40
-          - 80
-        "44":
-          - 20
-          - 40
-          - 80
-        "48":
-          - 20
-          - 40
-          - 80
-        "52":
-          - 20
-          - 40
-          - 80
-        "56":
-          - 20
-          - 40
-          - 80
-        "60":
-          - 20
-          - 40
-          - 80
-        "64":
-          - 20
-          - 40
-          - 80
-        "100":
-          - 20
-          - 40
-          - 80
-        "104":
-          - 20
-          - 40
-          - 80
-        "108":
-          - 20
-          - 40
-          - 80
-        "112":
-          - 20
-          - 40
-          - 80
-        "116":
-          - 20
-          - 40
-          - 80
-        "120":
-          - 20
-          - 40
-          - 80
-        "124":
-          - 20
-          - 40
-          - 80
-        "128":
-          - 20
-          - 40
-          - 80
-        "132":
-          - 20
-          - 40
-          - 80
-        "136":
-          - 20
-          - 40
-          - 80
-        "140":
-          - 20
-          - 40
-          - 80
-        "144":
-          - 20
-          - 40
-          - 80
-        "149":
-          - 20
-          - 40
-          - 80
-        "153":
-          - 20
-          - 40
-          - 80
-        "157":
-          - 20
-          - 40
-          - 80
-        "161":
-          - 20
-          - 40
-          - 80
-        "165":
-          - 20
-      allowed_channels:
-        "1":
-          - 20
-        "2":
-          - 20
-        "3":
-          - 20
-        "4":
-          - 20
-        "5":
-          - 20
-        "6":
-          - 20
-        "7":
-          - 20
-        "8":
-          - 20
-        "9":
-          - 20
-        "10":
-          - 20
-        "11":
-          - 20
-        "12":
-          - 20
-        "13":
-          - 20
-        "36":
-          - 20
-          - 40
-          - 80
-        "40":
-          - 20
-          - 40
-          - 80
-        "44":
-          - 20
-          - 40
-          - 80
-        "48":
-          - 20
-          - 40
-          - 80
-        "52":
-          - 20
-          - 40
-          - 80
-        "56":
-          - 20
-          - 40
-          - 80
-        "60":
-          - 20
-          - 40
-          - 80
-        "64":
-          - 20
-          - 40
-          - 80
-        "100":
-          - 20
-          - 40
-          - 80
-        "104":
-          - 20
-          - 40
-          - 80
-        "108":
-          - 20
-          - 40
-          - 80
-        "112":
-          - 20
-          - 40
-          - 80
-        "116":
-          - 20
-          - 40
-          - 80
-        "120":
-          - 20
-          - 40
-          - 80
-        "124":
-          - 20
-          - 40
-          - 80
-        "128":
-          - 20
-          - 40
-          - 80
-        "132":
-          - 20
-          - 40
-          - 80
-        "136":
-          - 20
-          - 40
-          - 80
-        "140":
-          - 20
-          - 40
-          - 80
-        "144":
-          - 20
-          - 40
-          - 80
-        "149":
-          - 20
-          - 40
-          - 80
-        "153":
-          - 20
-          - 40
-          - 80
-        "157":
-          - 20
-          - 40
-          - 80
-        "161":
-          - 20
-          - 40
-          - 80
-        "165":
-          - 20
-    - test_name: test_NZ_regulatory_compliance
-      country_code: NZ
-      test_channels:
-        "1":
-          - 20
-        "2":
-          - 20
-        "3":
-          - 20
-        "4":
-          - 20
-        "5":
-          - 20
-        "6":
-          - 20
-        "7":
-          - 20
-        "8":
-          - 20
-        "9":
-          - 20
-        "10":
-          - 20
-        "11":
-          - 20
-        "12":
-          - 20
-        "13":
-          - 20
-        "14":
-          - 20
-        "36":
-          - 20
-          - 40
-          - 80
-        "40":
-          - 20
-          - 40
-          - 80
-        "44":
-          - 20
-          - 40
-          - 80
-        "48":
-          - 20
-          - 40
-          - 80
-        "52":
-          - 20
-          - 40
-          - 80
-        "56":
-          - 20
-          - 40
-          - 80
-        "60":
-          - 20
-          - 40
-          - 80
-        "64":
-          - 20
-          - 40
-          - 80
-        "100":
-          - 20
-          - 40
-          - 80
-        "104":
-          - 20
-          - 40
-          - 80
-        "108":
-          - 20
-          - 40
-          - 80
-        "112":
-          - 20
-          - 40
-          - 80
-        "116":
-          - 20
-          - 40
-          - 80
-        "120":
-          - 20
-          - 40
-          - 80
-        "124":
-          - 20
-          - 40
-          - 80
-        "128":
-          - 20
-          - 40
-          - 80
-        "132":
-          - 20
-          - 40
-          - 80
-        "136":
-          - 20
-          - 40
-          - 80
-        "140":
-          - 20
-          - 40
-          - 80
-        "144":
-          - 20
-          - 40
-          - 80
-        "149":
-          - 20
-          - 40
-          - 80
-        "153":
-          - 20
-          - 40
-          - 80
-        "157":
-          - 20
-          - 40
-          - 80
-        "161":
-          - 20
-          - 40
-          - 80
-        "165":
-          - 20
-      allowed_channels:
-        "1":
-          - 20
-        "2":
-          - 20
-        "3":
-          - 20
-        "4":
-          - 20
-        "5":
-          - 20
-        "6":
-          - 20
-        "7":
-          - 20
-        "8":
-          - 20
-        "9":
-          - 20
-        "10":
-          - 20
-        "11":
-          - 20
-        "12":
-          - 20
-        "13":
-          - 20
-        "36":
-          - 20
-          - 40
-          - 80
-        "40":
-          - 20
-          - 40
-          - 80
-        "44":
-          - 20
-          - 40
-          - 80
-        "48":
-          - 20
-          - 40
-          - 80
-        "52":
-          - 20
-          - 40
-          - 80
-        "56":
-          - 20
-          - 40
-          - 80
-        "60":
-          - 20
-          - 40
-          - 80
-        "64":
-          - 20
-          - 40
-          - 80
-        "100":
-          - 20
-          - 40
-          - 80
-        "104":
-          - 20
-          - 40
-          - 80
-        "108":
-          - 20
-          - 40
-          - 80
-        "112":
-          - 20
-          - 40
-          - 80
-        "116":
-          - 20
-          - 40
-          - 80
-        "120":
-          - 20
-          - 40
-          - 80
-        "124":
-          - 20
-          - 40
-          - 80
-        "128":
-          - 20
-          - 40
-          - 80
-        "132":
-          - 20
-          - 40
-          - 80
-        "136":
-          - 20
-          - 40
-          - 80
-        "140":
-          - 20
-          - 40
-          - 80
-        "144":
-          - 20
-          - 40
-          - 80
-        "149":
-          - 20
-          - 40
-          - 80
-        "153":
-          - 20
-          - 40
-          - 80
-        "157":
-          - 20
-          - 40
-          - 80
-        "161":
-          - 20
-          - 40
-          - 80
-        "165":
-          - 20
-    - test_name: test_GB_regulatory_compliance
-      country_code: GB
-      test_channels:
-        "1":
-          - 20
-        "2":
-          - 20
-        "3":
-          - 20
-        "4":
-          - 20
-        "5":
-          - 20
-        "6":
-          - 20
-        "7":
-          - 20
-        "8":
-          - 20
-        "9":
-          - 20
-        "10":
-          - 20
-        "11":
-          - 20
-        "12":
-          - 20
-        "13":
-          - 20
-        "14":
-          - 20
-        "36":
-          - 20
-          - 40
-          - 80
-        "40":
-          - 20
-          - 40
-          - 80
-        "44":
-          - 20
-          - 40
-          - 80
-        "48":
-          - 20
-          - 40
-          - 80
-        "52":
-          - 20
-          - 40
-          - 80
-        "56":
-          - 20
-          - 40
-          - 80
-        "60":
-          - 20
-          - 40
-          - 80
-        "64":
-          - 20
-          - 40
-          - 80
-        "100":
-          - 20
-          - 40
-          - 80
-        "104":
-          - 20
-          - 40
-          - 80
-        "108":
-          - 20
-          - 40
-          - 80
-        "112":
-          - 20
-          - 40
-          - 80
-        "116":
-          - 20
-          - 40
-          - 80
-        "120":
-          - 20
-          - 40
-          - 80
-        "124":
-          - 20
-          - 40
-          - 80
-        "128":
-          - 20
-          - 40
-          - 80
-        "132":
-          - 20
-          - 40
-          - 80
-        "136":
-          - 20
-          - 40
-          - 80
-        "140":
-          - 20
-          - 40
-          - 80
-        "144":
-          - 20
-          - 40
-          - 80
-        "149":
-          - 20
-          - 40
-          - 80
-        "153":
-          - 20
-          - 40
-          - 80
-        "157":
-          - 20
-          - 40
-          - 80
-        "161":
-          - 20
-          - 40
-          - 80
-        "165":
-          - 20
-      allowed_channels:
-        "1":
-          - 20
-        "2":
-          - 20
-        "3":
-          - 20
-        "4":
-          - 20
-        "5":
-          - 20
-        "6":
-          - 20
-        "7":
-          - 20
-        "8":
-          - 20
-        "9":
-          - 20
-        "10":
-          - 20
-        "11":
-          - 20
-        "12":
-          - 20
-        "13":
-          - 20
-        "36":
-          - 20
-          - 40
-          - 80
-        "40":
-          - 20
-          - 40
-          - 80
-        "44":
-          - 20
-          - 40
-          - 80
-        "48":
-          - 20
-          - 40
-          - 80
-        "52":
-          - 20
-          - 40
-          - 80
-        "56":
-          - 20
-          - 40
-          - 80
-        "60":
-          - 20
-          - 40
-          - 80
-        "64":
-          - 20
-          - 40
-          - 80
-        "100":
-          - 20
-          - 40
-          - 80
-        "104":
-          - 20
-          - 40
-          - 80
-        "108":
-          - 20
-          - 40
-          - 80
-        "112":
-          - 20
-          - 40
-          - 80
-        "116":
-          - 20
-          - 40
-          - 80
-        "120":
-          - 20
-          - 40
-          - 80
-        "124":
-          - 20
-          - 40
-          - 80
-        "128":
-          - 20
-          - 40
-          - 80
-        "132":
-          - 20
-          - 40
-          - 80
-        "136":
-          - 20
-          - 40
-          - 80
-        "140":
-          - 20
-          - 40
-          - 80
-    - test_name: test_AU_regulatory_compliance
-      country_code: AU
-      test_channels:
-        "1":
-          - 20
-        "2":
-          - 20
-        "3":
-          - 20
-        "4":
-          - 20
-        "5":
-          - 20
-        "6":
-          - 20
-        "7":
-          - 20
-        "8":
-          - 20
-        "9":
-          - 20
-        "10":
-          - 20
-        "11":
-          - 20
-        "12":
-          - 20
-        "13":
-          - 20
-        "14":
-          - 20
-        "36":
-          - 20
-          - 40
-          - 80
-        "40":
-          - 20
-          - 40
-          - 80
-        "44":
-          - 20
-          - 40
-          - 80
-        "48":
-          - 20
-          - 40
-          - 80
-        "52":
-          - 20
-          - 40
-          - 80
-        "56":
-          - 20
-          - 40
-          - 80
-        "60":
-          - 20
-          - 40
-          - 80
-        "64":
-          - 20
-          - 40
-          - 80
-        "100":
-          - 20
-          - 40
-          - 80
-        "104":
-          - 20
-          - 40
-          - 80
-        "108":
-          - 20
-          - 40
-          - 80
-        "112":
-          - 20
-          - 40
-          - 80
-        "116":
-          - 20
-          - 40
-          - 80
-        "120":
-          - 20
-          - 40
-          - 80
-        "124":
-          - 20
-          - 40
-          - 80
-        "128":
-          - 20
-          - 40
-          - 80
-        "132":
-          - 20
-          - 40
-          - 80
-        "136":
-          - 20
-          - 40
-          - 80
-        "140":
-          - 20
-          - 40
-          - 80
-        "144":
-          - 20
-          - 40
-          - 80
-        "149":
-          - 20
-          - 40
-          - 80
-        "153":
-          - 20
-          - 40
-          - 80
-        "157":
-          - 20
-          - 40
-          - 80
-        "161":
-          - 20
-          - 40
-          - 80
-        "165":
-          - 20
-      allowed_channels:
-        "1":
-          - 20
-        "2":
-          - 20
-        "3":
-          - 20
-        "4":
-          - 20
-        "5":
-          - 20
-        "6":
-          - 20
-        "7":
-          - 20
-        "8":
-          - 20
-        "9":
-          - 20
-        "10":
-          - 20
-        "11":
-          - 20
-        "12":
-          - 20
-        "13":
-          - 20
-        "36":
-          - 20
-          - 40
-          - 80
-        "40":
-          - 20
-          - 40
-          - 80
-        "44":
-          - 20
-          - 40
-          - 80
-        "48":
-          - 20
-          - 40
-          - 80
-        "52":
-          - 20
-          - 40
-          - 80
-        "56":
-          - 20
-          - 40
-          - 80
-        "60":
-          - 20
-          - 40
-          - 80
-        "64":
-          - 20
-          - 40
-          - 80
-        "100":
-          - 20
-          - 40
-          - 80
-        "104":
-          - 20
-          - 40
-          - 80
-        "108":
-          - 20
-          - 40
-          - 80
-        "112":
-          - 20
-          - 40
-          - 80
-        "116":
-          - 20
-          - 40
-          - 80
-        "132":
-          - 20
-          - 40
-          - 80
-        "136":
-          - 20
-          - 40
-          - 80
-        "140":
-          - 20
-          - 40
-          - 80
-        "144":
-          - 20
-          - 40
-          - 80
-        "149":
-          - 20
-          - 40
-          - 80
-        "153":
-          - 20
-          - 40
-          - 80
-        "157":
-          - 20
-          - 40
-          - 80
-        "161":
-          - 20
-          - 40
-          - 80
-        "165":
-          - 20
-    - test_name: test_JP_regulatory_compliance
-      country_code: JP
-      test_channels:
-        "1":
-          - 20
-        "2":
-          - 20
-        "3":
-          - 20
-        "4":
-          - 20
-        "5":
-          - 20
-        "6":
-          - 20
-        "7":
-          - 20
-        "8":
-          - 20
-        "9":
-          - 20
-        "10":
-          - 20
-        "11":
-          - 20
-        "12":
-          - 20
-        "13":
-          - 20
-        "14":
-          - 20
-        "36":
-          - 20
-          - 40
-          - 80
-        "40":
-          - 20
-          - 40
-          - 80
-        "44":
-          - 20
-          - 40
-          - 80
-        "48":
-          - 20
-          - 40
-          - 80
-        "52":
-          - 20
-          - 40
-          - 80
-        "56":
-          - 20
-          - 40
-          - 80
-        "60":
-          - 20
-          - 40
-          - 80
-        "64":
-          - 20
-          - 40
-          - 80
-        "100":
-          - 20
-          - 40
-          - 80
-        "104":
-          - 20
-          - 40
-          - 80
-        "108":
-          - 20
-          - 40
-          - 80
-        "112":
-          - 20
-          - 40
-          - 80
-        "116":
-          - 20
-          - 40
-          - 80
-        "120":
-          - 20
-          - 40
-          - 80
-        "124":
-          - 20
-          - 40
-          - 80
-        "128":
-          - 20
-          - 40
-          - 80
-        "132":
-          - 20
-          - 40
-          - 80
-        "136":
-          - 20
-          - 40
-          - 80
-        "140":
-          - 20
-          - 40
-          - 80
-        "144":
-          - 20
-          - 40
-          - 80
-        "149":
-          - 20
-          - 40
-          - 80
-        "153":
-          - 20
-          - 40
-          - 80
-        "157":
-          - 20
-          - 40
-          - 80
-        "161":
-          - 20
-          - 40
-          - 80
-        "165":
-          - 20
-      allowed_channels:
-        "1":
-          - 20
-        "2":
-          - 20
-        "3":
-          - 20
-        "4":
-          - 20
-        "5":
-          - 20
-        "6":
-          - 20
-        "7":
-          - 20
-        "8":
-          - 20
-        "9":
-          - 20
-        "10":
-          - 20
-        "11":
-          - 20
-        "12":
-          - 20
-        "13":
-          - 20
-        "36":
-          - 20
-          - 40
-          - 80
-        "40":
-          - 20
-          - 40
-          - 80
-        "44":
-          - 20
-          - 40
-          - 80
-        "48":
-          - 20
-          - 40
-          - 80
-        "52":
-          - 20
-          - 40
-          - 80
-        "56":
-          - 20
-          - 40
-          - 80
-        "60":
-          - 20
-          - 40
-          - 80
-        "64":
-          - 20
-          - 40
-          - 80
-        "100":
-          - 20
-          - 40
-          - 80
-        "104":
-          - 20
-          - 40
-          - 80
-        "108":
-          - 20
-          - 40
-          - 80
-        "112":
-          - 20
-          - 40
-          - 80
-        "116":
-          - 20
-          - 40
-          - 80
-        "120":
-          - 20
-          - 40
-          - 80
-        "124":
-          - 20
-          - 40
-          - 80
-        "128":
-          - 20
-          - 40
-          - 80
-        "132":
-          - 20
-          - 40
-          - 80
-        "136":
-          - 20
-          - 40
-          - 80
-        "140":
-          - 20
-          - 40
-          - 80
-        "144":
-          - 20
-          - 40
-          - 80
-    - test_name: test_FR_regulatory_compliance
-      country_code: FR
-      test_channels:
-        "1":
-          - 20
-        "2":
-          - 20
-        "3":
-          - 20
-        "4":
-          - 20
-        "5":
-          - 20
-        "6":
-          - 20
-        "7":
-          - 20
-        "8":
-          - 20
-        "9":
-          - 20
-        "10":
-          - 20
-        "11":
-          - 20
-        "12":
-          - 20
-        "13":
-          - 20
-        "14":
-          - 20
-        "36":
-          - 20
-          - 40
-          - 80
-        "40":
-          - 20
-          - 40
-          - 80
-        "44":
-          - 20
-          - 40
-          - 80
-        "48":
-          - 20
-          - 40
-          - 80
-        "52":
-          - 20
-          - 40
-          - 80
-        "56":
-          - 20
-          - 40
-          - 80
-        "60":
-          - 20
-          - 40
-          - 80
-        "64":
-          - 20
-          - 40
-          - 80
-        "100":
-          - 20
-          - 40
-          - 80
-        "104":
-          - 20
-          - 40
-          - 80
-        "108":
-          - 20
-          - 40
-          - 80
-        "112":
-          - 20
-          - 40
-          - 80
-        "116":
-          - 20
-          - 40
-          - 80
-        "120":
-          - 20
-          - 40
-          - 80
-        "124":
-          - 20
-          - 40
-          - 80
-        "128":
-          - 20
-          - 40
-          - 80
-        "132":
-          - 20
-          - 40
-          - 80
-        "136":
-          - 20
-          - 40
-          - 80
-        "140":
-          - 20
-          - 40
-          - 80
-        "144":
-          - 20
-          - 40
-          - 80
-        "149":
-          - 20
-          - 40
-          - 80
-        "153":
-          - 20
-          - 40
-          - 80
-        "157":
-          - 20
-          - 40
-          - 80
-        "161":
-          - 20
-          - 40
-          - 80
-        "165":
-          - 20
-      allowed_channels:
-        "1":
-          - 20
-        "2":
-          - 20
-        "3":
-          - 20
-        "4":
-          - 20
-        "5":
-          - 20
-        "6":
-          - 20
-        "7":
-          - 20
-        "8":
-          - 20
-        "9":
-          - 20
-        "10":
-          - 20
-        "11":
-          - 20
-        "12":
-          - 20
-        "13":
-          - 20
-        "36":
-          - 20
-          - 40
-          - 80
-        "40":
-          - 20
-          - 40
-          - 80
-        "44":
-          - 20
-          - 40
-          - 80
-        "48":
-          - 20
-          - 40
-          - 80
-        "52":
-          - 20
-          - 40
-          - 80
-        "56":
-          - 20
-          - 40
-          - 80
-        "60":
-          - 20
-          - 40
-          - 80
-        "64":
-          - 20
-          - 40
-          - 80
-        "100":
-          - 20
-          - 40
-          - 80
-        "104":
-          - 20
-          - 40
-          - 80
-        "108":
-          - 20
-          - 40
-          - 80
-        "112":
-          - 20
-          - 40
-          - 80
-        "116":
-          - 20
-          - 40
-          - 80
-        "120":
-          - 20
-          - 40
-          - 80
-        "124":
-          - 20
-          - 40
-          - 80
-        "128":
-          - 20
-          - 40
-          - 80
-        "132":
-          - 20
-          - 40
-          - 80
-        "136":
-          - 20
-          - 40
-          - 80
-        "140":
-          - 20
-          - 40
-          - 80
-    - test_name: test_DE_regulatory_compliance
-      country_code: DE
-      test_channels:
-        "1":
-          - 20
-        "2":
-          - 20
-        "3":
-          - 20
-        "4":
-          - 20
-        "5":
-          - 20
-        "6":
-          - 20
-        "7":
-          - 20
-        "8":
-          - 20
-        "9":
-          - 20
-        "10":
-          - 20
-        "11":
-          - 20
-        "12":
-          - 20
-        "13":
-          - 20
-        "14":
-          - 20
-        "36":
-          - 20
-          - 40
-          - 80
-        "40":
-          - 20
-          - 40
-          - 80
-        "44":
-          - 20
-          - 40
-          - 80
-        "48":
-          - 20
-          - 40
-          - 80
-        "52":
-          - 20
-          - 40
-          - 80
-        "56":
-          - 20
-          - 40
-          - 80
-        "60":
-          - 20
-          - 40
-          - 80
-        "64":
-          - 20
-          - 40
-          - 80
-        "100":
-          - 20
-          - 40
-          - 80
-        "104":
-          - 20
-          - 40
-          - 80
-        "108":
-          - 20
-          - 40
-          - 80
-        "112":
-          - 20
-          - 40
-          - 80
-        "116":
-          - 20
-          - 40
-          - 80
-        "120":
-          - 20
-          - 40
-          - 80
-        "124":
-          - 20
-          - 40
-          - 80
-        "128":
-          - 20
-          - 40
-          - 80
-        "132":
-          - 20
-          - 40
-          - 80
-        "136":
-          - 20
-          - 40
-          - 80
-        "140":
-          - 20
-          - 40
-          - 80
-        "144":
-          - 20
-          - 40
-          - 80
-        "149":
-          - 20
-          - 40
-          - 80
-        "153":
-          - 20
-          - 40
-          - 80
-        "157":
-          - 20
-          - 40
-          - 80
-        "161":
-          - 20
-          - 40
-          - 80
-        "165":
-          - 20
-      allowed_channels:
-        "1":
-          - 20
-        "2":
-          - 20
-        "3":
-          - 20
-        "4":
-          - 20
-        "5":
-          - 20
-        "6":
-          - 20
-        "7":
-          - 20
-        "8":
-          - 20
-        "9":
-          - 20
-        "10":
-          - 20
-        "11":
-          - 20
-        "12":
-          - 20
-        "13":
-          - 20
-        "36":
-          - 20
-          - 40
-          - 80
-        "40":
-          - 20
-          - 40
-          - 80
-        "44":
-          - 20
-          - 40
-          - 80
-        "48":
-          - 20
-          - 40
-          - 80
-        "52":
-          - 20
-          - 40
-          - 80
-        "56":
-          - 20
-          - 40
-          - 80
-        "60":
-          - 20
-          - 40
-          - 80
-        "64":
-          - 20
-          - 40
-          - 80
-        "100":
-          - 20
-          - 40
-          - 80
-        "104":
-          - 20
-          - 40
-          - 80
-        "108":
-          - 20
-          - 40
-          - 80
-        "112":
-          - 20
-          - 40
-          - 80
-        "116":
-          - 20
-          - 40
-          - 80
-        "120":
-          - 20
-          - 40
-          - 80
-        "124":
-          - 20
-          - 40
-          - 80
-        "128":
-          - 20
-          - 40
-          - 80
-        "132":
-          - 20
-          - 40
-          - 80
-        "136":
-          - 20
-          - 40
-          - 80
-        "140":
-          - 20
-          - 40
-          - 80
-    - test_name: test_CH_regulatory_compliance
-      country_code: CH
-      test_channels:
-        "1":
-          - 20
-        "2":
-          - 20
-        "3":
-          - 20
-        "4":
-          - 20
-        "5":
-          - 20
-        "6":
-          - 20
-        "7":
-          - 20
-        "8":
-          - 20
-        "9":
-          - 20
-        "10":
-          - 20
-        "11":
-          - 20
-        "12":
-          - 20
-        "13":
-          - 20
-        "14":
-          - 20
-        "36":
-          - 20
-          - 40
-          - 80
-        "40":
-          - 20
-          - 40
-          - 80
-        "44":
-          - 20
-          - 40
-          - 80
-        "48":
-          - 20
-          - 40
-          - 80
-        "52":
-          - 20
-          - 40
-          - 80
-        "56":
-          - 20
-          - 40
-          - 80
-        "60":
-          - 20
-          - 40
-          - 80
-        "64":
-          - 20
-          - 40
-          - 80
-        "100":
-          - 20
-          - 40
-          - 80
-        "104":
-          - 20
-          - 40
-          - 80
-        "108":
-          - 20
-          - 40
-          - 80
-        "112":
-          - 20
-          - 40
-          - 80
-        "116":
-          - 20
-          - 40
-          - 80
-        "120":
-          - 20
-          - 40
-          - 80
-        "124":
-          - 20
-          - 40
-          - 80
-        "128":
-          - 20
-          - 40
-          - 80
-        "132":
-          - 20
-          - 40
-          - 80
-        "136":
-          - 20
-          - 40
-          - 80
-        "140":
-          - 20
-          - 40
-          - 80
-        "144":
-          - 20
-          - 40
-          - 80
-        "149":
-          - 20
-          - 40
-          - 80
-        "153":
-          - 20
-          - 40
-          - 80
-        "157":
-          - 20
-          - 40
-          - 80
-        "161":
-          - 20
-          - 40
-          - 80
-        "165":
-          - 20
-      allowed_channels:
-        "1":
-          - 20
-        "2":
-          - 20
-        "3":
-          - 20
-        "4":
-          - 20
-        "5":
-          - 20
-        "6":
-          - 20
-        "7":
-          - 20
-        "8":
-          - 20
-        "9":
-          - 20
-        "10":
-          - 20
-        "11":
-          - 20
-        "12":
-          - 20
-        "13":
-          - 20
-        "36":
-          - 20
-          - 40
-          - 80
-        "40":
-          - 20
-          - 40
-          - 80
-        "44":
-          - 20
-          - 40
-          - 80
-        "48":
-          - 20
-          - 40
-          - 80
-        "52":
-          - 20
-          - 40
-          - 80
-        "56":
-          - 20
-          - 40
-          - 80
-        "60":
-          - 20
-          - 40
-          - 80
-        "64":
-          - 20
-          - 40
-          - 80
-        "100":
-          - 20
-          - 40
-          - 80
-        "104":
-          - 20
-          - 40
-          - 80
-        "108":
-          - 20
-          - 40
-          - 80
-        "112":
-          - 20
-          - 40
-          - 80
-        "116":
-          - 20
-          - 40
-          - 80
-        "120":
-          - 20
-          - 40
-          - 80
-        "124":
-          - 20
-          - 40
-          - 80
-        "128":
-          - 20
-          - 40
-          - 80
-        "132":
-          - 20
-          - 40
-          - 80
-        "136":
-          - 20
-          - 40
-          - 80
-        "140":
-          - 20
-          - 40
-          - 80
-    - test_name: test_BE_regulatory_compliance
-      country_code: BE
-      test_channels:
-        "1":
-          - 20
-        "2":
-          - 20
-        "3":
-          - 20
-        "4":
-          - 20
-        "5":
-          - 20
-        "6":
-          - 20
-        "7":
-          - 20
-        "8":
-          - 20
-        "9":
-          - 20
-        "10":
-          - 20
-        "11":
-          - 20
-        "12":
-          - 20
-        "13":
-          - 20
-        "14":
-          - 20
-        "36":
-          - 20
-          - 40
-          - 80
-        "40":
-          - 20
-          - 40
-          - 80
-        "44":
-          - 20
-          - 40
-          - 80
-        "48":
-          - 20
-          - 40
-          - 80
-        "52":
-          - 20
-          - 40
-          - 80
-        "56":
-          - 20
-          - 40
-          - 80
-        "60":
-          - 20
-          - 40
-          - 80
-        "64":
-          - 20
-          - 40
-          - 80
-        "100":
-          - 20
-          - 40
-          - 80
-        "104":
-          - 20
-          - 40
-          - 80
-        "108":
-          - 20
-          - 40
-          - 80
-        "112":
-          - 20
-          - 40
-          - 80
-        "116":
-          - 20
-          - 40
-          - 80
-        "120":
-          - 20
-          - 40
-          - 80
-        "124":
-          - 20
-          - 40
-          - 80
-        "128":
-          - 20
-          - 40
-          - 80
-        "132":
-          - 20
-          - 40
-          - 80
-        "136":
-          - 20
-          - 40
-          - 80
-        "140":
-          - 20
-          - 40
-          - 80
-        "144":
-          - 20
-          - 40
-          - 80
-        "149":
-          - 20
-          - 40
-          - 80
-        "153":
-          - 20
-          - 40
-          - 80
-        "157":
-          - 20
-          - 40
-          - 80
-        "161":
-          - 20
-          - 40
-          - 80
-        "165":
-          - 20
-      allowed_channels:
-        "1":
-          - 20
-        "2":
-          - 20
-        "3":
-          - 20
-        "4":
-          - 20
-        "5":
-          - 20
-        "6":
-          - 20
-        "7":
-          - 20
-        "8":
-          - 20
-        "9":
-          - 20
-        "10":
-          - 20
-        "11":
-          - 20
-        "12":
-          - 20
-        "13":
-          - 20
-        "36":
-          - 20
-          - 40
-          - 80
-        "40":
-          - 20
-          - 40
-          - 80
-        "44":
-          - 20
-          - 40
-          - 80
-        "48":
-          - 20
-          - 40
-          - 80
-        "52":
-          - 20
-          - 40
-          - 80
-        "56":
-          - 20
-          - 40
-          - 80
-        "60":
-          - 20
-          - 40
-          - 80
-        "64":
-          - 20
-          - 40
-          - 80
-        "100":
-          - 20
-          - 40
-          - 80
-        "104":
-          - 20
-          - 40
-          - 80
-        "108":
-          - 20
-          - 40
-          - 80
-        "112":
-          - 20
-          - 40
-          - 80
-        "116":
-          - 20
-          - 40
-          - 80
-        "120":
-          - 20
-          - 40
-          - 80
-        "124":
-          - 20
-          - 40
-          - 80
-        "128":
-          - 20
-          - 40
-          - 80
-        "132":
-          - 20
-          - 40
-          - 80
-        "136":
-          - 20
-          - 40
-          - 80
-        "140":
-          - 20
-          - 40
-          - 80
-    - test_name: test_IE_regulatory_compliance
-      country_code: IE
-      test_channels:
-        "1":
-          - 20
-        "2":
-          - 20
-        "3":
-          - 20
-        "4":
-          - 20
-        "5":
-          - 20
-        "6":
-          - 20
-        "7":
-          - 20
-        "8":
-          - 20
-        "9":
-          - 20
-        "10":
-          - 20
-        "11":
-          - 20
-        "12":
-          - 20
-        "13":
-          - 20
-        "14":
-          - 20
-        "36":
-          - 20
-          - 40
-          - 80
-        "40":
-          - 20
-          - 40
-          - 80
-        "44":
-          - 20
-          - 40
-          - 80
-        "48":
-          - 20
-          - 40
-          - 80
-        "52":
-          - 20
-          - 40
-          - 80
-        "56":
-          - 20
-          - 40
-          - 80
-        "60":
-          - 20
-          - 40
-          - 80
-        "64":
-          - 20
-          - 40
-          - 80
-        "100":
-          - 20
-          - 40
-          - 80
-        "104":
-          - 20
-          - 40
-          - 80
-        "108":
-          - 20
-          - 40
-          - 80
-        "112":
-          - 20
-          - 40
-          - 80
-        "116":
-          - 20
-          - 40
-          - 80
-        "120":
-          - 20
-          - 40
-          - 80
-        "124":
-          - 20
-          - 40
-          - 80
-        "128":
-          - 20
-          - 40
-          - 80
-        "132":
-          - 20
-          - 40
-          - 80
-        "136":
-          - 20
-          - 40
-          - 80
-        "140":
-          - 20
-          - 40
-          - 80
-        "144":
-          - 20
-          - 40
-          - 80
-        "149":
-          - 20
-          - 40
-          - 80
-        "153":
-          - 20
-          - 40
-          - 80
-        "157":
-          - 20
-          - 40
-          - 80
-        "161":
-          - 20
-          - 40
-          - 80
-        "165":
-          - 20
-      allowed_channels:
-        "1":
-          - 20
-        "2":
-          - 20
-        "3":
-          - 20
-        "4":
-          - 20
-        "5":
-          - 20
-        "6":
-          - 20
-        "7":
-          - 20
-        "8":
-          - 20
-        "9":
-          - 20
-        "10":
-          - 20
-        "11":
-          - 20
-        "12":
-          - 20
-        "13":
-          - 20
-        "36":
-          - 20
-          - 40
-          - 80
-        "40":
-          - 20
-          - 40
-          - 80
-        "44":
-          - 20
-          - 40
-          - 80
-        "48":
-          - 20
-          - 40
-          - 80
-        "52":
-          - 20
-          - 40
-          - 80
-        "56":
-          - 20
-          - 40
-          - 80
-        "60":
-          - 20
-          - 40
-          - 80
-        "64":
-          - 20
-          - 40
-          - 80
-        "100":
-          - 20
-          - 40
-          - 80
-        "104":
-          - 20
-          - 40
-          - 80
-        "108":
-          - 20
-          - 40
-          - 80
-        "112":
-          - 20
-          - 40
-          - 80
-        "116":
-          - 20
-          - 40
-          - 80
-        "120":
-          - 20
-          - 40
-          - 80
-        "124":
-          - 20
-          - 40
-          - 80
-        "128":
-          - 20
-          - 40
-          - 80
-        "132":
-          - 20
-          - 40
-          - 80
-        "136":
-          - 20
-          - 40
-          - 80
-        "140":
-          - 20
-          - 40
-          - 80
-    - test_name: test_NO_regulatory_compliance
-      country_code: "NO"
-      test_channels:
-        "1":
-          - 20
-        "2":
-          - 20
-        "3":
-          - 20
-        "4":
-          - 20
-        "5":
-          - 20
-        "6":
-          - 20
-        "7":
-          - 20
-        "8":
-          - 20
-        "9":
-          - 20
-        "10":
-          - 20
-        "11":
-          - 20
-        "12":
-          - 20
-        "13":
-          - 20
-        "14":
-          - 20
-        "36":
-          - 20
-          - 40
-          - 80
-        "40":
-          - 20
-          - 40
-          - 80
-        "44":
-          - 20
-          - 40
-          - 80
-        "48":
-          - 20
-          - 40
-          - 80
-        "52":
-          - 20
-          - 40
-          - 80
-        "56":
-          - 20
-          - 40
-          - 80
-        "60":
-          - 20
-          - 40
-          - 80
-        "64":
-          - 20
-          - 40
-          - 80
-        "100":
-          - 20
-          - 40
-          - 80
-        "104":
-          - 20
-          - 40
-          - 80
-        "108":
-          - 20
-          - 40
-          - 80
-        "112":
-          - 20
-          - 40
-          - 80
-        "116":
-          - 20
-          - 40
-          - 80
-        "120":
-          - 20
-          - 40
-          - 80
-        "124":
-          - 20
-          - 40
-          - 80
-        "128":
-          - 20
-          - 40
-          - 80
-        "132":
-          - 20
-          - 40
-          - 80
-        "136":
-          - 20
-          - 40
-          - 80
-        "140":
-          - 20
-          - 40
-          - 80
-        "144":
-          - 20
-          - 40
-          - 80
-        "149":
-          - 20
-          - 40
-          - 80
-        "153":
-          - 20
-          - 40
-          - 80
-        "157":
-          - 20
-          - 40
-          - 80
-        "161":
-          - 20
-          - 40
-          - 80
-        "165":
-          - 20
-      allowed_channels:
-        "1":
-          - 20
-        "2":
-          - 20
-        "3":
-          - 20
-        "4":
-          - 20
-        "5":
-          - 20
-        "6":
-          - 20
-        "7":
-          - 20
-        "8":
-          - 20
-        "9":
-          - 20
-        "10":
-          - 20
-        "11":
-          - 20
-        "12":
-          - 20
-        "13":
-          - 20
-        "36":
-          - 20
-          - 40
-          - 80
-        "40":
-          - 20
-          - 40
-          - 80
-        "44":
-          - 20
-          - 40
-          - 80
-        "48":
-          - 20
-          - 40
-          - 80
-        "52":
-          - 20
-          - 40
-          - 80
-        "56":
-          - 20
-          - 40
-          - 80
-        "60":
-          - 20
-          - 40
-          - 80
-        "64":
-          - 20
-          - 40
-          - 80
-        "100":
-          - 20
-          - 40
-          - 80
-        "104":
-          - 20
-          - 40
-          - 80
-        "108":
-          - 20
-          - 40
-          - 80
-        "112":
-          - 20
-          - 40
-          - 80
-        "116":
-          - 20
-          - 40
-          - 80
-        "120":
-          - 20
-          - 40
-          - 80
-        "124":
-          - 20
-          - 40
-          - 80
-        "128":
-          - 20
-          - 40
-          - 80
-        "132":
-          - 20
-          - 40
-          - 80
-        "136":
-          - 20
-          - 40
-          - 80
-        "140":
-          - 20
-          - 40
-          - 80
-    - test_name: test_ES_regulatory_compliance
-      country_code: ES
-      test_channels:
-        "1":
-          - 20
-        "2":
-          - 20
-        "3":
-          - 20
-        "4":
-          - 20
-        "5":
-          - 20
-        "6":
-          - 20
-        "7":
-          - 20
-        "8":
-          - 20
-        "9":
-          - 20
-        "10":
-          - 20
-        "11":
-          - 20
-        "12":
-          - 20
-        "13":
-          - 20
-        "14":
-          - 20
-        "36":
-          - 20
-          - 40
-          - 80
-        "40":
-          - 20
-          - 40
-          - 80
-        "44":
-          - 20
-          - 40
-          - 80
-        "48":
-          - 20
-          - 40
-          - 80
-        "52":
-          - 20
-          - 40
-          - 80
-        "56":
-          - 20
-          - 40
-          - 80
-        "60":
-          - 20
-          - 40
-          - 80
-        "64":
-          - 20
-          - 40
-          - 80
-        "100":
-          - 20
-          - 40
-          - 80
-        "104":
-          - 20
-          - 40
-          - 80
-        "108":
-          - 20
-          - 40
-          - 80
-        "112":
-          - 20
-          - 40
-          - 80
-        "116":
-          - 20
-          - 40
-          - 80
-        "120":
-          - 20
-          - 40
-          - 80
-        "124":
-          - 20
-          - 40
-          - 80
-        "128":
-          - 20
-          - 40
-          - 80
-        "132":
-          - 20
-          - 40
-          - 80
-        "136":
-          - 20
-          - 40
-          - 80
-        "140":
-          - 20
-          - 40
-          - 80
-        "144":
-          - 20
-          - 40
-          - 80
-        "149":
-          - 20
-          - 40
-          - 80
-        "153":
-          - 20
-          - 40
-          - 80
-        "157":
-          - 20
-          - 40
-          - 80
-        "161":
-          - 20
-          - 40
-          - 80
-        "165":
-          - 20
-      allowed_channels:
-        "1":
-          - 20
-        "2":
-          - 20
-        "3":
-          - 20
-        "4":
-          - 20
-        "5":
-          - 20
-        "6":
-          - 20
-        "7":
-          - 20
-        "8":
-          - 20
-        "9":
-          - 20
-        "10":
-          - 20
-        "11":
-          - 20
-        "12":
-          - 20
-        "13":
-          - 20
-        "36":
-          - 20
-          - 40
-          - 80
-        "40":
-          - 20
-          - 40
-          - 80
-        "44":
-          - 20
-          - 40
-          - 80
-        "48":
-          - 20
-          - 40
-          - 80
-        "52":
-          - 20
-          - 40
-          - 80
-        "56":
-          - 20
-          - 40
-          - 80
-        "60":
-          - 20
-          - 40
-          - 80
-        "64":
-          - 20
-          - 40
-          - 80
-        "100":
-          - 20
-          - 40
-          - 80
-        "104":
-          - 20
-          - 40
-          - 80
-        "108":
-          - 20
-          - 40
-          - 80
-        "112":
-          - 20
-          - 40
-          - 80
-        "116":
-          - 20
-          - 40
-          - 80
-        "120":
-          - 20
-          - 40
-          - 80
-        "124":
-          - 20
-          - 40
-          - 80
-        "128":
-          - 20
-          - 40
-          - 80
-        "132":
-          - 20
-          - 40
-          - 80
-        "136":
-          - 20
-          - 40
-          - 80
-        "140":
-          - 20
-          - 40
-          - 80
-    - test_name: test_IT_regulatory_compliance
-      country_code: IT
-      test_channels:
-        "1":
-          - 20
-        "2":
-          - 20
-        "3":
-          - 20
-        "4":
-          - 20
-        "5":
-          - 20
-        "6":
-          - 20
-        "7":
-          - 20
-        "8":
-          - 20
-        "9":
-          - 20
-        "10":
-          - 20
-        "11":
-          - 20
-        "12":
-          - 20
-        "13":
-          - 20
-        "14":
-          - 20
-        "36":
-          - 20
-          - 40
-          - 80
-        "40":
-          - 20
-          - 40
-          - 80
-        "44":
-          - 20
-          - 40
-          - 80
-        "48":
-          - 20
-          - 40
-          - 80
-        "52":
-          - 20
-          - 40
-          - 80
-        "56":
-          - 20
-          - 40
-          - 80
-        "60":
-          - 20
-          - 40
-          - 80
-        "64":
-          - 20
-          - 40
-          - 80
-        "100":
-          - 20
-          - 40
-          - 80
-        "104":
-          - 20
-          - 40
-          - 80
-        "108":
-          - 20
-          - 40
-          - 80
-        "112":
-          - 20
-          - 40
-          - 80
-        "116":
-          - 20
-          - 40
-          - 80
-        "120":
-          - 20
-          - 40
-          - 80
-        "124":
-          - 20
-          - 40
-          - 80
-        "128":
-          - 20
-          - 40
-          - 80
-        "132":
-          - 20
-          - 40
-          - 80
-        "136":
-          - 20
-          - 40
-          - 80
-        "140":
-          - 20
-          - 40
-          - 80
-        "144":
-          - 20
-          - 40
-          - 80
-        "149":
-          - 20
-          - 40
-          - 80
-        "153":
-          - 20
-          - 40
-          - 80
-        "157":
-          - 20
-          - 40
-          - 80
-        "161":
-          - 20
-          - 40
-          - 80
-        "165":
-          - 20
-      allowed_channels:
-        "1":
-          - 20
-        "2":
-          - 20
-        "3":
-          - 20
-        "4":
-          - 20
-        "5":
-          - 20
-        "6":
-          - 20
-        "7":
-          - 20
-        "8":
-          - 20
-        "9":
-          - 20
-        "10":
-          - 20
-        "11":
-          - 20
-        "12":
-          - 20
-        "13":
-          - 20
-        "36":
-          - 20
-          - 40
-          - 80
-        "40":
-          - 20
-          - 40
-          - 80
-        "44":
-          - 20
-          - 40
-          - 80
-        "48":
-          - 20
-          - 40
-          - 80
-        "52":
-          - 20
-          - 40
-          - 80
-        "56":
-          - 20
-          - 40
-          - 80
-        "60":
-          - 20
-          - 40
-          - 80
-        "64":
-          - 20
-          - 40
-          - 80
-        "100":
-          - 20
-          - 40
-          - 80
-        "104":
-          - 20
-          - 40
-          - 80
-        "108":
-          - 20
-          - 40
-          - 80
-        "112":
-          - 20
-          - 40
-          - 80
-        "116":
-          - 20
-          - 40
-          - 80
-        "120":
-          - 20
-          - 40
-          - 80
-        "124":
-          - 20
-          - 40
-          - 80
-        "128":
-          - 20
-          - 40
-          - 80
-        "132":
-          - 20
-          - 40
-          - 80
-        "136":
-          - 20
-          - 40
-          - 80
-        "140":
-          - 20
-          - 40
-          - 80
-    - test_name: test_NL_regulatory_compliance
-      country_code: NL
-      test_channels:
-        "1":
-          - 20
-        "2":
-          - 20
-        "3":
-          - 20
-        "4":
-          - 20
-        "5":
-          - 20
-        "6":
-          - 20
-        "7":
-          - 20
-        "8":
-          - 20
-        "9":
-          - 20
-        "10":
-          - 20
-        "11":
-          - 20
-        "12":
-          - 20
-        "13":
-          - 20
-        "14":
-          - 20
-        "36":
-          - 20
-          - 40
-          - 80
-        "40":
-          - 20
-          - 40
-          - 80
-        "44":
-          - 20
-          - 40
-          - 80
-        "48":
-          - 20
-          - 40
-          - 80
-        "52":
-          - 20
-          - 40
-          - 80
-        "56":
-          - 20
-          - 40
-          - 80
-        "60":
-          - 20
-          - 40
-          - 80
-        "64":
-          - 20
-          - 40
-          - 80
-        "100":
-          - 20
-          - 40
-          - 80
-        "104":
-          - 20
-          - 40
-          - 80
-        "108":
-          - 20
-          - 40
-          - 80
-        "112":
-          - 20
-          - 40
-          - 80
-        "116":
-          - 20
-          - 40
-          - 80
-        "120":
-          - 20
-          - 40
-          - 80
-        "124":
-          - 20
-          - 40
-          - 80
-        "128":
-          - 20
-          - 40
-          - 80
-        "132":
-          - 20
-          - 40
-          - 80
-        "136":
-          - 20
-          - 40
-          - 80
-        "140":
-          - 20
-          - 40
-          - 80
-        "144":
-          - 20
-          - 40
-          - 80
-        "149":
-          - 20
-          - 40
-          - 80
-        "153":
-          - 20
-          - 40
-          - 80
-        "157":
-          - 20
-          - 40
-          - 80
-        "161":
-          - 20
-          - 40
-          - 80
-        "165":
-          - 20
-      allowed_channels:
-        "1":
-          - 20
-        "2":
-          - 20
-        "3":
-          - 20
-        "4":
-          - 20
-        "5":
-          - 20
-        "6":
-          - 20
-        "7":
-          - 20
-        "8":
-          - 20
-        "9":
-          - 20
-        "10":
-          - 20
-        "11":
-          - 20
-        "12":
-          - 20
-        "13":
-          - 20
-        "36":
-          - 20
-          - 40
-          - 80
-        "40":
-          - 20
-          - 40
-          - 80
-        "44":
-          - 20
-          - 40
-          - 80
-        "48":
-          - 20
-          - 40
-          - 80
-        "52":
-          - 20
-          - 40
-          - 80
-        "56":
-          - 20
-          - 40
-          - 80
-        "60":
-          - 20
-          - 40
-          - 80
-        "64":
-          - 20
-          - 40
-          - 80
-        "100":
-          - 20
-          - 40
-          - 80
-        "104":
-          - 20
-          - 40
-          - 80
-        "108":
-          - 20
-          - 40
-          - 80
-        "112":
-          - 20
-          - 40
-          - 80
-        "116":
-          - 20
-          - 40
-          - 80
-        "120":
-          - 20
-          - 40
-          - 80
-        "124":
-          - 20
-          - 40
-          - 80
-        "128":
-          - 20
-          - 40
-          - 80
-        "132":
-          - 20
-          - 40
-          - 80
-        "136":
-          - 20
-          - 40
-          - 80
-        "140":
-          - 20
-          - 40
-          - 80
-    - test_name: test_DK_regulatory_compliance
-      country_code: DK
-      test_channels:
-        "1":
-          - 20
-        "2":
-          - 20
-        "3":
-          - 20
-        "4":
-          - 20
-        "5":
-          - 20
-        "6":
-          - 20
-        "7":
-          - 20
-        "8":
-          - 20
-        "9":
-          - 20
-        "10":
-          - 20
-        "11":
-          - 20
-        "12":
-          - 20
-        "13":
-          - 20
-        "14":
-          - 20
-        "36":
-          - 20
-          - 40
-          - 80
-        "40":
-          - 20
-          - 40
-          - 80
-        "44":
-          - 20
-          - 40
-          - 80
-        "48":
-          - 20
-          - 40
-          - 80
-        "52":
-          - 20
-          - 40
-          - 80
-        "56":
-          - 20
-          - 40
-          - 80
-        "60":
-          - 20
-          - 40
-          - 80
-        "64":
-          - 20
-          - 40
-          - 80
-        "100":
-          - 20
-          - 40
-          - 80
-        "104":
-          - 20
-          - 40
-          - 80
-        "108":
-          - 20
-          - 40
-          - 80
-        "112":
-          - 20
-          - 40
-          - 80
-        "116":
-          - 20
-          - 40
-          - 80
-        "120":
-          - 20
-          - 40
-          - 80
-        "124":
-          - 20
-          - 40
-          - 80
-        "128":
-          - 20
-          - 40
-          - 80
-        "132":
-          - 20
-          - 40
-          - 80
-        "136":
-          - 20
-          - 40
-          - 80
-        "140":
-          - 20
-          - 40
-          - 80
-        "144":
-          - 20
-          - 40
-          - 80
-        "149":
-          - 20
-          - 40
-          - 80
-        "153":
-          - 20
-          - 40
-          - 80
-        "157":
-          - 20
-          - 40
-          - 80
-        "161":
-          - 20
-          - 40
-          - 80
-        "165":
-          - 20
-      allowed_channels:
-        "1":
-          - 20
-        "2":
-          - 20
-        "3":
-          - 20
-        "4":
-          - 20
-        "5":
-          - 20
-        "6":
-          - 20
-        "7":
-          - 20
-        "8":
-          - 20
-        "9":
-          - 20
-        "10":
-          - 20
-        "11":
-          - 20
-        "12":
-          - 20
-        "13":
-          - 20
-        "36":
-          - 20
-          - 40
-          - 80
-        "40":
-          - 20
-          - 40
-          - 80
-        "44":
-          - 20
-          - 40
-          - 80
-        "48":
-          - 20
-          - 40
-          - 80
-        "52":
-          - 20
-          - 40
-          - 80
-        "56":
-          - 20
-          - 40
-          - 80
-        "60":
-          - 20
-          - 40
-          - 80
-        "64":
-          - 20
-          - 40
-          - 80
-        "100":
-          - 20
-          - 40
-          - 80
-        "104":
-          - 20
-          - 40
-          - 80
-        "108":
-          - 20
-          - 40
-          - 80
-        "112":
-          - 20
-          - 40
-          - 80
-        "116":
-          - 20
-          - 40
-          - 80
-        "120":
-          - 20
-          - 40
-          - 80
-        "124":
-          - 20
-          - 40
-          - 80
-        "128":
-          - 20
-          - 40
-          - 80
-        "132":
-          - 20
-          - 40
-          - 80
-        "136":
-          - 20
-          - 40
-          - 80
-        "140":
-          - 20
-          - 40
-          - 80
-    - test_name: test_SG_regulatory_compliance
-      country_code: SG
-      test_channels:
-        "1":
-          - 20
-        "2":
-          - 20
-        "3":
-          - 20
-        "4":
-          - 20
-        "5":
-          - 20
-        "6":
-          - 20
-        "7":
-          - 20
-        "8":
-          - 20
-        "9":
-          - 20
-        "10":
-          - 20
-        "11":
-          - 20
-        "12":
-          - 20
-        "13":
-          - 20
-        "14":
-          - 20
-        "36":
-          - 20
-          - 40
-          - 80
-        "40":
-          - 20
-          - 40
-          - 80
-        "44":
-          - 20
-          - 40
-          - 80
-        "48":
-          - 20
-          - 40
-          - 80
-        "52":
-          - 20
-          - 40
-          - 80
-        "56":
-          - 20
-          - 40
-          - 80
-        "60":
-          - 20
-          - 40
-          - 80
-        "64":
-          - 20
-          - 40
-          - 80
-        "100":
-          - 20
-          - 40
-          - 80
-        "104":
-          - 20
-          - 40
-          - 80
-        "108":
-          - 20
-          - 40
-          - 80
-        "112":
-          - 20
-          - 40
-          - 80
-        "116":
-          - 20
-          - 40
-          - 80
-        "120":
-          - 20
-          - 40
-          - 80
-        "124":
-          - 20
-          - 40
-          - 80
-        "128":
-          - 20
-          - 40
-          - 80
-        "132":
-          - 20
-          - 40
-          - 80
-        "136":
-          - 20
-          - 40
-          - 80
-        "140":
-          - 20
-          - 40
-          - 80
-        "144":
-          - 20
-          - 40
-          - 80
-        "149":
-          - 20
-          - 40
-          - 80
-        "153":
-          - 20
-          - 40
-          - 80
-        "157":
-          - 20
-          - 40
-          - 80
-        "161":
-          - 20
-          - 40
-          - 80
-        "165":
-          - 20
-      allowed_channels:
-        "1":
-          - 20
-        "2":
-          - 20
-        "3":
-          - 20
-        "4":
-          - 20
-        "5":
-          - 20
-        "6":
-          - 20
-        "7":
-          - 20
-        "8":
-          - 20
-        "9":
-          - 20
-        "10":
-          - 20
-        "11":
-          - 20
-        "12":
-          - 20
-        "13":
-          - 20
-        "36":
-          - 20
-          - 40
-          - 80
-        "40":
-          - 20
-          - 40
-          - 80
-        "44":
-          - 20
-          - 40
-          - 80
-        "48":
-          - 20
-          - 40
-          - 80
-        "52":
-          - 20
-          - 40
-          - 80
-        "56":
-          - 20
-          - 40
-          - 80
-        "60":
-          - 20
-          - 40
-          - 80
-        "64":
-          - 20
-          - 40
-          - 80
-        "100":
-          - 20
-          - 40
-          - 80
-        "104":
-          - 20
-          - 40
-          - 80
-        "108":
-          - 20
-          - 40
-          - 80
-        "112":
-          - 20
-          - 40
-          - 80
-        "116":
-          - 20
-          - 40
-          - 80
-        "120":
-          - 20
-          - 40
-          - 80
-        "124":
-          - 20
-          - 40
-          - 80
-        "128":
-          - 20
-          - 40
-          - 80
-        "132":
-          - 20
-          - 40
-          - 80
-        "136":
-          - 20
-          - 40
-          - 80
-        "140":
-          - 20
-          - 40
-          - 80
-        "144":
-          - 20
-          - 40
-          - 80
-        "149":
-          - 20
-          - 40
-          - 80
-        "153":
-          - 20
-          - 40
-          - 80
-        "157":
-          - 20
-          - 40
-          - 80
-        "161":
-          - 20
-          - 40
-          - 80
-        "165":
-          - 20
-    - test_name: test_WW_regulatory_compliance
-      country_code: WW
-      test_channels:
-        "1":
-          - 20
-        "2":
-          - 20
-        "3":
-          - 20
-        "4":
-          - 20
-        "5":
-          - 20
-        "6":
-          - 20
-        "7":
-          - 20
-        "8":
-          - 20
-        "9":
-          - 20
-        "10":
-          - 20
-        "11":
-          - 20
-        "12":
-          - 20
-        "13":
-          - 20
-        "14":
-          - 20
-        "36":
-          - 20
-          - 40
-          - 80
-        "40":
-          - 20
-          - 40
-          - 80
-        "44":
-          - 20
-          - 40
-          - 80
-        "48":
-          - 20
-          - 40
-          - 80
-        "52":
-          - 20
-          - 40
-          - 80
-        "56":
-          - 20
-          - 40
-          - 80
-        "60":
-          - 20
-          - 40
-          - 80
-        "64":
-          - 20
-          - 40
-          - 80
-        "100":
-          - 20
-          - 40
-          - 80
-        "104":
-          - 20
-          - 40
-          - 80
-        "108":
-          - 20
-          - 40
-          - 80
-        "112":
-          - 20
-          - 40
-          - 80
-        "116":
-          - 20
-          - 40
-          - 80
-        "120":
-          - 20
-          - 40
-          - 80
-        "124":
-          - 20
-          - 40
-          - 80
-        "128":
-          - 20
-          - 40
-          - 80
-        "132":
-          - 20
-          - 40
-          - 80
-        "136":
-          - 20
-          - 40
-          - 80
-        "140":
-          - 20
-          - 40
-          - 80
-        "144":
-          - 20
-          - 40
-          - 80
-        "149":
-          - 20
-          - 40
-          - 80
-        "153":
-          - 20
-          - 40
-          - 80
-        "157":
-          - 20
-          - 40
-          - 80
-        "161":
-          - 20
-          - 40
-          - 80
-        "165":
-          - 20
-      allowed_channels:
-        "1":
-          - 20
-        "2":
-          - 20
-        "3":
-          - 20
-        "4":
-          - 20
-        "5":
-          - 20
-        "6":
-          - 20
-        "7":
-          - 20
-        "8":
-          - 20
-        "9":
-          - 20
-        "10":
-          - 20
-        "11":
-          - 20
-    - test_name: test_TW_regulatory_compliance
-      country_code: TW
-      test_channels:
-        "1":
-          - 20
-        "2":
-          - 20
-        "3":
-          - 20
-        "4":
-          - 20
-        "5":
-          - 20
-        "6":
-          - 20
-        "7":
-          - 20
-        "8":
-          - 20
-        "9":
-          - 20
-        "10":
-          - 20
-        "11":
-          - 20
-        "12":
-          - 20
-        "13":
-          - 20
-        "14":
-          - 20
-        "36":
-          - 20
-          - 40
-          - 80
-        "40":
-          - 20
-          - 40
-          - 80
-        "44":
-          - 20
-          - 40
-          - 80
-        "48":
-          - 20
-          - 40
-          - 80
-        "52":
-          - 20
-          - 40
-          - 80
-        "56":
-          - 20
-          - 40
-          - 80
-        "60":
-          - 20
-          - 40
-          - 80
-        "64":
-          - 20
-          - 40
-          - 80
-        "100":
-          - 20
-          - 40
-          - 80
-        "104":
-          - 20
-          - 40
-          - 80
-        "108":
-          - 20
-          - 40
-          - 80
-        "112":
-          - 20
-          - 40
-          - 80
-        "116":
-          - 20
-          - 40
-          - 80
-        "120":
-          - 20
-          - 40
-          - 80
-        "124":
-          - 20
-          - 40
-          - 80
-        "128":
-          - 20
-          - 40
-          - 80
-        "132":
-          - 20
-          - 40
-          - 80
-        "136":
-          - 20
-          - 40
-          - 80
-        "140":
-          - 20
-          - 40
-          - 80
-        "144":
-          - 20
-          - 40
-          - 80
-        "149":
-          - 20
-          - 40
-          - 80
-        "153":
-          - 20
-          - 40
-          - 80
-        "157":
-          - 20
-          - 40
-          - 80
-        "161":
-          - 20
-          - 40
-          - 80
-        "165":
-          - 20
-      allowed_channels:
-        "1":
-          - 20
-        "2":
-          - 20
-        "3":
-          - 20
-        "4":
-          - 20
-        "5":
-          - 20
-        "6":
-          - 20
-        "7":
-          - 20
-        "8":
-          - 20
-        "9":
-          - 20
-        "10":
-          - 20
-        "11":
-          - 20
-        "12":
-          - 20
-        "13":
-          - 20
-        "36":
-          - 20
-          - 40
-          - 80
-        "40":
-          - 20
-          - 40
-          - 80
-        "44":
-          - 20
-          - 40
-          - 80
-        "48":
-          - 20
-          - 40
-          - 80
-        "52":
-          - 20
-          - 40
-          - 80
-        "56":
-          - 20
-          - 40
-          - 80
-        "60":
-          - 20
-          - 40
-          - 80
-        "64":
-          - 20
-          - 40
-          - 80
-        "100":
-          - 20
-          - 40
-          - 80
-        "104":
-          - 20
-          - 40
-          - 80
-        "108":
-          - 20
-          - 40
-          - 80
-        "112":
-          - 20
-          - 40
-          - 80
-        "116":
-          - 20
-          - 40
-          - 80
-        "120":
-          - 20
-          - 40
-          - 80
-        "124":
-          - 20
-          - 40
-          - 80
-        "128":
-          - 20
-          - 40
-          - 80
-        "132":
-          - 20
-          - 40
-          - 80
-        "136":
-          - 20
-          - 40
-          - 80
-        "140":
-          - 20
-          - 40
-          - 80
-        "144":
-          - 20
-          - 40
-          - 80
-        "149":
-          - 20
-          - 40
-          - 80
-        "153":
-          - 20
-          - 40
-          - 80
-        "157":
-          - 20
-          - 40
-          - 80
-        "161":
-          - 20
-          - 40
-          - 80
-        "165":
-          - 20
-    - test_name: test_AT_regulatory_compliance
-      country_code: AT
-      test_channels:
-        "1":
-          - 20
-        "2":
-          - 20
-        "3":
-          - 20
-        "4":
-          - 20
-        "5":
-          - 20
-        "6":
-          - 20
-        "7":
-          - 20
-        "8":
-          - 20
-        "9":
-          - 20
-        "10":
-          - 20
-        "11":
-          - 20
-        "12":
-          - 20
-        "13":
-          - 20
-        "14":
-          - 20
-        "36":
-          - 20
-          - 40
-          - 80
-        "40":
-          - 20
-          - 40
-          - 80
-        "44":
-          - 20
-          - 40
-          - 80
-        "48":
-          - 20
-          - 40
-          - 80
-        "52":
-          - 20
-          - 40
-          - 80
-        "56":
-          - 20
-          - 40
-          - 80
-        "60":
-          - 20
-          - 40
-          - 80
-        "64":
-          - 20
-          - 40
-          - 80
-        "100":
-          - 20
-          - 40
-          - 80
-        "104":
-          - 20
-          - 40
-          - 80
-        "108":
-          - 20
-          - 40
-          - 80
-        "112":
-          - 20
-          - 40
-          - 80
-        "116":
-          - 20
-          - 40
-          - 80
-        "120":
-          - 20
-          - 40
-          - 80
-        "124":
-          - 20
-          - 40
-          - 80
-        "128":
-          - 20
-          - 40
-          - 80
-        "132":
-          - 20
-          - 40
-          - 80
-        "136":
-          - 20
-          - 40
-          - 80
-        "140":
-          - 20
-          - 40
-          - 80
-        "144":
-          - 20
-          - 40
-          - 80
-        "149":
-          - 20
-          - 40
-          - 80
-        "153":
-          - 20
-          - 40
-          - 80
-        "157":
-          - 20
-          - 40
-          - 80
-        "161":
-          - 20
-          - 40
-          - 80
-        "165":
-          - 20
-      allowed_channels:
-        "1":
-          - 20
-        "2":
-          - 20
-        "3":
-          - 20
-        "4":
-          - 20
-        "5":
-          - 20
-        "6":
-          - 20
-        "7":
-          - 20
-        "8":
-          - 20
-        "9":
-          - 20
-        "10":
-          - 20
-        "11":
-          - 20
-        "12":
-          - 20
-        "13":
-          - 20
-        "36":
-          - 20
-          - 40
-          - 80
-        "40":
-          - 20
-          - 40
-          - 80
-        "44":
-          - 20
-          - 40
-          - 80
-        "48":
-          - 20
-          - 40
-          - 80
-        "52":
-          - 20
-          - 40
-          - 80
-        "56":
-          - 20
-          - 40
-          - 80
-        "60":
-          - 20
-          - 40
-          - 80
-        "64":
-          - 20
-          - 40
-          - 80
-        "100":
-          - 20
-          - 40
-          - 80
-        "104":
-          - 20
-          - 40
-          - 80
-        "108":
-          - 20
-          - 40
-          - 80
-        "112":
-          - 20
-          - 40
-          - 80
-        "116":
-          - 20
-          - 40
-          - 80
-        "120":
-          - 20
-          - 40
-          - 80
-        "124":
-          - 20
-          - 40
-          - 80
-        "128":
-          - 20
-          - 40
-          - 80
-        "132":
-          - 20
-          - 40
-          - 80
-        "136":
-          - 20
-          - 40
-          - 80
-        "140":
-          - 20
-          - 40
-          - 80
diff --git a/src/antlion/tests/wlan_policy/HiddenNetworksTest.py b/src/antlion/tests/wlan_policy/HiddenNetworksTest.py
deleted file mode 100644
index 2928304..0000000
--- a/src/antlion/tests/wlan_policy/HiddenNetworksTest.py
+++ /dev/null
@@ -1,170 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import time
-
-from antlion.controllers.access_point import setup_ap
-from antlion.controllers.ap_lib import hostapd_constants, hostapd_security
-from antlion.test_utils.wifi import base_test
-from antlion.utils import rand_ascii_str
-
-from mobly import signals, test_runner
-
-# These tests should have a longer timeout for connecting than normal connect
-# tests because the device should probabilistically perform active scans for
-# hidden networks. Multiple scans are necessary to verify a very low chance of
-# random failure.
-TIME_WAIT_FOR_CONNECT = 90
-TIME_ATTEMPT_SCANS = 90
-
-CONNECTIONS_ENABLED = "ConnectionsEnabled"
-CONNECTIONS_DISABLED = "ConnectionsDisabled"
-SECURITY_NONE = "none"
-WPA2 = "wpa2"
-
-
-class HiddenNetworksTest(base_test.WifiBaseTest):
-    """Tests that WLAN Policy will detect hidden networks
-
-    Test Bed Requirement:
-    * One or more Fuchsia devices
-    * One Access Point
-    """
-
-    def setup_class(self):
-        super().setup_class()
-        # Start an AP with a hidden network
-        self.hidden_ssid = rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G)
-        self.access_point = self.access_points[0]
-        self.hidden_password = rand_ascii_str(hostapd_constants.AP_PASSPHRASE_LENGTH_2G)
-        self.hidden_security = WPA2
-        security = hostapd_security.Security(
-            security_mode=self.hidden_security, password=self.hidden_password
-        )
-
-        self.access_point.stop_all_aps()
-        setup_ap(
-            self.access_point,
-            "whirlwind",
-            hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-            self.hidden_ssid,
-            hidden=True,
-            security=security,
-        )
-
-        if len(self.fuchsia_devices) < 1:
-            raise EnvironmentError("No Fuchsia devices found.")
-        for fd in self.fuchsia_devices:
-            fd.configure_wlan(
-                association_mechanism="policy", preserve_saved_networks=True
-            )
-
-    def setup_test(self):
-        for fd in self.fuchsia_devices:
-            if not fd.wlan_policy_controller.remove_all_networks():
-                raise EnvironmentError("Failed to remove all networks in setup")
-
-    def teardown_class(self):
-        self.access_point.stop_all_aps()
-
-    def test_scan_hidden_networks(self):
-        # Scan a few times and check that we see the hidden networks in the
-        # results at least once. Even if hidden networks are scanned
-        # probabilistically, we should see it after a few tries.
-        for fd in self.fuchsia_devices:
-            # A hidden network must be saved to be found in scan results.
-            # Stop client connections to not trigger a connect when saving,
-            # which would interfere with requested scans.
-            fd.wlan_policy_controller.stop_client_connections()
-            if not fd.wlan_policy_controller.save_network(
-                self.hidden_ssid, self.hidden_security, password=self.hidden_password
-            ):
-                raise EnvironmentError("Failed to save network")
-            fd.wlan_policy_controller.start_client_connections()
-            start_time = time.time()
-            num_performed_scans = 0
-
-            while time.time() < start_time + TIME_ATTEMPT_SCANS:
-                num_performed_scans = num_performed_scans + 1
-                scan_result = fd.sl4f.wlan_policy_lib.wlanScanForNetworks()
-                if scan_result["error"] != None:
-                    self.log.warn(
-                        "Failed to scan for networks with error %s"
-                        % scan_result["error"]
-                    )
-                    continue
-                else:
-                    scans = scan_result["result"]
-                if self.hidden_ssid in scans:
-                    self.log.info(
-                        "SSID of hidden network seen after %d scans"
-                        % num_performed_scans
-                    )
-                    return
-                # Don't overload SL4F with scan requests
-                time.sleep(1)
-
-            self.log.error("Failed to see SSID after %d scans" % num_performed_scans)
-            raise signals.TestFailure("Failed to see hidden network in scans")
-
-    def test_auto_connect_hidden_on_startup(self):
-        """Test that if we are not connected to anything but have a hidden
-        network saved, we will eventually actively scan for it and connect."""
-        # Start up AP with an open network with a random SSID
-
-        for fd in self.fuchsia_devices:
-            # Test that we will auto connect without anything being triggered by
-            # saving a new network.
-            fd.wlan_policy_controller.stop_client_connections()
-
-            # Save the network.
-            if not fd.wlan_policy_controller.save_network(
-                self.hidden_ssid, self.hidden_security, password=self.hidden_password
-            ):
-                raise EnvironmentError("Failed to save network")
-
-            # Reboot the device and check that it auto connects.
-            fd.reboot()
-            if not fd.wlan_policy_controller.wait_for_connect(
-                self.hidden_ssid, self.hidden_security, timeout=TIME_WAIT_FOR_CONNECT
-            ):
-                raise signals.TestFailure("Failed to connect to network")
-
-    def test_auto_connect_hidden_on_save(self):
-        """Test that if we save a hidden network and are not connected to
-        anything, the device will connect to the hidden network that was
-        just saved."""
-        for fd in self.fuchsia_devices:
-            if not fd.wlan_policy_controller.wait_for_no_connections():
-                self.log.info(
-                    "Failed to get into a disconnected state to start the test"
-                )
-                raise EnvironmentError("Failed to disconnect all")
-
-            # Save the network and make sure that we see the device auto connect to it.
-            if not fd.wlan_policy_controller.save_network(
-                self.hidden_ssid, self.hidden_security, password=self.hidden_password
-            ):
-                raise EnvironmentError("Failed to save network")
-
-            if not fd.wlan_policy_controller.wait_for_connect(
-                self.hidden_ssid, self.hidden_security, timeout=TIME_WAIT_FOR_CONNECT
-            ):
-                raise signals.TestFailure("Failed to connect to network")
-
-
-if __name__ == "__main__":
-    test_runner.main()
diff --git a/src/antlion/tests/wlan_policy/PolicyScanTest.py b/src/antlion/tests/wlan_policy/PolicyScanTest.py
deleted file mode 100644
index 6df3291..0000000
--- a/src/antlion/tests/wlan_policy/PolicyScanTest.py
+++ /dev/null
@@ -1,256 +0,0 @@
-#!/usr/bin/env python3.4
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-"""This test exercises the Scan functionality for the WLAN Policy API."""
-
-from datetime import datetime
-
-from antlion.controllers.ap_lib import (
-    hostapd_ap_preset,
-    hostapd_bss_settings,
-    hostapd_constants,
-    hostapd_security,
-)
-from antlion.test_utils.wifi import base_test
-
-from mobly import signals, test_runner
-
-
-class PolicyScanTest(base_test.WifiBaseTest):
-    """WLAN policy scan test class.
-
-    Test Bed Requirement:
-    * One or more Fuchsia devices
-    * One Whirlwind Access Point
-    """
-
-    def setup_class(self):
-        super().setup_class()
-        if len(self.fuchsia_devices) < 1:
-            raise signals.TestFailure("No fuchsia devices found.")
-        for fd in self.fuchsia_devices:
-            fd.configure_wlan(
-                association_mechanism="policy", preserve_saved_networks=True
-            )
-        if len(self.access_points) < 1:
-            raise signals.TestFailure("No access points found.")
-        # Prepare the AP
-        self.access_point = self.access_points[0]
-        self.access_point.stop_all_aps()
-        # Generate network params.
-        bss_settings_2g = []
-        bss_settings_5g = []
-        open_network = self.get_open_network(False, [])
-        self.open_network_2g = open_network["2g"]
-        self.open_network_5g = open_network["5g"]
-        wpa2_settings = self.get_psk_network(False, [])
-        self.wpa2_network_2g = wpa2_settings["2g"]
-        self.wpa2_network_5g = wpa2_settings["5g"]
-        bss_settings_2g.append(
-            hostapd_bss_settings.BssSettings(
-                name=self.wpa2_network_2g["SSID"],
-                ssid=self.wpa2_network_2g["SSID"],
-                security=hostapd_security.Security(
-                    security_mode=self.wpa2_network_2g["security"],
-                    password=self.wpa2_network_2g["password"],
-                ),
-            )
-        )
-        bss_settings_5g.append(
-            hostapd_bss_settings.BssSettings(
-                name=self.wpa2_network_5g["SSID"],
-                ssid=self.wpa2_network_5g["SSID"],
-                security=hostapd_security.Security(
-                    security_mode=self.wpa2_network_5g["security"],
-                    password=self.wpa2_network_5g["password"],
-                ),
-            )
-        )
-        self.ap_2g = hostapd_ap_preset.create_ap_preset(
-            iface_wlan_2g=self.access_points[0].wlan_2g,
-            iface_wlan_5g=self.access_points[0].wlan_5g,
-            channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
-            ssid=self.open_network_2g["SSID"],
-            bss_settings=bss_settings_2g,
-        )
-        self.ap_5g = hostapd_ap_preset.create_ap_preset(
-            iface_wlan_2g=self.access_points[0].wlan_2g,
-            iface_wlan_5g=self.access_points[0].wlan_5g,
-            channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-            ssid=self.open_network_5g["SSID"],
-            bss_settings=bss_settings_5g,
-        )
-        # Start the networks
-        self.access_point.start_ap(hostapd_config=self.ap_2g)
-        self.access_point.start_ap(hostapd_config=self.ap_5g)
-        # Save the SSIDs
-        self.all_ssids = [
-            self.open_network_2g["SSID"],
-            self.wpa2_network_2g["SSID"],
-            self.open_network_5g["SSID"],
-            self.wpa2_network_5g["SSID"],
-        ]
-
-    def setup_test(self):
-        for fd in self.fuchsia_devices:
-            # stub for setting up all the fuchsia devices in the testbed.
-            return (
-                fd.wlan_policy_controller.remove_all_networks_and_wait_for_no_connections()
-            )
-
-    def teardown_test(self):
-        for fd in self.fuchsia_devices:
-            # stub until policy layer has something useful to use here.
-            pass
-
-    def teardown_class(self):
-        pass
-
-    def on_fail(self, test_name, begin_time):
-        for fd in self.fuchsia_devices:
-            try:
-                fd.take_bug_report(test_name, begin_time)
-                fd.get_log(test_name, begin_time)
-            except Exception:
-                pass
-
-            try:
-                if fd.device.hard_reboot_on_fail:
-                    fd.hard_power_cycle(self.pdu_devices)
-            except AttributeError:
-                pass
-
-    """Helper Functions"""
-
-    def perform_scan(self, fd):
-        """Initiates scan on a Fuchsia device and returns results
-
-        Args:
-            fd: A fuchsia device
-
-        Raises:
-            signals.TestFailure: if an error is reported by the device during
-            the scan
-
-        Returns:
-            A list of scan results
-        """
-        start_time = datetime.now()
-
-        scan_response = fd.sl4f.wlan_policy_lib.wlanScanForNetworks()
-
-        # first check if we received an error
-        if scan_response.get("error") is not None:
-            # the response indicates an error - log and raise failure
-            raise signals.TestFailure(
-                "Aborting test - scan failed with "
-                "error: %s" % scan_response.get("error")
-            )
-
-        # the scan command did not get an error response - go ahead
-        # and check for scan results
-        scan_results = scan_response["result"]
-        total_time_ms = (datetime.now() - start_time).total_seconds() * 1000
-
-        self.log.info("scan contained %d results", len(scan_results))
-        self.log.info("scan time: %d ms", total_time_ms)
-
-        return scan_results
-
-    def connect_to_network(self, wlan_network_params, fd):
-        """Connects the Fuchsia device to the specified network
-
-        Args:
-            wlan_network_params: A dictionary containing wlan information.
-            fd: A fuchsia device
-
-        Raises:
-            signals.TestFailure: if the device fails to connect
-        """
-        target_ssid = wlan_network_params["SSID"]
-        target_pwd = wlan_network_params.get("password")
-        target_security = wlan_network_params.get("security")
-
-        # TODO(mnck): use the Policy version of this call, when it is available.
-        connection_response = fd.wlan_policy_controller.save_and_connect(
-            target_ssid, target_security, password=target_pwd
-        )
-        if not connection_response:
-            raise signals.TestFailure("Aborting test - Connect call failed")
-        self.log.info("Network connection successful.")
-
-    def assert_network_is_in_results(self, scan_results, *, ssid):
-        """Verified scan results contain a specified network
-
-        Args:
-            scan_results: Scan results from a fuchsia Policy API scan
-            ssid: SSID for network that should be in the results
-
-        Raises:
-            signals.TestFailure: if the network is not present in the scan
-            results
-        """
-        if ssid not in scan_results:
-            raise signals.TestFailure(
-                'Network "%s" was not found in scan results: %s', ssid, scan_results
-            )
-
-    """Tests"""
-
-    def test_basic_scan_request(self):
-        """Verify a scan returns all expected networks"""
-        for fd in self.fuchsia_devices:
-            scan_results = self.perform_scan(fd)
-            if len(scan_results) == 0:
-                raise signals.TestFailure("Scan failed or did not " "find any networks")
-            for ssid in self.all_ssids:
-                self.assert_network_is_in_results(scan_results, ssid=ssid)
-
-    def test_scan_while_connected_open_network_2g(self):
-        """Connect to an open 2g network and perform a scan"""
-        for fd in self.fuchsia_devices:
-            self.connect_to_network(self.open_network_2g, fd)
-            scan_results = self.perform_scan(fd)
-            for ssid in self.all_ssids:
-                self.assert_network_is_in_results(scan_results, ssid=ssid)
-
-    def test_scan_while_connected_wpa2_network_2g(self):
-        """Connect to a WPA2 2g network and perform a scan"""
-        for fd in self.fuchsia_devices:
-            self.connect_to_network(self.wpa2_network_2g, fd)
-            scan_results = self.perform_scan(fd)
-            for ssid in self.all_ssids:
-                self.assert_network_is_in_results(scan_results, ssid=ssid)
-
-    def test_scan_while_connected_open_network_5g(self):
-        """Connect to an open 5g network and perform a scan"""
-        for fd in self.fuchsia_devices:
-            self.connect_to_network(self.open_network_5g, fd)
-            scan_results = self.perform_scan(fd)
-            for ssid in self.all_ssids:
-                self.assert_network_is_in_results(scan_results, ssid=ssid)
-
-    def test_scan_while_connected_wpa2_network_5g(self):
-        """Connect to a WPA2 5g network and perform a scan"""
-        for fd in self.fuchsia_devices:
-            self.connect_to_network(self.wpa2_network_5g, fd)
-            scan_results = self.perform_scan(fd)
-            for ssid in self.all_ssids:
-                self.assert_network_is_in_results(scan_results, ssid=ssid)
-
-
-if __name__ == "__main__":
-    test_runner.main()
diff --git a/src/antlion/tests/wlan_policy/RegulatoryRecoveryTest.py b/src/antlion/tests/wlan_policy/RegulatoryRecoveryTest.py
deleted file mode 100644
index 00c7a84..0000000
--- a/src/antlion/tests/wlan_policy/RegulatoryRecoveryTest.py
+++ /dev/null
@@ -1,187 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion.test_utils.wifi import base_test
-
-from mobly import signals, test_runner
-
-
-class RegulatoryRecoveryTest(base_test.WifiBaseTest):
-    """Tests the policy layer's response to setting country code.
-
-    Test Bed Requirements:
-    * One Fuchsia device that is capable of operating as a WLAN client and AP.
-
-    Example Config:
-    "regulatory_recovery_test_params": {
-        "country_code": "US"
-    }
-
-    If no configuration information is provided, the test will default to
-    toggling between WW and US.
-    """
-
-    def setup_class(self):
-        super().setup_class()
-        if len(self.fuchsia_devices) < 1:
-            raise EnvironmentError("No Fuchsia devices found.")
-
-        self.config_test_params = self.user_params.get(
-            "regulatory_recovery_test_params", {}
-        )
-        self.country_code = self.config_test_params.get("country_code", "US")
-        self.negative_test = self.config_test_params.get("negative_test", False)
-
-        for fd in self.fuchsia_devices:
-            fd.configure_wlan(association_mechanism="policy")
-
-    def teardown_class(self):
-        if not self.negative_test:
-            for fd in self.fuchsia_devices:
-                fd.wlan_controller.set_country_code(self.country_code)
-
-        super().teardown_class()
-
-    def setup_test(self):
-        for fd in self.fuchsia_devices:
-            # Remove all network configs.  These tests do not require
-            # connecting to a network, they merely verify whether or not client
-            # mode is functional.
-            if not fd.wlan_policy_controller.remove_all_networks():
-                raise EnvironmentError("Failed to remove all networks in setup")
-
-            # To ensure that DUTs are initially in a known state, set all of
-            # their PHYs to world-wide mode.  Also disable client and AP
-            # functionality so that there is no automated WLAN behavior.
-            fd.wlan_controller.set_country_code("WW")
-            fd.wlan_policy_controller.stop_client_connections()
-            fd.sl4f.wlan_ap_policy_lib.wlanStopAllAccessPoint()
-
-    def set_country_code(self, fd):
-        try:
-            fd.wlan_controller.set_country_code(self.country_code)
-        except EnvironmentError as e:
-            if self.negative_test:
-                # In the negative case, setting the country code for an
-                # invalid country should fail.
-                pass
-            else:
-                # If this is not a negative test case, re-raise the
-                # exception.
-                raise e
-        else:
-            # The negative test case should have failed to set the country
-            # code and the positive test case should succeed.
-            if self.negative_test:
-                raise EnvironmentError("Setting invalid country code succeeded.")
-            else:
-                pass
-
-    def test_interfaces_not_recreated_when_initially_disabled(self):
-        """This test ensures that after a new regulatory region is applied
-        while client connections and access points are disabled, no new
-        interfaces are automatically recreated.
-        """
-        for fd in self.fuchsia_devices:
-            # Set the region code.
-            self.set_country_code(fd)
-
-            # Reset the listeners and verify the current state.
-            fd.sl4f.wlan_policy_lib.wlanSetNewListener()
-            fd.sl4f.wlan_ap_policy_lib.wlanSetNewListener()
-
-            # Verify that the client and AP are still stopped.
-            client_state = fd.sl4f.wlan_policy_lib.wlanGetUpdate()
-            if client_state["error"]:
-                raise signals.TestFailure(
-                    "error querying client state: {}".format(client_state["error"])
-                )
-            elif client_state["result"]["state"] != "ConnectionsDisabled":
-                raise signals.TestFailure(
-                    "client connections in unexpected state: {}".format(
-                        client_state["result"]["state"]
-                    )
-                )
-
-            ap_state = fd.sl4f.wlan_ap_policy_lib.wlanGetUpdate()
-            if ap_state["error"]:
-                raise signals.TestFailure(
-                    "error querying AP state: {}".format(ap_state["error"])
-                )
-
-            ap_updates = ap_state["result"]
-            if ap_updates:
-                raise signals.TestFailure(
-                    "AP in unexpected state: {}".format(ap_updates)
-                )
-
-    def test_interfaces_recreated_when_initially_enabled(self):
-        """This test ensures that after a new regulatory region is applied
-        while client connections and access points are enabled, all
-        interfaces are recreated.
-        """
-        test_ssid = "test_ssid"
-        test_security_type = "none"
-        for fd in self.fuchsia_devices:
-            # Start client connections and start an AP before setting the
-            # country code.
-            fd.wlan_policy_controller.start_client_connections()
-            fd.sl4f.wlan_ap_policy_lib.wlanStartAccessPoint(
-                test_ssid, test_security_type, "", "local_only", "any"
-            )
-
-            # Set the country code.
-            self.set_country_code(fd)
-
-            # Reset the listeners and verify the current state.
-            fd.sl4f.wlan_policy_lib.wlanSetNewListener()
-            fd.sl4f.wlan_ap_policy_lib.wlanSetNewListener()
-
-            # Verify that client connections are enabled and the AP is brought
-            # up again.
-            client_state = fd.sl4f.wlan_policy_lib.wlanGetUpdate()
-            if client_state["error"]:
-                raise signals.TestFailure(
-                    "error querying client state: {}".format(client_state["error"])
-                )
-            elif client_state["result"]["state"] != "ConnectionsEnabled":
-                raise signals.TestFailure(
-                    "client connections in unexpected state: {}".format(
-                        client_state["result"]["state"]
-                    )
-                )
-
-            ap_state = fd.sl4f.wlan_ap_policy_lib.wlanGetUpdate()
-            if ap_state["error"]:
-                raise signals.TestFailure(
-                    "error querying AP state: {}".format(ap_state["error"])
-                )
-
-            ap_updates = ap_state["result"]
-            if len(ap_updates) != 1:
-                raise signals.TestFailure("No APs are running: {}".format(ap_updates))
-            else:
-                if (
-                    ap_updates[0]["id"]["ssid"] != test_ssid
-                    or ap_updates[0]["id"]["type_"].lower() != test_security_type
-                ):
-                    raise signals.TestFailure(
-                        "AP in unexpected state: {}".format(ap_updates[0])
-                    )
-
-
-if __name__ == "__main__":
-    test_runner.main()
diff --git a/src/antlion/tests/wlan_policy/SavedNetworksTest.py b/src/antlion/tests/wlan_policy/SavedNetworksTest.py
deleted file mode 100644
index 73165c7..0000000
--- a/src/antlion/tests/wlan_policy/SavedNetworksTest.py
+++ /dev/null
@@ -1,380 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-A test that saves various networks and verifies the behavior of save, get, and
-remove through the ClientController API of WLAN policy.
-"""
-
-from antlion.controllers.access_point import setup_ap
-from antlion.controllers.ap_lib import hostapd_constants, hostapd_security
-from antlion.test_utils.wifi import base_test
-from antlion.utils import rand_ascii_str, rand_hex_str
-
-from mobly import signals, test_runner
-
-PSK_LEN = 64
-TIME_WAIT_FOR_DISCONNECT = 30
-TIME_WAIT_FOR_CONNECT = 30
-
-STATE_CONNECTED = "Connected"
-STATE_CONNECTING = "Connecting"
-CONNECTIONS_ENABLED = "ConnectionsEnabled"
-CONNECTIONS_DISABLED = "ConnectionsDisabled"
-SECURITY_NONE = "none"
-WEP = "wep"
-WPA = "wpa"
-WPA2 = "wpa2"
-WPA3 = "wpa3"
-CREDENTIAL_TYPE_NONE = "none"
-PASSWORD = "password"
-PSK = "psk"
-CREDENTIAL_VALUE_NONE = ""
-
-
-class SavedNetworksTest(base_test.WifiBaseTest):
-    """WLAN policy commands test class.
-
-    Test Bed Requirement:
-    * One or more Fuchsia devices
-    * One Access Point
-    """
-
-    def setup_class(self):
-        super().setup_class()
-        # Keep track of whether we have started an access point in a test
-        if len(self.fuchsia_devices) < 1:
-            raise EnvironmentError("No Fuchsia devices found.")
-        for fd in self.fuchsia_devices:
-            fd.configure_wlan(
-                association_mechanism="policy", preserve_saved_networks=True
-            )
-
-    def setup_test(self):
-        for fd in self.fuchsia_devices:
-            if not fd.wlan_policy_controller.remove_all_networks():
-                raise EnvironmentError("Failed to remove all networks in setup")
-        self.access_points[0].stop_all_aps()
-
-    def teardown_class(self):
-        for fd in self.fuchsia_devices:
-            fd.wlan_policy_controller.remove_all_networks()
-        self.access_points[0].stop_all_aps()
-
-    def save_bad_network(self, fd, ssid, security_type, password=""):
-        """Saves a network as specified on the given device and verify that we
-        Args:
-            fd: The Fuchsia device to save the network on
-            ssid: The SSID or name of the network to save.
-            security_type: The security type to save the network as, ie "none",
-                        "wep", "wpa", "wpa2", or "wpa3"
-            password: The password to save for the network. Empty string represents
-                    no password, and PSK should be provided as 64 character hex string.
-        """
-        if fd.wlan_policy_controller.save_network(
-            ssid, security_type, password=password
-        ):
-            self.log.info(
-                "Attempting to save bad network config %s did not give an error" % ssid
-            )
-            raise signals.TestFailure("Failed to get error saving bad network")
-
-    def check_get_saved_network(
-        self, fd, ssid, security_type, credential_type, credential_value
-    ):
-        """Verify that get saved networks sees the single specified network. Used
-            for the tests that save and get a single network. Maps security types of
-            expected and actual to be case insensitive.
-        Args:
-            fd: Fuchsia device to run on.
-            ssid: The name of the network to check for.
-            security_type: The security of the network, ie "none", "wep", "wpa",
-                        "wpa2", or "wpa3".
-            credential_type: The type of credential saved for the network, ie
-                            "none", "password", or "psk".
-            credential_value: The actual credential, or "" if there is no credential.
-        """
-        expected_networks = [
-            {
-                "ssid": ssid,
-                "security_type": security_type,
-                "credential_type": credential_type,
-                "credential_value": credential_value,
-            }
-        ]
-        self.check_saved_networks(fd, expected_networks)
-
-    def check_saved_networks(self, fd, expected_networks):
-        """Verify that the saved networks we get from the device match the provided
-            list of networks.
-        Args:
-            fd: The Fuchsia device to run on.
-            expected_networks: The list of networks we expect to get from the device,
-                            unordered and in the same format as we would get:
-                            [{"credential_type": _, "credential_value": _,
-                            "security_type": _, "ssid": _}, ...] There should be
-                            no duplicates in expected networks.
-        """
-        actual_networks = list(
-            map(self.lower_case_network, fd.wlan_policy_controller.get_saved_networks())
-        )
-        expected_networks = list(
-            map(self.lower_case_network, fd.wlan_policy_controller.get_saved_networks())
-        )
-
-        if len(actual_networks) != len(expected_networks):
-            self.log.info(
-                "Number of expected saved networks does not match the actual number."
-                "Expected: %d, actual: %d"
-                % (len(actual_networks), len(expected_networks))
-            )
-            raise signals.TestFailure(
-                "Failed to get the expected number of saved networks"
-            )
-        for network in actual_networks:
-            if network not in expected_networks:
-                self.log.info(
-                    "Actual and expected networks do not match. Actual: %s,\n"
-                    "Expected: %s" % (actual_networks, expected_networks)
-                )
-                raise signals.TestFailure("Got an unexpected saved network")
-
-    def lower_case_network(self, network):
-        if "security_type" not in network:
-            self.log.error("Missing security type in network %s" % network)
-            raise signals.TestFailure("Network is missing security type")
-        if "credential_type" not in network:
-            self.log.error("Missing credential type in network %s" % network)
-            raise signals.TestFailure("Network is missing credential type")
-        {"ssid": network["ssid"], "security_type": network["security_type"]}
-
-    def save_and_check_network(self, ssid, security_type, password=""):
-        """Perform a test for saving, getting, and removing a single network on each
-            device.
-        Args:
-            ssid: The network name to use.
-            security_type: The security of the network as a string, ie "none",
-                        "wep", "wpa", "wpa2", or "wpa3" (case insensitive)
-            password: The password of the network. PSK should be given as 64
-                    hexadecimal characters and none should be an empty string.
-        """
-        for fd in self.fuchsia_devices:
-            if not fd.wlan_policy_controller.save_network(
-                ssid, security_type, password=password
-            ):
-                raise signals.TestFailure("Failed to save network")
-            self.check_get_saved_network(
-                fd, ssid, security_type, self.credentialType(password), password
-            )
-
-    def start_ap(self, ssid, security_type, password=None, hidden=False):
-        """Starts an access point.
-        Args:
-            ssid: the SSID of the network to broadcast
-            security_type: the security type of the network to be broadcasted. This can be
-                None, "wep" "wpa", "wpa2", or "wpa3" (or from hostapd_constants.py)
-            password: the password to connect to the broadcasted network. The password is ignored
-                if security type is none.
-        """
-        # Put together the security configuration of the network to be
-        # broadcasted. Open networks are represented by no security.
-        if security_type == None or security_type.upper() == SECURITY_NONE:
-            security = None
-        else:
-            security = hostapd_security.Security(
-                security_mode=security_type, password=password
-            )
-
-        if len(self.access_points) > 0:
-            # Create an AP with default values other than the specified values.
-            setup_ap(
-                self.access_points[0],
-                "whirlwind",
-                hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-                ssid,
-                security=security,
-            )
-
-        else:
-            self.log.error("No access point available for test, please check config")
-            raise EnvironmentError("Failed to set up AP for test")
-
-    def credentialType(self, credentialValue):
-        """Returns the type of the credential to compare against values reported"""
-        if len(credentialValue) == PSK_LEN:
-            return PSK
-        elif len(credentialValue) == 0:
-            return "none"
-        else:
-            return PASSWORD
-
-    def same_network_identifier(self, net_id, ssid, security_type):
-        """Returns true if the network id is made of the given ssid and security
-        type, and false otherwise. Security type check is case insensitive.
-        """
-        return (
-            net_id["ssid"] == ssid and net_id["type_"].upper() == security_type.upper()
-        )
-
-    """Tests"""
-
-    def test_open_network_with_password(self):
-        for fd in self.fuchsia_devices:
-            # Save an open network with a password and verify that it fails to
-            # save.
-            self.save_bad_network(
-                fd, rand_ascii_str(10), SECURITY_NONE, rand_ascii_str(8)
-            )
-            self.check_saved_networks(fd, {})
-
-    def test_open_network(self):
-        ssid = rand_ascii_str(10)
-        self.save_and_check_network(ssid, SECURITY_NONE)
-
-    def test_network_with_psk(self):
-        ssid = rand_ascii_str(11)
-        # PSK are translated from hex to bytes when saved, and when returned
-        # by get_saved_networks it will be lower case.
-        psk = rand_hex_str(PSK_LEN).lower()
-        self.save_and_check_network(ssid, WPA2, psk)
-
-    def test_wep_network(self):
-        ssid = rand_ascii_str(12)
-        password = rand_ascii_str(13)
-        self.save_and_check_network(ssid, WEP, password)
-
-    def test_wpa2_network(self):
-        ssid = rand_ascii_str(9)
-        password = rand_ascii_str(15)
-        self.save_and_check_network(ssid, WPA2, password)
-
-    def test_wpa_network(self):
-        ssid = rand_ascii_str(16)
-        password = rand_ascii_str(9)
-        self.save_and_check_network(ssid, WPA, password)
-
-    def test_wpa3_network(self):
-        ssid = rand_ascii_str(9)
-        password = rand_ascii_str(15)
-        self.save_and_check_network(ssid, WPA3, password)
-
-    def test_save_network_persists(self):
-        ssid = rand_ascii_str(10)
-        security = WPA2
-        password = rand_ascii_str(10)
-        for fd in self.fuchsia_devices:
-            if not fd.wlan_policy_controller.save_network(
-                ssid, security, password=password
-            ):
-                raise signals.TestFailure("Failed to save network")
-            # Reboot the device. The network should be persistently saved
-            # before the command is completed.
-            fd.reboot()
-            self.check_get_saved_network(fd, ssid, security, PASSWORD, password)
-
-    def test_same_ssid_diff_security(self):
-        for fd in self.fuchsia_devices:
-            saved_networks = fd.wlan_policy_controller.get_saved_networks()
-            ssid = rand_ascii_str(19)
-            password = rand_ascii_str(12)
-            if not fd.wlan_policy_controller.save_network(
-                ssid, WPA2, password=password
-            ):
-                raise signals.TestFailure("Failed to save network")
-            saved_networks.append(
-                {
-                    "ssid": ssid,
-                    "security_type": WPA2,
-                    "credential_type": PASSWORD,
-                    "credential_value": password,
-                }
-            )
-            if not fd.wlan_policy_controller.save_network(ssid, SECURITY_NONE):
-                raise signals.TestFailure("Failed to save network")
-            saved_networks.append(
-                {
-                    "ssid": ssid,
-                    "security_type": SECURITY_NONE,
-                    "credential_type": CREDENTIAL_TYPE_NONE,
-                    "credential_value": CREDENTIAL_VALUE_NONE,
-                }
-            )
-            actual_networks = fd.wlan_policy_controller.get_saved_networks()
-            # Both should be saved and present in network store since the have
-            # different security types and therefore different network identifiers.
-            self.check_saved_networks(fd, actual_networks)
-
-    def test_remove_disconnects(self):
-        # If we save, connect to, then remove the network while still connected
-        # to it, we expect the network will disconnect. This test requires a
-        # wpa2 network in the test config. Remove all other networks first so
-        # that we can't auto connect to them
-        ssid = rand_ascii_str(10)
-        security = WPA2
-        password = rand_ascii_str(10)
-        self.start_ap(ssid, security, password)
-
-        for fd in self.fuchsia_devices:
-            fd.wlan_policy_controller.wait_for_no_connections()
-
-            if not fd.wlan_policy_controller.save_and_connect:
-                raise signals.TestFailure("Failed to saved and connect to network")
-
-            if (
-                not fd.wlan_policy_controller.remove_all_networks_and_wait_for_no_connections()
-            ):
-                raise signals.TestFailure("Failed to disconnect from removed network")
-
-    def test_auto_connect_open(self):
-        # Start up AP with an open network with a random SSID
-        ssid = rand_ascii_str(10)
-        self.start_ap(ssid, None)
-        for fd in self.fuchsia_devices:
-            fd.wlan_policy_controller.wait_for_no_connections()
-
-            # Save the network and make sure that we see the device auto connect to it.
-            security = SECURITY_NONE
-            password = CREDENTIAL_VALUE_NONE
-            if not fd.wlan_policy_controller.save_network(
-                ssid, security, password=password
-            ):
-                raise signals.TestFailure("Failed to save network")
-            if not fd.wlan_policy_controller.wait_for_connect(
-                ssid, security, timeout=TIME_WAIT_FOR_CONNECT
-            ):
-                raise signals.TestFailure("Failed to connect to network")
-
-    def test_auto_connect_wpa3(self):
-        # Start up AP with an open network with a random SSID
-        ssid = rand_ascii_str(10)
-        security = WPA3
-        password = rand_ascii_str(10)
-        self.start_ap(ssid, security, password)
-        for fd in self.fuchsia_devices:
-            fd.wlan_policy_controller.wait_for_no_connections()
-
-            # Save the network and make sure that we see the device auto connect to it.
-            if not fd.wlan_policy_controller.save_network(
-                ssid, security, password=password
-            ):
-                raise signals.TestFailure("Failed to save network")
-            if not fd.wlan_policy_controller.wait_for_connect(
-                ssid, security, timeout=TIME_WAIT_FOR_CONNECT
-            ):
-                raise signals.TestFailure("Failed to connect to network")
-
-
-if __name__ == "__main__":
-    test_runner.main()
diff --git a/src/antlion/tests/wlan_policy/StartStopClientConnectionsTest.py b/src/antlion/tests/wlan_policy/StartStopClientConnectionsTest.py
deleted file mode 100644
index 0b87348..0000000
--- a/src/antlion/tests/wlan_policy/StartStopClientConnectionsTest.py
+++ /dev/null
@@ -1,210 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import time
-
-from antlion.controllers.access_point import setup_ap
-from antlion.controllers.ap_lib import hostapd_constants, hostapd_security
-from antlion.test_utils.wifi import base_test
-from antlion.utils import rand_ascii_str
-
-from mobly import signals, test_runner
-
-DISCONNECTED = "Disconnected"
-CONNECTION_STOPPED = "ConnectionStopped"
-CONNECTIONS_ENABLED = "ConnectionsEnabled"
-CONNECTIONS_DISABLED = "ConnectionsDisabled"
-WPA2 = "wpa2"
-UPDATE_TIMEOUT_SEC = 5
-
-
-class StartStopClientConnectionsTest(base_test.WifiBaseTest):
-    """Tests that we see the expected behavior with enabling and disabling
-        client connections
-
-    Test Bed Requirement:
-    * One or more Fuchsia devices
-    * One Access Point
-    """
-
-    def setup_class(self):
-        super().setup_class()
-        # Start an AP with a hidden network
-        self.ssid = rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G)
-        self.access_point = self.access_points[0]
-        self.password = rand_ascii_str(hostapd_constants.AP_PASSPHRASE_LENGTH_2G)
-        self.security_type = WPA2
-        security = hostapd_security.Security(
-            security_mode=self.security_type, password=self.password
-        )
-
-        self.access_point.stop_all_aps()
-        # TODO(63719) use varying values for AP that shouldn't affect the test.
-        setup_ap(
-            self.access_point,
-            "whirlwind",
-            hostapd_constants.AP_DEFAULT_CHANNEL_5G,
-            self.ssid,
-            security=security,
-        )
-
-        if len(self.fuchsia_devices) < 1:
-            raise EnvironmentError("No Fuchsia devices found.")
-        for fd in self.fuchsia_devices:
-            fd.configure_wlan(
-                association_mechanism="policy", preserve_saved_networks=True
-            )
-
-    def setup_test(self):
-        for fd in self.fuchsia_devices:
-            if not fd.wlan_policy_controller.remove_all_networks():
-                raise EnvironmentError("Failed to remove all networks in setup")
-
-    def teardown_class(self):
-        self.access_point.stop_all_aps()
-
-    def connect_and_validate(self, fd, ssid, security_type, expected_response):
-        """Sends a connect request to the device and verifies we get a response
-        without error. This does not validate that a connection will be
-        attempted. This will fail the test if there is an error sending the
-        connect request, or if we don't get the expected connect response."""
-        result_connect = fd.sl4f.wlan_policy_lib.wlanConnect(ssid, security_type)
-        if result_connect.get("error") != None:
-            self.log.error(
-                "Error occurred requesting a connection: %s"
-                % result_connect.get("error")
-            )
-            raise EnvironmentError("Failed to send connect request")
-        response = result_connect.get("result")
-        if response != expected_response:
-            self.log.error(
-                'Incorrect connect request response. Expected: "%s", Actual: %s'
-                % (expected_response, response)
-            )
-            raise signals.TestFailure("Failed to get expected connect response")
-
-    def await_state_update(self, fd, desired_state, timeout):
-        """This function polls the policy client state until it converges to
-            the caller's desired state.
-
-        Args:
-            fd: A FuchsiaDevice
-            desired_state: The expected client policy state.
-            timeout: Number of seconds to wait for the policy state to become
-                     the desired_state.
-        Returns:
-            None assuming the desired state has been reached.
-        Raises:
-            TestFailure if the desired state is not reached by the timeout.
-        """
-        start_time = time.time()
-        curr_state = None
-        while time.time() < start_time + timeout:
-            fd.sl4f.wlan_policy_lib.wlanSetNewListener()
-            curr_state = fd.sl4f.wlan_policy_lib.wlanGetUpdate()
-            if curr_state.get("error"):
-                self.log.error(
-                    "Error occurred getting status update: %s" % curr_state.get("error")
-                )
-                raise EnvironmentError("Failed to get update")
-
-            if curr_state.get("result") and curr_state.get("result") == desired_state:
-                return
-
-            time.sleep(1)
-
-        self.log.error(
-            "Client state did not converge to the expected state in %s "
-            "seconds. Expected update: %s Actual update: %s"
-            % (timeout, desired_state, curr_state)
-        )
-        raise signals.TestFailure("Client policy layer is in unexpected state")
-
-    def test_stop_client_connections_update(self):
-        for fd in self.fuchsia_devices:
-            if not fd.wlan_policy_controller.stop_client_connections():
-                raise EnvironmentError("Failed to stop client connecions")
-
-            # Check that the most recent update says that the device is not
-            # connected to anything and client connections are disabled
-            expected_update = {"networks": [], "state": CONNECTIONS_DISABLED}
-            self.await_state_update(fd, expected_update, UPDATE_TIMEOUT_SEC)
-
-    def test_start_client_connections_update(self):
-        for fd in self.fuchsia_devices:
-            if not fd.wlan_policy_controller.start_client_connections():
-                raise EnvironmentError("Failed to start client connecions")
-
-            # Check that the most recent update says that the device is not
-            # connected to anything and client connections are disabled
-            expected_update = {"networks": [], "state": CONNECTIONS_ENABLED}
-            self.await_state_update(fd, expected_update, UPDATE_TIMEOUT_SEC)
-
-    def test_stop_client_connections_rejects_connections(self):
-        # Test that if we turn client connections off, our requests to connect
-        # are rejected.
-        for fd in self.fuchsia_devices:
-            if not fd.wlan_policy_controller.stop_client_connections():
-                raise EnvironmentError("Failed to stop client connecions")
-
-            # Save the network, otherwise connecting may fail because the
-            # network is not saved instead of client connections being off
-            if not fd.wlan_policy_controller.save_network(
-                self.ssid, self.security_type, password=self.password
-            ):
-                raise EnvironmentError("Failed to save network")
-            expected_response = "RejectedIncompatibleMode"
-            self.connect_and_validate(
-                fd, self.ssid, self.security_type, expected_response
-            )
-
-    def test_start_stop_client_connections(self):
-        # Test that if we turn client connections on the device will connect,
-        # and if we turn of client connections the device will disconnect.
-        for fd in self.fuchsia_devices:
-            # Start client connections and check that we can
-            if not fd.wlan_policy_controller.save_network(
-                self.ssid, self.security_type, password=self.password
-            ):
-                raise EnvironmentError("Failed to save network")
-            if not fd.wlan_policy_controller.start_client_connections():
-                raise EnvironmentError("Failed to start client connections")
-
-            expected_response = "Acknowledged"
-            self.connect_and_validate(
-                fd, self.ssid, self.security_type, expected_response
-            )
-
-            if not fd.wlan_policy_controller.wait_for_connect(
-                self.ssid, self.security_type
-            ):
-                raise signals.TestFailure(
-                    "Failed to connect after starting client connections"
-                )
-
-            # Stop client connections again and check that we disconnect
-            if not fd.wlan_policy_controller.stop_client_connections():
-                raise EnvironmentError("Failed to stop client connecions")
-            if not fd.wlan_policy_controller.wait_for_disconnect(
-                self.ssid, self.security_type, DISCONNECTED, CONNECTION_STOPPED
-            ):
-                raise signals.TestFailure(
-                    "Failed to disconnect after client connections stopped"
-                )
-
-
-if __name__ == "__main__":
-    test_runner.main()
diff --git a/src/antlion/tracelogger.py b/src/antlion/tracelogger.py
deleted file mode 100644
index 1157379..0000000
--- a/src/antlion/tracelogger.py
+++ /dev/null
@@ -1,67 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import inspect
-import os
-
-
-class TraceLogger(object):
-    def __init__(self, logger):
-        self._logger = logger
-
-    @staticmethod
-    def _get_trace_info(level=1, offset=2):
-        # We want the stack frame above this and above the error/warning/info
-        inspect_stack = inspect.stack()
-        trace_info = ""
-        for i in range(level):
-            try:
-                stack_frames = inspect_stack[offset + i]
-                info = inspect.getframeinfo(stack_frames[0])
-                trace_info = "%s[%s:%s:%s]" % (
-                    trace_info,
-                    os.path.basename(info.filename),
-                    info.function,
-                    info.lineno,
-                )
-            except IndexError:
-                break
-        return trace_info
-
-    def _log_with(self, logging_lambda, trace_level, msg, *args, **kwargs):
-        trace_info = TraceLogger._get_trace_info(level=trace_level, offset=3)
-        logging_lambda("%s %s" % (msg, trace_info), *args, **kwargs)
-
-    def exception(self, msg, *args, **kwargs):
-        self._log_with(self._logger.exception, 5, msg, *args, **kwargs)
-
-    def debug(self, msg, *args, **kwargs):
-        self._log_with(self._logger.debug, 3, msg, *args, **kwargs)
-
-    def error(self, msg, *args, **kwargs):
-        self._log_with(self._logger.error, 3, msg, *args, **kwargs)
-
-    def warn(self, msg, *args, **kwargs):
-        self._log_with(self._logger.warn, 3, msg, *args, **kwargs)
-
-    def warning(self, msg, *args, **kwargs):
-        self._log_with(self._logger.warning, 3, msg, *args, **kwargs)
-
-    def info(self, msg, *args, **kwargs):
-        self._log_with(self._logger.info, 1, msg, *args, **kwargs)
-
-    def __getattr__(self, name):
-        return getattr(self._logger, name)
diff --git a/src/antlion/unit_tests/AttenuatorSanityTest.py b/src/antlion/unit_tests/AttenuatorSanityTest.py
deleted file mode 100644
index 94a15f3..0000000
--- a/src/antlion/unit_tests/AttenuatorSanityTest.py
+++ /dev/null
@@ -1,65 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import random
-from antlion.base_test import BaseTestClass
-
-CONSERVATIVE_MAX_ATTEN_VALUE = 10
-MIN_ATTEN_VALUE = 0
-
-
-class AttenuatorSanityTest(BaseTestClass):
-    def __init__(self, controllers):
-        BaseTestClass.__init__(self, controllers)
-        self.tests = (
-            "test_attenuator_validation",
-            "test_attenuator_get_max_value",
-        )
-        self.number_of_iteration = 2
-
-    def test_attenuator_validation(self):
-        """Validate attenuator set and get APIs works fine."""
-        for atten in self.attenuators:
-            self.log.info("Attenuator: {}".format(atten))
-            try:
-                atten_max_value = atten.get_max_atten()
-            except ValueError as e:
-                self.log.error(e)
-                self.log.info("Using conservative max value.")
-                atten_max_value = CONSERVATIVE_MAX_ATTEN_VALUE
-
-            atten_value_list = [MIN_ATTEN_VALUE, atten_max_value]
-            for i in range(0, self.number_of_iteration):
-                atten_value_list.append(int(random.uniform(0, atten_max_value)))
-
-            for atten_val in atten_value_list:
-                self.log.info("Set atten to {}".format(atten_val))
-                atten.set_atten(atten_val)
-                current_atten = int(atten.get_atten())
-                self.log.info("Current atten = {}".format(current_atten))
-                assert atten_val == current_atten, "Setting attenuator failed."
-
-        return True
-
-    def test_attenuator_get_max_value(self):
-        """Validate attenuator get_max_atten APIs works fine."""
-        for atten in self.attenuators:
-            try:
-                atten_max_value = atten.get_max_atten()
-            except ValueError as e:
-                self.log.error(e)
-                return False
-        return True
diff --git a/src/antlion/unit_tests/IntegrationTest.py b/src/antlion/unit_tests/IntegrationTest.py
deleted file mode 100755
index 8a55e80..0000000
--- a/src/antlion/unit_tests/IntegrationTest.py
+++ /dev/null
@@ -1,35 +0,0 @@
-#!/usr/bin/python3.4
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion import base_test
-from antlion import test_runner
-
-from mobly import asserts
-
-import mock_controller
-
-
-class IntegrationTest(base_test.BaseTestClass):
-    def setup_class(self):
-        self.register_controller(mock_controller)
-
-    def test_hello_world(self):
-        asserts.assert_equal(self.user_params["icecream"], 42)
-        asserts.assert_equal(self.user_params["extra_param"], "haha")
-        self.log.info(
-            "This is a bare minimal test to make sure the basic ACTS" "test flow works."
-        )
-        asserts.explicit_pass("Hello World")
diff --git a/src/antlion/unit_tests/Sl4aSanityTest.py b/src/antlion/unit_tests/Sl4aSanityTest.py
deleted file mode 100644
index f505044..0000000
--- a/src/antlion/unit_tests/Sl4aSanityTest.py
+++ /dev/null
@@ -1,44 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion.base_test import BaseTestClass
-
-
-class Sl4aSanityTest(BaseTestClass):
-    """Tests for sl4a basic sanity.
-
-    Run these tests individually with option -r 100.
-    """
-
-    def __init__(self, controllers):
-        BaseTestClass.__init__(self, controllers)
-        self.tests = ("test_bring_up_and_shutdown", "test_message_then_shutdown_stress")
-
-    def test_bring_up_and_shutdown(self):
-        """Constantly start and terminate sl4a sessions.
-
-        Verify in log that the "manager map key" is always empty before a
-        session starts.
-        Verify in log by looking at timestamps that after the test finishes, no
-        more message regarding sl4a happens.
-        """
-        ad = self.android_devices[0]
-        for i in range(100):
-            self.log.info("Iteration %d, terminating." % i)
-            ad.terminate_all_sessions()
-            self.log.info("Iteration %d, starting." % i)
-            droid, ed = ad.get_droid()
-        return True
diff --git a/src/antlion/unit_tests/SnifferSanityTest.py b/src/antlion/unit_tests/SnifferSanityTest.py
deleted file mode 100644
index 6332143..0000000
--- a/src/antlion/unit_tests/SnifferSanityTest.py
+++ /dev/null
@@ -1,79 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from antlion import base_test
-from antlion.controllers.sniffer import Sniffer
-
-
-class SnifferSanityTest(base_test.BaseTestClass):
-    def setup_class(self):
-        self._channels = [6, 44]
-
-        # capture (sniff) for 30 seconds or 10 packets - whichever comes first
-        self._capture_sec = 30
-        self._packet_count = 10
-
-        self._filter = {
-            "tcpdump": "type mgt subtype beacon",
-            "tshark": "type mgt subtype beacon",
-        }
-
-    def test_sniffer_validation_using_with(self):
-        """Validate sniffer configuration & capture API using the 'with' clause.
-
-        This is the standard example - this syntax should typically be used.
-        """
-        index = 0
-        for sniffer in self.sniffers:
-            for channel in self._channels:
-                with sniffer.start_capture(
-                    override_configs={Sniffer.CONFIG_KEY_CHANNEL: channel},
-                    duration=self._capture_sec,
-                    packet_count=self._packet_count,
-                ):
-                    self.log.info("Capture: %s", sniffer.get_capture_file())
-
-    def test_sniffer_validation_manual(self):
-        """Validate sniffer configuration & capture API using a manual/raw
-        API mechanism.
-
-        The standard process should use a with clause. This demonstrates the
-        manual process which uses an explicit wait_for_capture() call.
-        Alternatively, could also use a sleep() + stop_capture() process
-        (though that mechanism won't terminate early if the capture is done).
-        """
-        index = 0
-        for sniffer in self.sniffers:
-            for channel in self._channels:
-                sniffer.start_capture(
-                    override_configs={Sniffer.CONFIG_KEY_CHANNEL: channel},
-                    packet_count=self._packet_count,
-                )
-                self.log.info("Capture: %s", sniffer.get_capture_file())
-                sniffer.wait_for_capture(timeout=self._capture_sec)
-
-    def test_sniffer_validation_capture_3_beacons(self):
-        """Demonstrate the use of additional configuration."""
-        index = 0
-        for sniffer in self.sniffers:
-            for channel in self._channels:
-                with sniffer.start_capture(
-                    override_configs={Sniffer.CONFIG_KEY_CHANNEL: channel},
-                    duration=self._capture_sec,
-                    packet_count=3,
-                    additional_args=self._filter[sniffer.get_subtype()],
-                ):
-                    self.log.info("Capture: %s", sniffer.get_capture_file())
diff --git a/src/antlion/unit_tests/acts_base_class_test.py b/src/antlion/unit_tests/acts_base_class_test.py
deleted file mode 100755
index a0b9a1d..0000000
--- a/src/antlion/unit_tests/acts_base_class_test.py
+++ /dev/null
@@ -1,1128 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import shutil
-import tempfile
-import unittest
-
-import mock
-import mock_controller
-
-from antlion import base_test
-from antlion import signals
-
-from mobly import asserts
-from mobly import base_test as mobly_base_test
-import mobly.config_parser as mobly_config_parser
-
-MSG_EXPECTED_EXCEPTION = "This is an expected exception."
-MSG_EXPECTED_TEST_FAILURE = "This is an expected test failure."
-MSG_UNEXPECTED_EXCEPTION = "Unexpected exception!"
-
-MOCK_EXTRA = {"key": "value", "answer_to_everything": 42}
-
-
-def never_call():
-    raise Exception(MSG_UNEXPECTED_EXCEPTION)
-
-
-class SomeError(Exception):
-    """A custom exception class used for tests in this module."""
-
-
-class ActsBaseClassTest(unittest.TestCase):
-    def setUp(self):
-        self.tmp_dir = tempfile.mkdtemp()
-        self.tb_key = "testbed_configs"
-        self.test_run_config = mobly_config_parser.TestRunConfig()
-        self.test_run_config.testbed_name = "SampleTestBed"
-        self.test_run_config.controller_configs = {
-            self.tb_key: {
-                "name": self.test_run_config.testbed_name,
-            },
-        }
-        self.test_run_config.log_path = self.tmp_dir
-        self.test_run_config.user_params = {"some_param": "hahaha"}
-        self.test_run_config.summary_writer = mock.MagicMock()
-        self.mock_test_name = "test_something"
-
-    def tearDown(self):
-        shutil.rmtree(self.tmp_dir)
-
-    def test_current_test_case_name(self):
-        class MockBaseTest(base_test.BaseTestClass):
-            def test_func(self):
-                asserts.assert_true(
-                    self.current_test_name == "test_func",
-                    "Got unexpected test name %s." % self.current_test_name,
-                )
-
-        bt_cls = MockBaseTest(self.test_run_config)
-        bt_cls.run(test_names=["test_func"])
-        actual_record = bt_cls.results.passed[0]
-        self.assertEqual(actual_record.test_name, "test_func")
-        self.assertIsNone(actual_record.details)
-        self.assertIsNone(actual_record.extras)
-
-    def test_self_tests_list(self):
-        class MockBaseTest(base_test.BaseTestClass):
-            def __init__(self, controllers):
-                super(MockBaseTest, self).__init__(controllers)
-                self.tests = ("test_something",)
-
-            def test_something(self):
-                pass
-
-            def test_never(self):
-                # This should not execute it's not on default test list.
-                never_call()
-
-        bt_cls = MockBaseTest(self.test_run_config)
-        bt_cls.run()
-        actual_record = bt_cls.results.passed[0]
-        self.assertEqual(actual_record.test_name, "test_something")
-
-    def test_cli_test_selection_match_self_tests_list(self):
-        class MockBaseTest(base_test.BaseTestClass):
-            def __init__(self, controllers):
-                super(MockBaseTest, self).__init__(controllers)
-                self.tests = (
-                    "test_star1",
-                    "test_star2",
-                    "test_question_mark",
-                    "test_char_seq",
-                    "test_no_match",
-                )
-
-            def test_star1(self):
-                pass
-
-            def test_star2(self):
-                pass
-
-            def test_question_mark(self):
-                pass
-
-            def test_char_seq(self):
-                pass
-
-            def test_no_match(self):
-                # This should not execute because it does not match any regex
-                # in the cmd line input.
-                never_call()
-
-        bt_cls = MockBaseTest(self.test_run_config)
-        test_names = [
-            "test_st*r1",
-            "test_*2",
-            "test_?uestion_mark",
-            "test_c[fghi]ar_seq",
-        ]
-        bt_cls.run(test_names=test_names)
-        passed_names = [p.test_name for p in bt_cls.results.passed]
-        self.assertEqual(len(passed_names), len(test_names))
-        for test in ["test_star1", "test_star2", "test_question_mark", "test_char_seq"]:
-            self.assertIn(test, passed_names)
-
-    def test_default_execution_of_all_tests(self):
-        class MockBaseTest(base_test.BaseTestClass):
-            def test_something(self):
-                pass
-
-            def not_a_test(self):
-                # This should not execute its name doesn't follow test case
-                # naming convention.
-                never_call()
-
-        bt_cls = MockBaseTest(self.test_run_config)
-        bt_cls.run()
-        actual_record = bt_cls.results.passed[0]
-        self.assertEqual(actual_record.test_name, "test_something")
-
-    def test_setup_class_fail_by_exception(self):
-        call_check = mock.MagicMock()
-
-        class MockBaseTest(base_test.BaseTestClass):
-            def setup_class(self):
-                raise Exception(MSG_EXPECTED_EXCEPTION)
-
-            def test_something(self):
-                # This should not execute because setup_class failed.
-                never_call()
-
-            def on_skip(self, test_name, begin_time):
-                call_check("haha")
-
-        bt_cls = MockBaseTest(self.test_run_config)
-        bt_cls.run()
-        actual_record = bt_cls.results.error[0]
-        self.assertEqual(actual_record.test_name, "test_something")
-        expected_summary = {
-            "Error": 1,
-            "Executed": 1,
-            "Failed": 0,
-            "Passed": 0,
-            "Requested": 1,
-            "Skipped": 0,
-        }
-        self.assertEqual(bt_cls.results.summary_dict(), expected_summary)
-        call_check.assert_called_once_with("haha")
-
-    def test_setup_test_fail_by_exception(self):
-        class MockBaseTest(base_test.BaseTestClass):
-            def setup_test(self):
-                raise Exception(MSG_EXPECTED_EXCEPTION)
-
-            def test_something(self):
-                # This should not execute because setup_test failed.
-                never_call()
-
-        bt_cls = MockBaseTest(self.test_run_config)
-        bt_cls.run(test_names=["test_something"])
-        actual_record = bt_cls.results.error[0]
-        self.assertEqual(actual_record.test_name, self.mock_test_name)
-        self.assertEqual(actual_record.details, MSG_EXPECTED_EXCEPTION)
-        self.assertIsNone(actual_record.extras)
-        expected_summary = {
-            "Error": 1,
-            "Executed": 1,
-            "Failed": 0,
-            "Passed": 0,
-            "Requested": 1,
-            "Skipped": 0,
-        }
-        self.assertEqual(bt_cls.results.summary_dict(), expected_summary)
-
-    def test_setup_test_fail_by_test_signal(self):
-        class MockBaseTest(base_test.BaseTestClass):
-            def setup_test(self):
-                raise signals.TestFailure(MSG_EXPECTED_EXCEPTION)
-
-            def test_something(self):
-                # This should not execute because setup_test failed.
-                never_call()
-
-        bt_cls = MockBaseTest(self.test_run_config)
-        bt_cls.run(test_names=["test_something"])
-        actual_record = bt_cls.results.failed[0]
-        self.assertEqual(actual_record.test_name, self.mock_test_name)
-        self.assertEqual(actual_record.details, MSG_EXPECTED_EXCEPTION)
-        self.assertIsNone(actual_record.extras)
-        expected_summary = {
-            "Error": 0,
-            "Executed": 1,
-            "Failed": 1,
-            "Passed": 0,
-            "Requested": 1,
-            "Skipped": 0,
-        }
-        self.assertEqual(bt_cls.results.summary_dict(), expected_summary)
-
-    def test_setup_test_fail_by_return_False(self):
-        class MockBaseTest(base_test.BaseTestClass):
-            def setup_test(self):
-                return False
-
-            def test_something(self):
-                # This should not execute because setup_test failed.
-                never_call()
-
-        bt_cls = MockBaseTest(self.test_run_config)
-        bt_cls.run(test_names=["test_something"])
-        actual_record = bt_cls.results.failed[0]
-        expected_msg = "Setup for %s failed." % self.mock_test_name
-        self.assertEqual(actual_record.test_name, self.mock_test_name)
-        self.assertEqual(actual_record.details, expected_msg)
-        self.assertIsNone(actual_record.extras)
-        expected_summary = {
-            "Error": 0,
-            "Executed": 1,
-            "Failed": 1,
-            "Passed": 0,
-            "Requested": 1,
-            "Skipped": 0,
-        }
-        self.assertEqual(bt_cls.results.summary_dict(), expected_summary)
-
-    def test_teardown_test_assert_fail(self):
-        class MockBaseTest(base_test.BaseTestClass):
-            def teardown_test(self):
-                asserts.assert_true(False, MSG_EXPECTED_EXCEPTION)
-
-            def test_something(self):
-                pass
-
-        bt_cls = MockBaseTest(self.test_run_config)
-        bt_cls.run()
-        actual_record = bt_cls.results.error[0]
-        self.assertEqual(actual_record.test_name, self.mock_test_name)
-        self.assertEqual(actual_record.details, MSG_EXPECTED_EXCEPTION)
-        self.assertIsNone(actual_record.extras)
-        expected_summary = {
-            "Error": 1,
-            "Executed": 1,
-            "Failed": 0,
-            "Passed": 0,
-            "Requested": 1,
-            "Skipped": 0,
-        }
-        self.assertEqual(bt_cls.results.summary_dict(), expected_summary)
-
-    def test_teardown_test_raise_exception(self):
-        class MockBaseTest(base_test.BaseTestClass):
-            def teardown_test(self):
-                raise Exception(MSG_EXPECTED_EXCEPTION)
-
-            def test_something(self):
-                pass
-
-        bt_cls = MockBaseTest(self.test_run_config)
-        bt_cls.run()
-        actual_record = bt_cls.results.error[0]
-        self.assertEqual(actual_record.test_name, self.mock_test_name)
-        self.assertEqual(actual_record.details, MSG_EXPECTED_EXCEPTION)
-        self.assertIsNone(actual_record.extras)
-        expected_summary = {
-            "Error": 1,
-            "Executed": 1,
-            "Failed": 0,
-            "Passed": 0,
-            "Requested": 1,
-            "Skipped": 0,
-        }
-        self.assertEqual(bt_cls.results.summary_dict(), expected_summary)
-
-    def test_teardown_test_executed_if_test_pass(self):
-        my_mock = mock.MagicMock()
-
-        class MockBaseTest(base_test.BaseTestClass):
-            def teardown_test(self):
-                my_mock("teardown_test")
-
-            def test_something(self):
-                pass
-
-        bt_cls = MockBaseTest(self.test_run_config)
-        bt_cls.run()
-        actual_record = bt_cls.results.passed[0]
-        my_mock.assert_called_once_with("teardown_test")
-        self.assertEqual(actual_record.test_name, self.mock_test_name)
-        self.assertIsNone(actual_record.details)
-        self.assertIsNone(actual_record.extras)
-        expected_summary = {
-            "Error": 0,
-            "Executed": 1,
-            "Failed": 0,
-            "Passed": 1,
-            "Requested": 1,
-            "Skipped": 0,
-        }
-        self.assertEqual(bt_cls.results.summary_dict(), expected_summary)
-
-    def test_teardown_test_executed_if_setup_test_fails(self):
-        my_mock = mock.MagicMock()
-
-        class MockBaseTest(base_test.BaseTestClass):
-            def setup_test(self):
-                raise Exception(MSG_EXPECTED_EXCEPTION)
-
-            def teardown_test(self):
-                my_mock("teardown_test")
-
-            def test_something(self):
-                pass
-
-        bt_cls = MockBaseTest(self.test_run_config)
-        bt_cls.run()
-        actual_record = bt_cls.results.error[0]
-        my_mock.assert_called_once_with("teardown_test")
-        self.assertEqual(actual_record.test_name, self.mock_test_name)
-        self.assertEqual(actual_record.details, MSG_EXPECTED_EXCEPTION)
-        self.assertIsNone(actual_record.extras)
-        expected_summary = {
-            "Error": 1,
-            "Executed": 1,
-            "Failed": 0,
-            "Passed": 0,
-            "Requested": 1,
-            "Skipped": 0,
-        }
-        self.assertEqual(bt_cls.results.summary_dict(), expected_summary)
-
-    def test_teardown_test_executed_if_test_fails(self):
-        my_mock = mock.MagicMock()
-
-        class MockBaseTest(base_test.BaseTestClass):
-            def teardown_test(self):
-                my_mock("teardown_test")
-
-            def test_something(self):
-                raise Exception(MSG_EXPECTED_EXCEPTION)
-
-        bt_cls = MockBaseTest(self.test_run_config)
-        bt_cls.run()
-        actual_record = bt_cls.results.error[0]
-        my_mock.assert_called_once_with("teardown_test")
-        self.assertEqual(actual_record.test_name, self.mock_test_name)
-        self.assertEqual(actual_record.details, MSG_EXPECTED_EXCEPTION)
-        self.assertIsNone(actual_record.extras)
-        expected_summary = {
-            "Error": 1,
-            "Executed": 1,
-            "Failed": 0,
-            "Passed": 0,
-            "Requested": 1,
-            "Skipped": 0,
-        }
-        self.assertEqual(bt_cls.results.summary_dict(), expected_summary)
-
-    def test_on_exception_executed_if_teardown_test_fails(self):
-        my_mock = mock.MagicMock()
-
-        class MockBaseTest(base_test.BaseTestClass):
-            def on_exception(self, test_name, begin_time):
-                my_mock("on_exception")
-
-            def teardown_test(self):
-                raise Exception(MSG_EXPECTED_EXCEPTION)
-
-            def test_something(self):
-                pass
-
-        bt_cls = MockBaseTest(self.test_run_config)
-        bt_cls.run()
-        my_mock.assert_called_once_with("on_exception")
-        actual_record = bt_cls.results.error[0]
-        self.assertEqual(actual_record.test_name, self.mock_test_name)
-        self.assertEqual(actual_record.details, MSG_EXPECTED_EXCEPTION)
-        self.assertIsNone(actual_record.extras)
-        expected_summary = {
-            "Error": 1,
-            "Executed": 1,
-            "Failed": 0,
-            "Passed": 0,
-            "Requested": 1,
-            "Skipped": 0,
-        }
-        self.assertEqual(bt_cls.results.summary_dict(), expected_summary)
-
-    def test_on_fail_executed_if_test_fails(self):
-        my_mock = mock.MagicMock()
-
-        class MockBaseTest(base_test.BaseTestClass):
-            def on_fail(self, test_name, begin_time):
-                my_mock("on_fail")
-
-            def test_something(self):
-                asserts.assert_true(False, MSG_EXPECTED_EXCEPTION)
-
-        bt_cls = MockBaseTest(self.test_run_config)
-        bt_cls.run()
-        my_mock.assert_called_once_with("on_fail")
-        actual_record = bt_cls.results.failed[0]
-        self.assertEqual(actual_record.test_name, self.mock_test_name)
-        self.assertEqual(actual_record.details, MSG_EXPECTED_EXCEPTION)
-        self.assertIsNone(actual_record.extras)
-        expected_summary = {
-            "Error": 0,
-            "Executed": 1,
-            "Failed": 1,
-            "Passed": 0,
-            "Requested": 1,
-            "Skipped": 0,
-        }
-        self.assertEqual(bt_cls.results.summary_dict(), expected_summary)
-
-    def test_on_fail_executed_if_test_setup_fails_by_exception(self):
-        my_mock = mock.MagicMock()
-
-        class MockBaseTest(base_test.BaseTestClass):
-            def setup_test(self):
-                raise Exception(MSG_EXPECTED_EXCEPTION)
-
-            def on_fail(self, test_name, begin_time):
-                my_mock("on_fail")
-
-            def test_something(self):
-                pass
-
-        bt_cls = MockBaseTest(self.test_run_config)
-        bt_cls.run()
-        my_mock.assert_called_once_with("on_fail")
-        actual_record = bt_cls.results.error[0]
-        self.assertEqual(actual_record.test_name, self.mock_test_name)
-        self.assertEqual(actual_record.details, MSG_EXPECTED_EXCEPTION)
-        self.assertIsNone(actual_record.extras)
-        expected_summary = {
-            "Error": 1,
-            "Executed": 1,
-            "Failed": 0,
-            "Passed": 0,
-            "Requested": 1,
-            "Skipped": 0,
-        }
-        self.assertEqual(bt_cls.results.summary_dict(), expected_summary)
-
-    def test_on_fail_executed_if_test_setup_fails_by_return_False(self):
-        my_mock = mock.MagicMock()
-
-        class MockBaseTest(base_test.BaseTestClass):
-            def setup_test(self):
-                return False
-
-            def on_fail(self, test_name, begin_time):
-                my_mock("on_fail")
-
-            def test_something(self):
-                pass
-
-        bt_cls = MockBaseTest(self.test_run_config)
-        bt_cls.run()
-        my_mock.assert_called_once_with("on_fail")
-        actual_record = bt_cls.results.failed[0]
-        self.assertEqual(actual_record.test_name, self.mock_test_name)
-        self.assertEqual(actual_record.details, "Setup for test_something failed.")
-        self.assertIsNone(actual_record.extras)
-        expected_summary = {
-            "Error": 0,
-            "Executed": 1,
-            "Failed": 1,
-            "Passed": 0,
-            "Requested": 1,
-            "Skipped": 0,
-        }
-        self.assertEqual(bt_cls.results.summary_dict(), expected_summary)
-
-    def test_failure_to_call_procedure_function_is_recorded(self):
-        class MockBaseTest(base_test.BaseTestClass):
-            # Wrong method signature; will raise exception
-            def on_pass(self):
-                pass
-
-            def test_something(self):
-                asserts.explicit_pass(MSG_EXPECTED_EXCEPTION)
-
-        bt_cls = MockBaseTest(self.test_run_config)
-        bt_cls.run()
-        actual_record = bt_cls.results.error[0]
-        self.assertIn("_on_pass", actual_record.extra_errors)
-        self.assertEqual(actual_record.test_name, self.mock_test_name)
-        self.assertEqual(actual_record.details, MSG_EXPECTED_EXCEPTION)
-        self.assertIsNone(actual_record.extras)
-        expected_summary = {
-            "Error": 1,
-            "Executed": 1,
-            "Failed": 0,
-            "Passed": 0,
-            "Requested": 1,
-            "Skipped": 0,
-        }
-        self.assertEqual(bt_cls.results.summary_dict(), expected_summary)
-
-    def test_failure_in_procedure_functions_is_recorded(self):
-        expected_msg = "Something failed in on_pass."
-
-        class MockBaseTest(base_test.BaseTestClass):
-            def on_pass(self, test_name, begin_time):
-                raise Exception(expected_msg)
-
-            def test_something(self):
-                asserts.explicit_pass(MSG_EXPECTED_EXCEPTION)
-
-        bt_cls = MockBaseTest(self.test_run_config)
-        bt_cls.run()
-        actual_record = bt_cls.results.error[0]
-        self.assertEqual(actual_record.test_name, self.mock_test_name)
-        self.assertEqual(actual_record.details, MSG_EXPECTED_EXCEPTION)
-        self.assertIsNone(actual_record.extras)
-        expected_summary = {
-            "Error": 1,
-            "Executed": 1,
-            "Failed": 0,
-            "Passed": 0,
-            "Requested": 1,
-            "Skipped": 0,
-        }
-        self.assertEqual(bt_cls.results.summary_dict(), expected_summary)
-
-    def test_both_teardown_and_test_body_raise_exceptions(self):
-        class MockBaseTest(base_test.BaseTestClass):
-            def teardown_test(self):
-                asserts.assert_true(False, MSG_EXPECTED_EXCEPTION)
-
-            def test_something(self):
-                raise Exception("Test Body Exception.")
-
-        bt_cls = MockBaseTest(self.test_run_config)
-        bt_cls.run()
-        actual_record = bt_cls.results.error[0]
-        self.assertEqual(actual_record.test_name, self.mock_test_name)
-        self.assertEqual(actual_record.details, "Test Body Exception.")
-        self.assertIsNone(actual_record.extras)
-        self.assertEqual(
-            actual_record.extra_errors["teardown_test"].details,
-            "This is an expected exception.",
-        )
-        expected_summary = {
-            "Error": 1,
-            "Executed": 1,
-            "Failed": 0,
-            "Passed": 0,
-            "Requested": 1,
-            "Skipped": 0,
-        }
-        self.assertEqual(bt_cls.results.summary_dict(), expected_summary)
-
-    def test_explicit_pass_but_teardown_test_raises_an_exception(self):
-        """Test record result should be marked as UNKNOWN as opposed to PASS."""
-
-        class MockBaseTest(base_test.BaseTestClass):
-            def teardown_test(self):
-                asserts.assert_true(False, MSG_EXPECTED_EXCEPTION)
-
-            def test_something(self):
-                asserts.explicit_pass("Test Passed!")
-
-        bt_cls = MockBaseTest(self.test_run_config)
-        bt_cls.run()
-        actual_record = bt_cls.results.error[0]
-        self.assertEqual(actual_record.test_name, self.mock_test_name)
-        self.assertEqual(actual_record.details, "Test Passed!")
-        self.assertIsNone(actual_record.extras)
-        self.assertEqual(
-            actual_record.extra_errors["teardown_test"].details,
-            "This is an expected exception.",
-        )
-        expected_summary = {
-            "Error": 1,
-            "Executed": 1,
-            "Failed": 0,
-            "Passed": 0,
-            "Requested": 1,
-            "Skipped": 0,
-        }
-        self.assertEqual(bt_cls.results.summary_dict(), expected_summary)
-
-    def test_on_pass_raise_exception(self):
-        class MockBaseTest(base_test.BaseTestClass):
-            def on_pass(self, test_name, begin_time):
-                raise Exception(MSG_EXPECTED_EXCEPTION)
-
-            def test_something(self):
-                asserts.explicit_pass(MSG_EXPECTED_EXCEPTION, extras=MOCK_EXTRA)
-
-        bt_cls = MockBaseTest(self.test_run_config)
-        bt_cls.run()
-        actual_record = bt_cls.results.error[0]
-        self.assertEqual(actual_record.test_name, self.mock_test_name)
-        self.assertEqual(actual_record.details, MSG_EXPECTED_EXCEPTION)
-        self.assertEqual(actual_record.extras, MOCK_EXTRA)
-        expected_summary = {
-            "Error": 1,
-            "Executed": 1,
-            "Failed": 0,
-            "Passed": 0,
-            "Requested": 1,
-            "Skipped": 0,
-        }
-        self.assertEqual(bt_cls.results.summary_dict(), expected_summary)
-
-    def test_on_fail_raise_exception(self):
-        class MockBaseTest(base_test.BaseTestClass):
-            def on_fail(self, test_name, begin_time):
-                raise Exception(MSG_EXPECTED_EXCEPTION)
-
-            def test_something(self):
-                asserts.fail(MSG_EXPECTED_EXCEPTION, extras=MOCK_EXTRA)
-
-        bt_cls = MockBaseTest(self.test_run_config)
-        bt_cls.run()
-        actual_record = bt_cls.results.failed[0]
-        self.assertEqual(bt_cls.results.error, [])
-        self.assertEqual(actual_record.test_name, self.mock_test_name)
-        self.assertEqual(actual_record.details, MSG_EXPECTED_EXCEPTION)
-        self.assertEqual(actual_record.extras, MOCK_EXTRA)
-        expected_summary = {
-            "Error": 0,
-            "Executed": 1,
-            "Failed": 1,
-            "Passed": 0,
-            "Requested": 1,
-            "Skipped": 0,
-        }
-        self.assertEqual(bt_cls.results.summary_dict(), expected_summary)
-
-    def test_abort_class(self):
-        class MockBaseTest(base_test.BaseTestClass):
-            def test_1(self):
-                pass
-
-            def test_2(self):
-                asserts.abort_class(MSG_EXPECTED_EXCEPTION)
-                never_call()
-
-            def test_3(self):
-                never_call()
-
-        bt_cls = MockBaseTest(self.test_run_config)
-        bt_cls.run(test_names=["test_1", "test_2", "test_3"])
-        self.assertEqual(bt_cls.results.passed[0].test_name, "test_1")
-        self.assertEqual(bt_cls.results.failed[0].details, MSG_EXPECTED_EXCEPTION)
-        expected_summary = {
-            "Error": 0,
-            "Executed": 2,
-            "Failed": 1,
-            "Passed": 1,
-            "Requested": 3,
-            "Skipped": 0,
-        }
-        self.assertEqual(bt_cls.results.summary_dict(), expected_summary)
-
-    def test_uncaught_exception(self):
-        class MockBaseTest(base_test.BaseTestClass):
-            def test_func(self):
-                raise Exception(MSG_EXPECTED_EXCEPTION)
-                never_call()
-
-        bt_cls = MockBaseTest(self.test_run_config)
-        bt_cls.run(test_names=["test_func"])
-        actual_record = bt_cls.results.error[0]
-        self.assertEqual(actual_record.test_name, "test_func")
-        self.assertEqual(actual_record.details, MSG_EXPECTED_EXCEPTION)
-        self.assertIsNone(actual_record.extras)
-
-    def test_fail(self):
-        class MockBaseTest(base_test.BaseTestClass):
-            def test_func(self):
-                asserts.fail(MSG_EXPECTED_EXCEPTION, extras=MOCK_EXTRA)
-                never_call()
-
-        bt_cls = MockBaseTest(self.test_run_config)
-        bt_cls.run(test_names=["test_func"])
-        actual_record = bt_cls.results.failed[0]
-        self.assertEqual(actual_record.test_name, "test_func")
-        self.assertEqual(actual_record.details, MSG_EXPECTED_EXCEPTION)
-        self.assertEqual(actual_record.extras, MOCK_EXTRA)
-
-    def test_assert_true(self):
-        class MockBaseTest(base_test.BaseTestClass):
-            def test_func(self):
-                asserts.assert_true(False, MSG_EXPECTED_EXCEPTION, extras=MOCK_EXTRA)
-                never_call()
-
-        bt_cls = MockBaseTest(self.test_run_config)
-        bt_cls.run(test_names=["test_func"])
-        actual_record = bt_cls.results.failed[0]
-        self.assertEqual(actual_record.test_name, "test_func")
-        self.assertEqual(actual_record.details, MSG_EXPECTED_EXCEPTION)
-        self.assertEqual(actual_record.extras, MOCK_EXTRA)
-
-    def test_assert_equal_pass(self):
-        class MockBaseTest(base_test.BaseTestClass):
-            def test_func(self):
-                asserts.assert_equal(1, 1, extras=MOCK_EXTRA)
-
-        bt_cls = MockBaseTest(self.test_run_config)
-        bt_cls.run()
-        actual_record = bt_cls.results.passed[0]
-        self.assertEqual(actual_record.test_name, "test_func")
-        self.assertIsNone(actual_record.details)
-        self.assertIsNone(actual_record.extras)
-
-    def test_assert_equal_fail(self):
-        class MockBaseTest(base_test.BaseTestClass):
-            def test_func(self):
-                asserts.assert_equal(1, 2, extras=MOCK_EXTRA)
-
-        bt_cls = MockBaseTest(self.test_run_config)
-        bt_cls.run()
-        actual_record = bt_cls.results.failed[0]
-        self.assertEqual(actual_record.test_name, "test_func")
-        self.assertIn("1 != 2", actual_record.details)
-        self.assertEqual(actual_record.extras, MOCK_EXTRA)
-
-    def test_assert_equal_fail_with_msg(self):
-        class MockBaseTest(base_test.BaseTestClass):
-            def test_func(self):
-                asserts.assert_equal(
-                    1, 2, msg=MSG_EXPECTED_EXCEPTION, extras=MOCK_EXTRA
-                )
-
-        bt_cls = MockBaseTest(self.test_run_config)
-        bt_cls.run()
-        actual_record = bt_cls.results.failed[0]
-        self.assertEqual(actual_record.test_name, "test_func")
-        expected_msg = "1 != 2 " + MSG_EXPECTED_EXCEPTION
-        self.assertIn(expected_msg, actual_record.details)
-        self.assertEqual(actual_record.extras, MOCK_EXTRA)
-
-    def test_assert_raises_pass(self):
-        class MockBaseTest(base_test.BaseTestClass):
-            def test_func(self):
-                with asserts.assert_raises(SomeError, extras=MOCK_EXTRA):
-                    raise SomeError(MSG_EXPECTED_EXCEPTION)
-
-        bt_cls = MockBaseTest(self.test_run_config)
-        bt_cls.run()
-        actual_record = bt_cls.results.passed[0]
-        self.assertEqual(actual_record.test_name, "test_func")
-        self.assertIsNone(actual_record.details)
-        self.assertIsNone(actual_record.extras)
-
-    def test_assert_raises_regex_pass(self):
-        class MockBaseTest(base_test.BaseTestClass):
-            def test_func(self):
-                with asserts.assert_raises_regex(
-                    SomeError, expected_regex=MSG_EXPECTED_EXCEPTION, extras=MOCK_EXTRA
-                ):
-                    raise SomeError(MSG_EXPECTED_EXCEPTION)
-
-        bt_cls = MockBaseTest(self.test_run_config)
-        bt_cls.run()
-        actual_record = bt_cls.results.passed[0]
-        self.assertEqual(actual_record.test_name, "test_func")
-        self.assertIsNone(actual_record.details)
-        self.assertIsNone(actual_record.extras)
-
-    def test_assert_raises_fail_with_noop(self):
-        class MockBaseTest(base_test.BaseTestClass):
-            def test_func(self):
-                with asserts.assert_raises_regex(
-                    SomeError, expected_regex=MSG_EXPECTED_EXCEPTION, extras=MOCK_EXTRA
-                ):
-                    pass
-
-        bt_cls = MockBaseTest(self.test_run_config)
-        bt_cls.run()
-        actual_record = bt_cls.results.failed[0]
-        self.assertEqual(actual_record.test_name, "test_func")
-        self.assertEqual(actual_record.details, "SomeError not raised")
-        self.assertEqual(actual_record.extras, MOCK_EXTRA)
-
-    def test_assert_raises_fail_with_wrong_regex(self):
-        wrong_msg = "ha"
-
-        class MockBaseTest(base_test.BaseTestClass):
-            def test_func(self):
-                with asserts.assert_raises_regex(
-                    SomeError, expected_regex=MSG_EXPECTED_EXCEPTION, extras=MOCK_EXTRA
-                ):
-                    raise SomeError(wrong_msg)
-
-        bt_cls = MockBaseTest(self.test_run_config)
-        bt_cls.run()
-        actual_record = bt_cls.results.failed[0]
-        self.assertEqual(actual_record.test_name, "test_func")
-        expected_details = (
-            '"This is an expected exception." does not match ' '"%s"'
-        ) % wrong_msg
-        self.assertEqual(actual_record.details, expected_details)
-        self.assertEqual(actual_record.extras, MOCK_EXTRA)
-
-    def test_assert_raises_fail_with_wrong_error(self):
-        class MockBaseTest(base_test.BaseTestClass):
-            def test_func(self):
-                with asserts.assert_raises_regex(
-                    SomeError, expected_regex=MSG_EXPECTED_EXCEPTION, extras=MOCK_EXTRA
-                ):
-                    raise AttributeError(MSG_UNEXPECTED_EXCEPTION)
-
-        bt_cls = MockBaseTest(self.test_run_config)
-        bt_cls.run()
-        actual_record = bt_cls.results.error[0]
-        self.assertEqual(actual_record.test_name, "test_func")
-        self.assertEqual(actual_record.details, MSG_UNEXPECTED_EXCEPTION)
-        self.assertIsNone(actual_record.extras)
-
-    def test_explicit_pass(self):
-        class MockBaseTest(base_test.BaseTestClass):
-            def test_func(self):
-                asserts.explicit_pass(MSG_EXPECTED_EXCEPTION, extras=MOCK_EXTRA)
-                never_call()
-
-        bt_cls = MockBaseTest(self.test_run_config)
-        bt_cls.run(test_names=["test_func"])
-        actual_record = bt_cls.results.passed[0]
-        self.assertEqual(actual_record.test_name, "test_func")
-        self.assertEqual(actual_record.details, MSG_EXPECTED_EXCEPTION)
-        self.assertEqual(actual_record.extras, MOCK_EXTRA)
-
-    def test_implicit_pass(self):
-        class MockBaseTest(base_test.BaseTestClass):
-            def test_func(self):
-                pass
-
-        bt_cls = MockBaseTest(self.test_run_config)
-        bt_cls.run(test_names=["test_func"])
-        actual_record = bt_cls.results.passed[0]
-        self.assertEqual(actual_record.test_name, "test_func")
-        self.assertIsNone(actual_record.details)
-        self.assertIsNone(actual_record.extras)
-
-    def test_skip(self):
-        class MockBaseTest(base_test.BaseTestClass):
-            def test_func(self):
-                asserts.skip(MSG_EXPECTED_EXCEPTION, extras=MOCK_EXTRA)
-                never_call()
-
-        bt_cls = MockBaseTest(self.test_run_config)
-        bt_cls.run(test_names=["test_func"])
-        actual_record = bt_cls.results.skipped[0]
-        self.assertEqual(actual_record.test_name, "test_func")
-        self.assertEqual(actual_record.details, MSG_EXPECTED_EXCEPTION)
-        self.assertEqual(actual_record.extras, MOCK_EXTRA)
-
-    def test_skip_if(self):
-        class MockBaseTest(base_test.BaseTestClass):
-            def test_func(self):
-                asserts.skip_if(False, MSG_UNEXPECTED_EXCEPTION)
-                asserts.skip_if(True, MSG_EXPECTED_EXCEPTION, extras=MOCK_EXTRA)
-                never_call()
-
-        bt_cls = MockBaseTest(self.test_run_config)
-        bt_cls.run(test_names=["test_func"])
-        actual_record = bt_cls.results.skipped[0]
-        self.assertEqual(actual_record.test_name, "test_func")
-        self.assertEqual(actual_record.details, MSG_EXPECTED_EXCEPTION)
-        self.assertEqual(actual_record.extras, MOCK_EXTRA)
-
-    def test_unpack_userparams_required(self):
-        """Missing a required param should raise an error."""
-        required = ["some_param"]
-        bc = base_test.BaseTestClass(self.test_run_config)
-        bc.unpack_userparams(required)
-        expected_value = self.test_run_config.user_params["some_param"]
-        self.assertEqual(bc.some_param, expected_value)
-
-    def test_unpack_userparams_required_missing(self):
-        """Missing a required param should raise an error."""
-        required = ["something"]
-        bc = base_test.BaseTestClass(self.test_run_config)
-        expected_msg = (
-            'Missing required user param "%s" in test ' "configuration."
-        ) % required[0]
-        with self.assertRaises(mobly_base_test.Error, msg=expected_msg):
-            bc.unpack_userparams(required)
-
-    def test_unpack_userparams_optional(self):
-        """If an optional param is specified, the value should be what's in the
-        config.
-        """
-        opt = ["some_param"]
-        bc = base_test.BaseTestClass(self.test_run_config)
-        bc.unpack_userparams(opt_param_names=opt)
-        expected_value = self.test_run_config.user_params["some_param"]
-        self.assertEqual(bc.some_param, expected_value)
-
-    def test_unpack_userparams_optional_with_default(self):
-        """If an optional param is specified with a default value, and the
-        param is not in the config, the value should be the default value.
-        """
-        bc = base_test.BaseTestClass(self.test_run_config)
-        bc.unpack_userparams(optional_thing="whatever")
-        self.assertEqual(bc.optional_thing, "whatever")
-
-    def test_unpack_userparams_default_overwrite_by_optional_param_list(self):
-        """If an optional param is specified in kwargs, and the param is in the
-        config, the value should be the one in the config.
-        """
-        bc = base_test.BaseTestClass(self.test_run_config)
-        bc.unpack_userparams(some_param="whatever")
-        expected_value = self.test_run_config.user_params["some_param"]
-        self.assertEqual(bc.some_param, expected_value)
-
-    def test_unpack_userparams_default_overwrite_by_required_param_list(self):
-        """If an optional param is specified in kwargs, the param is in the
-        required param list, and the param is not specified in the config, the
-        param's alue should be the default value and there should be no error
-        thrown.
-        """
-        bc = base_test.BaseTestClass(self.test_run_config)
-        bc.unpack_userparams(
-            req_param_names=["a_kwarg_param"], a_kwarg_param="whatever"
-        )
-        self.assertEqual(bc.a_kwarg_param, "whatever")
-
-    def test_unpack_userparams_optional_missing(self):
-        """Missing an optional param should not raise an error."""
-        opt = ["something"]
-        bc = base_test.BaseTestClass(self.test_run_config)
-        bc.unpack_userparams(opt_param_names=opt)
-
-    def test_unpack_userparams_basic(self):
-        """Required and optional params are unpacked properly."""
-        required = ["something"]
-        optional = ["something_else"]
-        configs = self.test_run_config.copy()
-        configs.user_params["something"] = 42
-        configs.user_params["something_else"] = 53
-        bc = base_test.BaseTestClass(configs)
-        bc.unpack_userparams(req_param_names=required, opt_param_names=optional)
-        self.assertEqual(bc.something, 42)
-        self.assertEqual(bc.something_else, 53)
-
-    def test_unpack_userparams_default_overwrite(self):
-        default_arg_val = "haha"
-        actual_arg_val = "wawa"
-        arg_name = "arg1"
-        configs = self.test_run_config.copy()
-        configs.user_params[arg_name] = actual_arg_val
-        bc = base_test.BaseTestClass(configs)
-        bc.unpack_userparams(opt_param_names=[arg_name], arg1=default_arg_val)
-        self.assertEqual(bc.arg1, actual_arg_val)
-
-    def test_unpack_userparams_default_None(self):
-        bc = base_test.BaseTestClass(self.test_run_config)
-        bc.unpack_userparams(arg1="haha")
-        self.assertEqual(bc.arg1, "haha")
-
-    def test_register_controller_no_config(self):
-        base_cls = base_test.BaseTestClass(self.test_run_config)
-        with self.assertRaisesRegexp(
-            signals.ControllerError, "No corresponding config found for"
-        ):
-            base_cls.register_controller(mock_controller)
-
-    def test_register_optional_controller_no_config(self):
-        base_cls = base_test.BaseTestClass(self.test_run_config)
-        self.assertIsNone(base_cls.register_controller(mock_controller, required=False))
-
-    def test_register_controller_third_party_dup_register(self):
-        """Verifies correctness of registration, internal tally of controllers
-        objects, and the right error happen when a controller module is
-        registered twice.
-        """
-        mock_test_config = self.test_run_config.copy()
-        mock_ctrlr_config_name = mock_controller.MOBLY_CONTROLLER_CONFIG_NAME
-        mock_test_config.controller_configs[mock_ctrlr_config_name] = [
-            "magic1",
-            "magic2",
-        ]
-        base_cls = base_test.BaseTestClass(mock_test_config)
-        base_cls.register_controller(mock_controller)
-        registered_name = "mock_controller"
-        controller_objects = base_cls._controller_manager._controller_objects
-        self.assertTrue(registered_name in controller_objects)
-        mock_ctrlrs = controller_objects[registered_name]
-        self.assertEqual(mock_ctrlrs[0].magic, "magic1")
-        self.assertEqual(mock_ctrlrs[1].magic, "magic2")
-        expected_msg = "Controller module .* has already been registered."
-        with self.assertRaisesRegexp(signals.ControllerError, expected_msg):
-            base_cls.register_controller(mock_controller)
-
-    def test_register_optional_controller_third_party_dup_register(self):
-        """Verifies correctness of registration, internal tally of controllers
-        objects, and the right error happen when an optional controller module
-        is registered twice.
-        """
-        mock_test_config = self.test_run_config.copy()
-        mock_ctrlr_config_name = mock_controller.MOBLY_CONTROLLER_CONFIG_NAME
-        mock_test_config.controller_configs[mock_ctrlr_config_name] = [
-            "magic1",
-            "magic2",
-        ]
-        base_cls = base_test.BaseTestClass(mock_test_config)
-        base_cls.register_controller(mock_controller, required=False)
-        expected_msg = "Controller module .* has already been registered."
-        with self.assertRaisesRegexp(signals.ControllerError, expected_msg):
-            base_cls.register_controller(mock_controller, required=False)
-
-    def test_register_controller_builtin_dup_register(self):
-        """Same as test_register_controller_third_party_dup_register, except
-        this is for a builtin controller module.
-        """
-        mock_test_config = self.test_run_config.copy()
-        mock_ctrlr_config_name = mock_controller.MOBLY_CONTROLLER_CONFIG_NAME
-        mock_ref_name = "haha"
-        setattr(mock_controller, "ACTS_CONTROLLER_REFERENCE_NAME", mock_ref_name)
-        try:
-            mock_ctrlr_ref_name = mock_controller.ACTS_CONTROLLER_REFERENCE_NAME
-            mock_test_config.controller_configs[mock_ctrlr_config_name] = [
-                "magic1",
-                "magic2",
-            ]
-            base_cls = base_test.BaseTestClass(mock_test_config)
-            base_cls.register_controller(mock_controller, builtin=True)
-            self.assertTrue(hasattr(base_cls, mock_ref_name))
-            self.assertTrue(
-                mock_controller.__name__
-                in base_cls._controller_manager._controller_objects
-            )
-            mock_ctrlrs = getattr(base_cls, mock_ctrlr_ref_name)
-            self.assertEqual(mock_ctrlrs[0].magic, "magic1")
-            self.assertEqual(mock_ctrlrs[1].magic, "magic2")
-            expected_msg = "Controller module .* has already been registered."
-            with self.assertRaisesRegexp(signals.ControllerError, expected_msg):
-                base_cls.register_controller(mock_controller, builtin=True)
-        finally:
-            delattr(mock_controller, "ACTS_CONTROLLER_REFERENCE_NAME")
-
-    def test_register_controller_no_get_info(self):
-        mock_test_config = self.test_run_config.copy()
-        mock_ctrlr_config_name = mock_controller.MOBLY_CONTROLLER_CONFIG_NAME
-        mock_ref_name = "haha"
-        get_info = getattr(mock_controller, "get_info")
-        delattr(mock_controller, "get_info")
-        try:
-            mock_test_config.controller_configs[mock_ctrlr_config_name] = [
-                "magic1",
-                "magic2",
-            ]
-            base_cls = base_test.BaseTestClass(mock_test_config)
-            base_cls.register_controller(mock_controller)
-            self.assertEqual(base_cls.results.controller_info, [])
-        finally:
-            setattr(mock_controller, "get_info", get_info)
-
-    def test_register_controller_return_value(self):
-        mock_test_config = self.test_run_config.copy()
-        mock_ctrlr_config_name = mock_controller.MOBLY_CONTROLLER_CONFIG_NAME
-        mock_test_config.controller_configs[mock_ctrlr_config_name] = [
-            "magic1",
-            "magic2",
-        ]
-        base_cls = base_test.BaseTestClass(mock_test_config)
-        magic_devices = base_cls.register_controller(mock_controller)
-        self.assertEqual(magic_devices[0].magic, "magic1")
-        self.assertEqual(magic_devices[1].magic, "magic2")
-
-    def test_handle_file_user_params_does_not_overwrite_existing_params(self):
-        test_run_config = self.test_run_config.copy()
-        test_run_config.user_params = {
-            "foo": ["good_value"],
-            "local_files": {"foo": ["bad_value"]},
-        }
-        test = base_test.BaseTestClass(test_run_config)
-
-        self.assertEqual(test.user_params["foo"], ["good_value"])
-
-    def test_handle_file_user_params_dumps_files_dict(self):
-        test_run_config = self.test_run_config.copy()
-        test_run_config.user_params = {"my_files": {"foo": ["good_value"]}}
-        test = base_test.BaseTestClass(test_run_config)
-
-        self.assertEqual(test.user_params["foo"], ["good_value"])
-
-    def test_handle_file_user_params_is_called_in_init(self):
-        test_run_config = self.test_run_config.copy()
-        test_run_config.user_params["files"] = {"file_a": ["/some/path"]}
-        test = base_test.BaseTestClass(test_run_config)
-
-        self.assertEqual(test.user_params["file_a"], ["/some/path"])
-
-
-if __name__ == "__main__":
-    unittest.main()
diff --git a/src/antlion/unit_tests/acts_host_utils_test.py b/src/antlion/unit_tests/acts_host_utils_test.py
deleted file mode 100755
index cc77f4f..0000000
--- a/src/antlion/unit_tests/acts_host_utils_test.py
+++ /dev/null
@@ -1,48 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import socket
-import unittest
-
-from antlion.controllers.utils_lib import host_utils
-
-
-class ActsHostUtilsTest(unittest.TestCase):
-    """This test class has unit tests for the implementation of everything
-    under antlion.controllers.adb.
-    """
-
-    def test_detects_udp_port_in_use(self):
-        test_s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
-        test_s.bind(("localhost", 0))
-        port = test_s.getsockname()[1]
-        try:
-            self.assertFalse(host_utils.is_port_available(port))
-        finally:
-            test_s.close()
-
-    def test_detects_tcp_port_in_use(self):
-        test_s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
-        test_s.bind(("localhost", 0))
-        port = test_s.getsockname()[1]
-        try:
-            self.assertFalse(host_utils.is_port_available(port))
-        finally:
-            test_s.close()
-
-
-if __name__ == "__main__":
-    unittest.main()
diff --git a/src/antlion/unit_tests/acts_records_test.py b/src/antlion/unit_tests/acts_records_test.py
deleted file mode 100755
index 7816c61..0000000
--- a/src/antlion/unit_tests/acts_records_test.py
+++ /dev/null
@@ -1,238 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import unittest
-
-from mobly.records import ControllerInfoRecord
-
-from antlion import records
-from antlion import signals
-
-
-class ActsRecordsTest(unittest.TestCase):
-    """This test class tests the implementation of classes in antlion.records."""
-
-    def setUp(self):
-        self.tn = "test_name"
-        self.details = "Some details about the test execution."
-        self.float_extra = 12345.56789
-        self.json_extra = {"ha": "whatever"}
-
-    def verify_record(self, record, result, details, extras):
-        # Verify each field.
-        self.assertEqual(record.test_name, self.tn)
-        self.assertEqual(record.result, result)
-        self.assertEqual(record.details, details)
-        self.assertEqual(record.extras, extras)
-        self.assertTrue(record.begin_time, "begin time should not be empty.")
-        self.assertTrue(record.end_time, "end time should not be empty.")
-        # UID is not used at the moment, should always be None.
-        self.assertIsNone(record.uid)
-        # Verify to_dict.
-        d = {}
-        d[records.TestResultEnums.RECORD_NAME] = self.tn
-        d[records.TestResultEnums.RECORD_RESULT] = result
-        d[records.TestResultEnums.RECORD_DETAILS] = details
-        d[records.TestResultEnums.RECORD_EXTRAS] = extras
-        d[records.TestResultEnums.RECORD_BEGIN_TIME] = record.begin_time
-        d[records.TestResultEnums.RECORD_END_TIME] = record.end_time
-        d[records.TestResultEnums.RECORD_LOG_BEGIN_TIME] = record.log_begin_time
-        d[records.TestResultEnums.RECORD_LOG_END_TIME] = record.log_end_time
-        d[records.TestResultEnums.RECORD_UID] = None
-        d[records.TestResultEnums.RECORD_CLASS] = None
-        d[records.TestResultEnums.RECORD_EXTRA_ERRORS] = {}
-        d[records.TestResultEnums.RECORD_STACKTRACE] = record.stacktrace
-        actual_d = record.to_dict()
-        self.assertDictEqual(actual_d, d)
-        # Verify that these code paths do not cause crashes and yield non-empty
-        # results.
-        self.assertTrue(str(record), "str of the record should not be empty.")
-        self.assertTrue(repr(record), "the record's repr shouldn't be empty.")
-        self.assertTrue(
-            record.json_str(), ("json str of the record should " "not be empty.")
-        )
-
-    """ Begin of Tests """
-
-    def test_result_record_pass_none(self):
-        record = records.TestResultRecord(self.tn)
-        record.test_begin()
-        record.test_pass()
-        self.verify_record(
-            record=record,
-            result=records.TestResultEnums.TEST_RESULT_PASS,
-            details=None,
-            extras=None,
-        )
-
-    def test_result_record_pass_with_float_extra(self):
-        record = records.TestResultRecord(self.tn)
-        record.test_begin()
-        s = signals.TestPass(self.details, self.float_extra)
-        record.test_pass(s)
-        self.verify_record(
-            record=record,
-            result=records.TestResultEnums.TEST_RESULT_PASS,
-            details=self.details,
-            extras=self.float_extra,
-        )
-
-    def test_result_record_pass_with_json_extra(self):
-        record = records.TestResultRecord(self.tn)
-        record.test_begin()
-        s = signals.TestPass(self.details, self.json_extra)
-        record.test_pass(s)
-        self.verify_record(
-            record=record,
-            result=records.TestResultEnums.TEST_RESULT_PASS,
-            details=self.details,
-            extras=self.json_extra,
-        )
-
-    def test_result_record_fail_none(self):
-        record = records.TestResultRecord(self.tn)
-        record.test_begin()
-        record.test_fail()
-        self.verify_record(
-            record=record,
-            result=records.TestResultEnums.TEST_RESULT_FAIL,
-            details=None,
-            extras=None,
-        )
-
-    def test_result_record_fail_with_float_extra(self):
-        record = records.TestResultRecord(self.tn)
-        record.test_begin()
-        s = signals.TestFailure(self.details, self.float_extra)
-        record.test_fail(s)
-        self.verify_record(
-            record=record,
-            result=records.TestResultEnums.TEST_RESULT_FAIL,
-            details=self.details,
-            extras=self.float_extra,
-        )
-
-    def test_result_record_fail_with_json_extra(self):
-        record = records.TestResultRecord(self.tn)
-        record.test_begin()
-        s = signals.TestFailure(self.details, self.json_extra)
-        record.test_fail(s)
-        self.verify_record(
-            record=record,
-            result=records.TestResultEnums.TEST_RESULT_FAIL,
-            details=self.details,
-            extras=self.json_extra,
-        )
-
-    def test_result_record_skip_none(self):
-        record = records.TestResultRecord(self.tn)
-        record.test_begin()
-        record.test_skip()
-        self.verify_record(
-            record=record,
-            result=records.TestResultEnums.TEST_RESULT_SKIP,
-            details=None,
-            extras=None,
-        )
-
-    def test_result_record_skip_with_float_extra(self):
-        record = records.TestResultRecord(self.tn)
-        record.test_begin()
-        s = signals.TestSkip(self.details, self.float_extra)
-        record.test_skip(s)
-        self.verify_record(
-            record=record,
-            result=records.TestResultEnums.TEST_RESULT_SKIP,
-            details=self.details,
-            extras=self.float_extra,
-        )
-
-    def test_result_record_skip_with_json_extra(self):
-        record = records.TestResultRecord(self.tn)
-        record.test_begin()
-        s = signals.TestSkip(self.details, self.json_extra)
-        record.test_skip(s)
-        self.verify_record(
-            record=record,
-            result=records.TestResultEnums.TEST_RESULT_SKIP,
-            details=self.details,
-            extras=self.json_extra,
-        )
-
-    def test_result_add_operator_success(self):
-        record1 = records.TestResultRecord(self.tn)
-        record1.test_begin()
-        s = signals.TestPass(self.details, self.float_extra)
-        record1.test_pass(s)
-        tr1 = records.TestResult()
-        tr1.add_record(record1)
-        device1 = ControllerInfoRecord("TestClass", "MockDevice", "device1")
-        tr1.add_controller_info_record(device1)
-        record2 = records.TestResultRecord(self.tn)
-        record2.test_begin()
-        s = signals.TestPass(self.details, self.json_extra)
-        record2.test_pass(s)
-        tr2 = records.TestResult()
-        tr2.add_record(record2)
-        device2 = ControllerInfoRecord("TestClass", "MockDevice", "device2")
-        tr2.add_controller_info_record(device2)
-        tr2 += tr1
-        self.assertTrue(tr2.passed, [tr1, tr2])
-        self.assertTrue(tr2.controller_info, [device1, device2])
-
-    def test_result_add_operator_type_mismatch(self):
-        record1 = records.TestResultRecord(self.tn)
-        record1.test_begin()
-        s = signals.TestPass(self.details, self.float_extra)
-        record1.test_pass(s)
-        tr1 = records.TestResult()
-        tr1.add_record(record1)
-        expected_msg = "Operand .* of type .* is not a TestResult."
-        with self.assertRaisesRegexp(TypeError, expected_msg):
-            tr1 += "haha"
-
-    def test_is_all_pass(self):
-        s = signals.TestPass(self.details, self.float_extra)
-        record1 = records.TestResultRecord(self.tn)
-        record1.test_begin()
-        record1.test_pass(s)
-        s = signals.TestSkip(self.details, self.float_extra)
-        record2 = records.TestResultRecord(self.tn)
-        record2.test_begin()
-        record2.test_skip(s)
-        tr = records.TestResult()
-        tr.add_record(record1)
-        tr.add_record(record2)
-        tr.add_record(record1)
-        self.assertEqual(len(tr.passed), 2)
-        self.assertTrue(tr.is_all_pass)
-
-    def test_is_all_pass_negative(self):
-        s = signals.TestFailure(self.details, self.float_extra)
-        record1 = records.TestResultRecord(self.tn)
-        record1.test_begin()
-        record1.test_fail(s)
-        record2 = records.TestResultRecord(self.tn)
-        record2.test_begin()
-        record2.test_error(s)
-        tr = records.TestResult()
-        tr.add_record(record1)
-        tr.add_record(record2)
-        self.assertFalse(tr.is_all_pass)
-
-
-if __name__ == "__main__":
-    unittest.main()
diff --git a/src/antlion/unit_tests/acts_sniffer_test_config.json b/src/antlion/unit_tests/acts_sniffer_test_config.json
deleted file mode 100644
index 4ded36e..0000000
--- a/src/antlion/unit_tests/acts_sniffer_test_config.json
+++ /dev/null
@@ -1,20 +0,0 @@
-{
-    "testbed":
-    [
-        {
-            "_description": "ACTS sniffer sanity test bed, no device needed.",
-            "name": "SnifferSanity",
-            "Sniffer": [ {"Type": "local",
-                          "SubType": "tcpdump",
-                          "Interface": "wlan0",
-                          "BaseConfigs": {
-                              "channel": 6
-                          }}
-                       ]
-
-        }
-    ],
-    "logpath": "/tmp/logs",
-    "testpaths": ["./"]
-}
-
diff --git a/src/antlion/unit_tests/acts_test_decorators_test.py b/src/antlion/unit_tests/acts_test_decorators_test.py
deleted file mode 100755
index 7ffc83d..0000000
--- a/src/antlion/unit_tests/acts_test_decorators_test.py
+++ /dev/null
@@ -1,184 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import mock
-import shutil
-import tempfile
-import unittest
-import mock
-
-from mobly import config_parser as mobly_config_parser
-
-from antlion import base_test
-from antlion import signals
-from antlion import test_decorators
-from antlion import test_runner
-from antlion.controllers.sl4a_lib import rpc_client
-
-
-def return_true():
-    return True
-
-
-def return_false():
-    return False
-
-
-def raise_pass():
-    raise signals.TestPass("")
-
-
-def raise_failure():
-    raise signals.TestFailure("")
-
-
-def raise_sl4a():
-    raise rpc_client.Sl4aException("")
-
-
-def raise_generic():
-    raise Exception("")
-
-
-class MockTest(base_test.BaseTestClass):
-    TEST_CASE_LIST = "test_run_mock_test"
-    TEST_LOGIC_ATTR = "test_logic"
-
-    def test_run_mock_test(self):
-        getattr(MockTest, MockTest.TEST_LOGIC_ATTR, None)()
-
-
-class TestDecoratorIntegrationTests(unittest.TestCase):
-    @classmethod
-    def setUpClass(cls):
-        cls.tmp_dir = tempfile.mkdtemp()
-        cls.MOCK_CONFIG = mobly_config_parser.TestRunConfig()
-        cls.MOCK_CONFIG.testbed_name = "SampleTestBed"
-        cls.MOCK_CONFIG.log_path = cls.tmp_dir
-
-        cls.MOCK_TEST_RUN_LIST = [(MockTest.__name__, [MockTest.TEST_CASE_LIST])]
-
-    @classmethod
-    def tearDownClass(cls):
-        shutil.rmtree(cls.tmp_dir)
-
-    def _run_with_test_logic(self, func):
-        if hasattr(MockTest, MockTest.TEST_LOGIC_ATTR):
-            delattr(MockTest, MockTest.TEST_LOGIC_ATTR)
-        setattr(MockTest, MockTest.TEST_LOGIC_ATTR, func)
-        self.test_runner = test_runner.TestRunner(
-            self.MOCK_CONFIG, self.MOCK_TEST_RUN_LIST
-        )
-        self.test_runner.run(MockTest)
-
-    def _validate_results_has_extra(self, result, extra_key, extra_value):
-        results = self.test_runner.results
-        self.assertGreaterEqual(
-            len(results.executed), 1, "Expected at least one executed test."
-        )
-        record = results.executed[0]
-        self.assertIsNotNone(record.extras, "Expected the test record to have extras.")
-        self.assertEqual(record.extras[extra_key], extra_value)
-
-    def test_mock_test_with_raise_pass(self):
-        self._run_with_test_logic(raise_pass)
-
-    def test_mock_test_with_raise_generic(self):
-        self._run_with_test_logic(raise_generic)
-
-
-class RepeatedTestTests(unittest.TestCase):
-    def test_all_error_types_count_toward_failures(self):
-        def result_selector(results, _):
-            self.assertIsInstance(results[0], AssertionError)
-            self.assertIsInstance(results[1], signals.TestFailure)
-            self.assertIsInstance(results[2], signals.TestError)
-            self.assertIsInstance(results[3], IndexError)
-            raise signals.TestPass("Expected failures occurred")
-
-        @test_decorators.repeated_test(1, 3, result_selector)
-        def test_case(_, attempt_number):
-            if attempt_number == 1:
-                raise AssertionError()
-            elif attempt_number == 2:
-                raise signals.TestFailure("Failed")
-            elif attempt_number == 3:
-                raise signals.TestError("Error")
-            else:
-                # Note that any Exception that does not fall into another bucket
-                # is also considered a failure
-                raise IndexError("Bad index")
-
-        with self.assertRaises(signals.TestPass):
-            test_case(mock.Mock())
-
-    def test_passes_stop_repeating_the_test_case(self):
-        def result_selector(results, _):
-            self.assertEqual(len(results), 3)
-            for result in results:
-                self.assertIsInstance(result, signals.TestPass)
-            raise signals.TestPass("Expected passes occurred")
-
-        @test_decorators.repeated_test(3, 0, result_selector)
-        def test_case(*_):
-            raise signals.TestPass("Passed")
-
-        with self.assertRaises(signals.TestPass):
-            test_case(mock.Mock())
-
-    def test_abort_signals_are_uncaught(self):
-        @test_decorators.repeated_test(3, 0)
-        def test_case(*_):
-            raise signals.TestAbortClass("Abort All")
-
-        with self.assertRaises(signals.TestAbortClass):
-            test_case(mock.Mock())
-
-    def test_keyboard_interrupt_is_uncaught(self):
-        @test_decorators.repeated_test(3, 0)
-        def test_case(*_):
-            raise KeyboardInterrupt()
-
-        with self.assertRaises(KeyboardInterrupt):
-            test_case(mock.Mock())
-
-    def test_teardown_and_setup_are_called_between_test_cases(self):
-        mock_test_class = mock.Mock()
-
-        @test_decorators.repeated_test(1, 1)
-        def test_case(*_):
-            raise signals.TestFailure("Failed")
-
-        with self.assertRaises(signals.TestFailure):
-            test_case(mock_test_class)
-
-        self.assertTrue(mock_test_class.setup_test.called)
-        self.assertTrue(mock_test_class.teardown_test.called)
-
-    def test_result_selector_returned_value_gets_raised(self):
-        def result_selector(*_):
-            return signals.TestPass("Expect this to be raised.")
-
-        @test_decorators.repeated_test(3, 0, result_selector=result_selector)
-        def test_case(*_):
-            raise signals.TestFailure("Result selector ignores this.")
-
-        with self.assertRaises(signals.TestPass):
-            test_case(mock.Mock())
-
-
-if __name__ == "__main__":
-    unittest.main()
diff --git a/src/antlion/unit_tests/acts_test_runner_test.py b/src/antlion/unit_tests/acts_test_runner_test.py
deleted file mode 100755
index 9b3cc58..0000000
--- a/src/antlion/unit_tests/acts_test_runner_test.py
+++ /dev/null
@@ -1,129 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import mock
-import os
-import shutil
-import tempfile
-import unittest
-
-from mobly.config_parser import TestRunConfig
-
-from antlion import keys
-from antlion import test_runner
-
-import acts_android_device_test
-import mock_controller
-import IntegrationTest
-
-
-class ActsTestRunnerTest(unittest.TestCase):
-    """This test class has unit tests for the implementation of everything
-    under antlion.test_runner.
-    """
-
-    def setUp(self):
-        self.tmp_dir = tempfile.mkdtemp()
-        self.base_mock_test_config = TestRunConfig()
-        self.base_mock_test_config.testbed_name = "SampleTestBed"
-        self.base_mock_test_config.log_path = self.tmp_dir
-        self.base_mock_test_config.controller_configs = {
-            "testpaths": [os.path.dirname(IntegrationTest.__file__)]
-        }
-        self.base_mock_test_config.user_params = {"icecream": 42, "extra_param": "haha"}
-        self.mock_run_list = [("SampleTest", None)]
-
-    def tearDown(self):
-        shutil.rmtree(self.tmp_dir)
-
-    def test_run_twice(self):
-        """Verifies that:
-        1. Repeated run works properly.
-        2. The original configuration is not altered if a test controller
-           module modifies configuration.
-        """
-        mock_test_config = self.base_mock_test_config.copy()
-        tb_key = keys.Config.key_testbed.value
-        mock_ctrlr_config_name = mock_controller.MOBLY_CONTROLLER_CONFIG_NAME
-        my_config = [
-            {"serial": "xxxx", "magic": "Magic1"},
-            {"serial": "xxxx", "magic": "Magic2"},
-        ]
-        mock_test_config.controller_configs[mock_ctrlr_config_name] = my_config
-        tr = test_runner.TestRunner(mock_test_config, [("IntegrationTest", None)])
-        tr.run()
-        tr.run()
-        tr.stop()
-        results = tr.results.summary_dict()
-        self.assertEqual(results["Requested"], 2)
-        self.assertEqual(results["Executed"], 2)
-        self.assertEqual(results["Passed"], 2)
-
-    @mock.patch(
-        "antlion.controllers.adb.AdbProxy",
-        return_value=acts_android_device_test.MockAdbProxy(1, return_value=""),
-    )
-    @mock.patch(
-        "antlion.controllers.fastboot.FastbootProxy",
-        return_value=acts_android_device_test.MockFastbootProxy(1),
-    )
-    @mock.patch(
-        "antlion.controllers.android_device.list_adb_devices", return_value=["1"]
-    )
-    @mock.patch(
-        "antlion.controllers.android_device.get_all_instances",
-        return_value=acts_android_device_test.get_mock_ads(1),
-    )
-    @mock.patch(
-        "antlion.controllers.android_device.AndroidDevice.ensure_screen_on",
-        return_value=True,
-    )
-    @mock.patch(
-        "antlion.controllers.android_device.AndroidDevice.exit_setup_wizard",
-        return_value=True,
-    )
-    @mock.patch("antlion.controllers.android_device.AndroidDevice.start_services")
-    def test_run_two_test_classes(self, *_):
-        """Verifies that running more than one test class in one test run works
-        properly.
-
-        This requires using a built-in controller module. Using AndroidDevice
-        module since it has all the mocks needed already.
-        """
-        mock_test_config = self.base_mock_test_config.copy()
-        tb_key = keys.Config.key_testbed.value
-        mock_ctrlr_config_name = mock_controller.MOBLY_CONTROLLER_CONFIG_NAME
-        my_config = [
-            {"serial": "xxxx", "magic": "Magic1"},
-            {"serial": "xxxx", "magic": "Magic2"},
-        ]
-        mock_test_config.controller_configs[mock_ctrlr_config_name] = my_config
-        mock_test_config.controller_configs["AndroidDevice"] = [
-            {"serial": "1", "skip_sl4a": True}
-        ]
-        tr = test_runner.TestRunner(
-            mock_test_config, [("IntegrationTest", None), ("IntegrationTest", None)]
-        )
-        tr.run()
-        tr.stop()
-        results = tr.results.summary_dict()
-        self.assertEqual(results["Requested"], 2)
-        self.assertEqual(results["Executed"], 2)
-        self.assertEqual(results["Passed"], 2)
-
-
-if __name__ == "__main__":
-    unittest.main()
diff --git a/src/antlion/unit_tests/acts_utils_test.py b/src/antlion/unit_tests/acts_utils_test.py
deleted file mode 100755
index a3abe2e..0000000
--- a/src/antlion/unit_tests/acts_utils_test.py
+++ /dev/null
@@ -1,609 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import logging
-import subprocess
-import time
-import unittest
-
-import mock
-
-from antlion import utils
-from antlion import signals
-from antlion.capabilities.ssh import SSHConfig, SSHResult
-from antlion.controllers.adb_lib.error import AdbError
-from antlion.controllers.android_device import AndroidDevice
-from antlion.controllers.fuchsia_device import FuchsiaDevice
-from antlion.controllers.fuchsia_lib.sl4f import SL4F
-from antlion.controllers.fuchsia_lib.ssh import FuchsiaSSHProvider
-from antlion.controllers.utils_lib.ssh.connection import SshConnection
-from antlion.libs.proc import job
-
-PROVISIONED_STATE_GOOD = 1
-
-MOCK_ENO1_IP_ADDRESSES = """100.127.110.79
-2401:fa00:480:7a00:8d4f:85ff:cc5c:787e
-2401:fa00:480:7a00:459:b993:fcbf:1419
-fe80::c66d:3c75:2cec:1d72"""
-
-MOCK_WLAN1_IP_ADDRESSES = ""
-
-FUCHSIA_INTERFACES = {
-    "id": "1",
-    "result": [
-        {
-            "id": 1,
-            "name": "lo",
-            "ipv4_addresses": [
-                [127, 0, 0, 1],
-            ],
-            "ipv6_addresses": [
-                [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1],
-            ],
-            "online": True,
-            "mac": [0, 0, 0, 0, 0, 0],
-        },
-        {
-            "id": 2,
-            "name": "eno1",
-            "ipv4_addresses": [
-                [100, 127, 110, 79],
-            ],
-            "ipv6_addresses": [
-                [254, 128, 0, 0, 0, 0, 0, 0, 198, 109, 60, 117, 44, 236, 29, 114],
-                [36, 1, 250, 0, 4, 128, 122, 0, 141, 79, 133, 255, 204, 92, 120, 126],
-                [36, 1, 250, 0, 4, 128, 122, 0, 4, 89, 185, 147, 252, 191, 20, 25],
-            ],
-            "online": True,
-            "mac": [0, 224, 76, 5, 76, 229],
-        },
-        {
-            "id": 3,
-            "name": "wlanxc0",
-            "ipv4_addresses": [],
-            "ipv6_addresses": [
-                [254, 128, 0, 0, 0, 0, 0, 0, 96, 255, 93, 96, 52, 253, 253, 243],
-                [254, 128, 0, 0, 0, 0, 0, 0, 70, 7, 11, 255, 254, 118, 126, 192],
-            ],
-            "online": False,
-            "mac": [68, 7, 11, 118, 126, 192],
-        },
-    ],
-    "error": None,
-}
-
-CORRECT_FULL_IP_LIST = {
-    "ipv4_private": [],
-    "ipv4_public": ["100.127.110.79"],
-    "ipv6_link_local": ["fe80::c66d:3c75:2cec:1d72"],
-    "ipv6_private_local": [],
-    "ipv6_public": [
-        "2401:fa00:480:7a00:8d4f:85ff:cc5c:787e",
-        "2401:fa00:480:7a00:459:b993:fcbf:1419",
-    ],
-}
-
-CORRECT_EMPTY_IP_LIST = {
-    "ipv4_private": [],
-    "ipv4_public": [],
-    "ipv6_link_local": [],
-    "ipv6_private_local": [],
-    "ipv6_public": [],
-}
-
-
-class ByPassSetupWizardTests(unittest.TestCase):
-    """This test class for unit testing antlion.utils.bypass_setup_wizard."""
-
-    def test_start_standing_subproc(self):
-        with self.assertRaisesRegex(utils.ActsUtilsError, "Process .* has terminated"):
-            utils.start_standing_subprocess("sleep 0", check_health_delay=0.1)
-
-    def test_stop_standing_subproc(self):
-        p = utils.start_standing_subprocess("sleep 0")
-        time.sleep(0.1)
-        with self.assertRaisesRegex(utils.ActsUtilsError, "Process .* has terminated"):
-            utils.stop_standing_subprocess(p)
-
-    @mock.patch("time.sleep")
-    def test_bypass_setup_wizard_no_complications(self, _):
-        ad = mock.Mock()
-        ad.adb.shell.side_effect = [
-            # Return value for SetupWizardExitActivity
-            BypassSetupWizardReturn.NO_COMPLICATIONS,
-            # Return value for device_provisioned
-            PROVISIONED_STATE_GOOD,
-        ]
-        ad.adb.return_state = BypassSetupWizardReturn.NO_COMPLICATIONS
-        self.assertTrue(utils.bypass_setup_wizard(ad))
-        self.assertFalse(
-            ad.adb.root_adb.called,
-            "The root command should not be called if there are no " "complications.",
-        )
-
-    @mock.patch("time.sleep")
-    def test_bypass_setup_wizard_unrecognized_error(self, _):
-        ad = mock.Mock()
-        ad.adb.shell.side_effect = [
-            # Return value for SetupWizardExitActivity
-            BypassSetupWizardReturn.UNRECOGNIZED_ERR,
-            # Return value for device_provisioned
-            PROVISIONED_STATE_GOOD,
-        ]
-        with self.assertRaises(AdbError):
-            utils.bypass_setup_wizard(ad)
-        self.assertFalse(
-            ad.adb.root_adb.called,
-            "The root command should not be called if we do not have a "
-            "codepath for recovering from the failure.",
-        )
-
-    @mock.patch("time.sleep")
-    def test_bypass_setup_wizard_need_root_access(self, _):
-        ad = mock.Mock()
-        ad.adb.shell.side_effect = [
-            # Return value for SetupWizardExitActivity
-            BypassSetupWizardReturn.ROOT_ADB_NO_COMP,
-            # Return value for rooting the device
-            BypassSetupWizardReturn.NO_COMPLICATIONS,
-            # Return value for device_provisioned
-            PROVISIONED_STATE_GOOD,
-        ]
-
-        utils.bypass_setup_wizard(ad)
-
-        self.assertTrue(
-            ad.adb.root_adb_called,
-            "The command required root access, but the device was never " "rooted.",
-        )
-
-    @mock.patch("time.sleep")
-    def test_bypass_setup_wizard_need_root_already_skipped(self, _):
-        ad = mock.Mock()
-        ad.adb.shell.side_effect = [
-            # Return value for SetupWizardExitActivity
-            BypassSetupWizardReturn.ROOT_ADB_SKIPPED,
-            # Return value for SetupWizardExitActivity after root
-            BypassSetupWizardReturn.ALREADY_BYPASSED,
-            # Return value for device_provisioned
-            PROVISIONED_STATE_GOOD,
-        ]
-        self.assertTrue(utils.bypass_setup_wizard(ad))
-        self.assertTrue(ad.adb.root_adb_called)
-
-    @mock.patch("time.sleep")
-    def test_bypass_setup_wizard_root_access_still_fails(self, _):
-        ad = mock.Mock()
-        ad.adb.shell.side_effect = [
-            # Return value for SetupWizardExitActivity
-            BypassSetupWizardReturn.ROOT_ADB_FAILS,
-            # Return value for SetupWizardExitActivity after root
-            BypassSetupWizardReturn.UNRECOGNIZED_ERR,
-            # Return value for device_provisioned
-            PROVISIONED_STATE_GOOD,
-        ]
-
-        with self.assertRaises(AdbError):
-            utils.bypass_setup_wizard(ad)
-        self.assertTrue(ad.adb.root_adb_called)
-
-
-class BypassSetupWizardReturn:
-    # No complications. Bypass works the first time without issues.
-    NO_COMPLICATIONS = (
-        "Starting: Intent { cmp=com.google.android.setupwizard/"
-        ".SetupWizardExitActivity }"
-    )
-
-    # Fail with doesn't need to be skipped/was skipped already.
-    ALREADY_BYPASSED = AdbError(
-        "", "ADB_CMD_OUTPUT:0", "Error type 3\n" "Error: Activity class", 1
-    )
-    # Fail with different error.
-    UNRECOGNIZED_ERR = AdbError(
-        "", "ADB_CMD_OUTPUT:0", "Error type 4\n" "Error: Activity class", 0
-    )
-    # Fail, get root access, then no complications arise.
-    ROOT_ADB_NO_COMP = AdbError(
-        "",
-        "ADB_CMD_OUTPUT:255",
-        "Security exception: Permission Denial: "
-        "starting Intent { flg=0x10000000 "
-        "cmp=com.google.android.setupwizard/"
-        ".SetupWizardExitActivity } from null "
-        "(pid=5045, uid=2000) not exported from uid "
-        "10000",
-        0,
-    )
-    # Even with root access, the bypass setup wizard doesn't need to be skipped.
-    ROOT_ADB_SKIPPED = AdbError(
-        "",
-        "ADB_CMD_OUTPUT:255",
-        "Security exception: Permission Denial: "
-        "starting Intent { flg=0x10000000 "
-        "cmp=com.google.android.setupwizard/"
-        ".SetupWizardExitActivity } from null "
-        "(pid=5045, uid=2000) not exported from "
-        "uid 10000",
-        0,
-    )
-    # Even with root access, the bypass setup wizard fails
-    ROOT_ADB_FAILS = AdbError(
-        "",
-        "ADB_CMD_OUTPUT:255",
-        "Security exception: Permission Denial: starting Intent { "
-        "flg=0x10000000 cmp=com.google.android.setupwizard/"
-        ".SetupWizardExitActivity } from null (pid=5045, uid=2000) not "
-        "exported from uid 10000",
-        0,
-    )
-
-
-class ConcurrentActionsTest(unittest.TestCase):
-    """Tests antlion.utils.run_concurrent_actions and related functions."""
-
-    @staticmethod
-    def function_returns_passed_in_arg(arg):
-        return arg
-
-    @staticmethod
-    def function_raises_passed_in_exception_type(exception_type):
-        raise exception_type
-
-    def test_run_concurrent_actions_no_raise_returns_proper_return_values(self):
-        """Tests run_concurrent_actions_no_raise returns in the correct order.
-
-        Each function passed into run_concurrent_actions_no_raise returns the
-        values returned from each individual callable in the order passed in.
-        """
-        ret_values = utils.run_concurrent_actions_no_raise(
-            lambda: self.function_returns_passed_in_arg("ARG1"),
-            lambda: self.function_returns_passed_in_arg("ARG2"),
-            lambda: self.function_returns_passed_in_arg("ARG3"),
-        )
-
-        self.assertEqual(len(ret_values), 3)
-        self.assertEqual(ret_values[0], "ARG1")
-        self.assertEqual(ret_values[1], "ARG2")
-        self.assertEqual(ret_values[2], "ARG3")
-
-    def test_run_concurrent_actions_no_raise_returns_raised_exceptions(self):
-        """Tests run_concurrent_actions_no_raise returns raised exceptions.
-
-        Instead of allowing raised exceptions to be raised in the main thread,
-        this function should capture the exception and return them in the slot
-        the return value should have been returned in.
-        """
-        ret_values = utils.run_concurrent_actions_no_raise(
-            lambda: self.function_raises_passed_in_exception_type(IndexError),
-            lambda: self.function_raises_passed_in_exception_type(KeyError),
-        )
-
-        self.assertEqual(len(ret_values), 2)
-        self.assertEqual(ret_values[0].__class__, IndexError)
-        self.assertEqual(ret_values[1].__class__, KeyError)
-
-    def test_run_concurrent_actions_returns_proper_return_values(self):
-        """Tests run_concurrent_actions returns in the correct order.
-
-        Each function passed into run_concurrent_actions returns the values
-        returned from each individual callable in the order passed in.
-        """
-
-        ret_values = utils.run_concurrent_actions(
-            lambda: self.function_returns_passed_in_arg("ARG1"),
-            lambda: self.function_returns_passed_in_arg("ARG2"),
-            lambda: self.function_returns_passed_in_arg("ARG3"),
-        )
-
-        self.assertEqual(len(ret_values), 3)
-        self.assertEqual(ret_values[0], "ARG1")
-        self.assertEqual(ret_values[1], "ARG2")
-        self.assertEqual(ret_values[2], "ARG3")
-
-    def test_run_concurrent_actions_raises_exceptions(self):
-        """Tests run_concurrent_actions raises exceptions from given actions."""
-        with self.assertRaises(KeyError):
-            utils.run_concurrent_actions(
-                lambda: self.function_returns_passed_in_arg("ARG1"),
-                lambda: self.function_raises_passed_in_exception_type(KeyError),
-            )
-
-    def test_test_concurrent_actions_raises_non_test_failure(self):
-        """Tests test_concurrent_actions raises the given exception."""
-        with self.assertRaises(KeyError):
-            utils.test_concurrent_actions(
-                lambda: self.function_raises_passed_in_exception_type(KeyError),
-                failure_exceptions=signals.TestFailure,
-            )
-
-    def test_test_concurrent_actions_raises_test_failure(self):
-        """Tests test_concurrent_actions raises the given exception."""
-        with self.assertRaises(signals.TestFailure):
-            utils.test_concurrent_actions(
-                lambda: self.function_raises_passed_in_exception_type(KeyError),
-                failure_exceptions=KeyError,
-            )
-
-
-class SuppressLogOutputTest(unittest.TestCase):
-    """Tests SuppressLogOutput"""
-
-    def test_suppress_log_output(self):
-        """Tests that the SuppressLogOutput context manager removes handlers
-        of the specified levels upon entry and re-adds handlers upon exit.
-        """
-        handlers = [
-            logging.NullHandler(level=lvl)
-            for lvl in (logging.DEBUG, logging.INFO, logging.ERROR)
-        ]
-        log = logging.getLogger("test_log")
-        for handler in handlers:
-            log.addHandler(handler)
-        with utils.SuppressLogOutput(log, [logging.INFO, logging.ERROR]):
-            self.assertTrue(
-                any(handler.level == logging.DEBUG for handler in log.handlers)
-            )
-            self.assertFalse(
-                any(
-                    handler.level in (logging.INFO, logging.ERROR)
-                    for handler in log.handlers
-                )
-            )
-        self.assertCountEqual(handlers, log.handlers)
-
-
-class IpAddressUtilTest(unittest.TestCase):
-    def test_positive_ipv4_normal_address(self):
-        ip_address = "192.168.1.123"
-        self.assertTrue(utils.is_valid_ipv4_address(ip_address))
-
-    def test_positive_ipv4_any_address(self):
-        ip_address = "0.0.0.0"
-        self.assertTrue(utils.is_valid_ipv4_address(ip_address))
-
-    def test_positive_ipv4_broadcast(self):
-        ip_address = "255.255.255.0"
-        self.assertTrue(utils.is_valid_ipv4_address(ip_address))
-
-    def test_negative_ipv4_with_ipv6_address(self):
-        ip_address = "fe80::f693:9fff:fef4:1ac"
-        self.assertFalse(utils.is_valid_ipv4_address(ip_address))
-
-    def test_negative_ipv4_with_invalid_string(self):
-        ip_address = "fdsafdsafdsafdsf"
-        self.assertFalse(utils.is_valid_ipv4_address(ip_address))
-
-    def test_negative_ipv4_with_invalid_number(self):
-        ip_address = "192.168.500.123"
-        self.assertFalse(utils.is_valid_ipv4_address(ip_address))
-
-    def test_positive_ipv6(self):
-        ip_address = "fe80::f693:9fff:fef4:1ac"
-        self.assertTrue(utils.is_valid_ipv6_address(ip_address))
-
-    def test_positive_ipv6_link_local(self):
-        ip_address = "fe80::"
-        self.assertTrue(utils.is_valid_ipv6_address(ip_address))
-
-    def test_negative_ipv6_with_ipv4_address(self):
-        ip_address = "192.168.1.123"
-        self.assertFalse(utils.is_valid_ipv6_address(ip_address))
-
-    def test_negative_ipv6_invalid_characters(self):
-        ip_address = "fe80:jkyr:f693:9fff:fef4:1ac"
-        self.assertFalse(utils.is_valid_ipv6_address(ip_address))
-
-    def test_negative_ipv6_invalid_string(self):
-        ip_address = "fdsafdsafdsafdsf"
-        self.assertFalse(utils.is_valid_ipv6_address(ip_address))
-
-    @mock.patch("antlion.libs.proc.job.run")
-    def test_local_get_interface_ip_addresses_full(self, job_mock):
-        job_mock.side_effect = [
-            job.Result(stdout=bytes(MOCK_ENO1_IP_ADDRESSES, "utf-8"), encoding="utf-8"),
-        ]
-        self.assertEqual(
-            utils.get_interface_ip_addresses(job, "eno1"), CORRECT_FULL_IP_LIST
-        )
-
-    @mock.patch("antlion.libs.proc.job.run")
-    def test_local_get_interface_ip_addresses_empty(self, job_mock):
-        job_mock.side_effect = [
-            job.Result(
-                stdout=bytes(MOCK_WLAN1_IP_ADDRESSES, "utf-8"), encoding="utf-8"
-            ),
-        ]
-        self.assertEqual(
-            utils.get_interface_ip_addresses(job, "wlan1"), CORRECT_EMPTY_IP_LIST
-        )
-
-    @mock.patch("antlion.controllers.utils_lib.ssh.connection.SshConnection.run")
-    def test_ssh_get_interface_ip_addresses_full(self, ssh_mock):
-        ssh_mock.side_effect = [
-            job.Result(stdout=bytes(MOCK_ENO1_IP_ADDRESSES, "utf-8"), encoding="utf-8"),
-        ]
-        self.assertEqual(
-            utils.get_interface_ip_addresses(SshConnection("mock_settings"), "eno1"),
-            CORRECT_FULL_IP_LIST,
-        )
-
-    @mock.patch("antlion.controllers.utils_lib.ssh.connection.SshConnection.run")
-    def test_ssh_get_interface_ip_addresses_empty(self, ssh_mock):
-        ssh_mock.side_effect = [
-            job.Result(
-                stdout=bytes(MOCK_WLAN1_IP_ADDRESSES, "utf-8"), encoding="utf-8"
-            ),
-        ]
-        self.assertEqual(
-            utils.get_interface_ip_addresses(SshConnection("mock_settings"), "wlan1"),
-            CORRECT_EMPTY_IP_LIST,
-        )
-
-    @mock.patch("antlion.controllers.adb.AdbProxy")
-    @mock.patch.object(AndroidDevice, "is_bootloader", return_value=True)
-    def test_android_get_interface_ip_addresses_full(self, is_bootloader, adb_mock):
-        adb_mock().shell.side_effect = [
-            MOCK_ENO1_IP_ADDRESSES,
-        ]
-        self.assertEqual(
-            utils.get_interface_ip_addresses(AndroidDevice(), "eno1"),
-            CORRECT_FULL_IP_LIST,
-        )
-
-    @mock.patch("antlion.controllers.adb.AdbProxy")
-    @mock.patch.object(AndroidDevice, "is_bootloader", return_value=True)
-    def test_android_get_interface_ip_addresses_empty(self, is_bootloader, adb_mock):
-        adb_mock().shell.side_effect = [
-            MOCK_WLAN1_IP_ADDRESSES,
-        ]
-        self.assertEqual(
-            utils.get_interface_ip_addresses(AndroidDevice(), "wlan1"),
-            CORRECT_EMPTY_IP_LIST,
-        )
-
-    @mock.patch(
-        "antlion.controllers.fuchsia_device.FuchsiaDevice.sl4f",
-        new_callable=mock.PropertyMock,
-    )
-    @mock.patch(
-        "antlion.controllers.fuchsia_device.FuchsiaDevice.ffx",
-        new_callable=mock.PropertyMock,
-    )
-    @mock.patch("antlion.controllers.fuchsia_lib.sl4f.wait_for_port")
-    @mock.patch("antlion.controllers.fuchsia_lib.ssh.FuchsiaSSHProvider.run")
-    @mock.patch("antlion.capabilities.ssh.SSHProvider.wait_until_reachable")
-    @mock.patch(
-        "antlion.controllers.fuchsia_device." "FuchsiaDevice._generate_ssh_config"
-    )
-    @mock.patch(
-        "antlion.controllers."
-        "fuchsia_lib.netstack.netstack_lib."
-        "FuchsiaNetstackLib.netstackListInterfaces"
-    )
-    def test_fuchsia_get_interface_ip_addresses_full(
-        self,
-        list_interfaces_mock,
-        generate_ssh_config_mock,
-        ssh_wait_until_reachable_mock,
-        ssh_run_mock,
-        wait_for_port_mock,
-        ffx_mock,
-        sl4f_mock,
-    ):
-        # Configure the log path which is required by ACTS logger.
-        logging.log_path = "/tmp/unit_test_garbage"
-
-        ssh = FuchsiaSSHProvider(SSHConfig("192.168.1.1", 22, "/dev/null"))
-        ssh_run_mock.return_value = SSHResult(
-            subprocess.CompletedProcess([], 0, stdout=b"", stderr=b"")
-        )
-
-        # Don't try to wait for the SL4F server to start; it's not being used.
-        wait_for_port_mock.return_value = None
-
-        sl4f_mock.return_value = SL4F(ssh, "http://192.168.1.1:80")
-        ssh_wait_until_reachable_mock.return_value = None
-
-        list_interfaces_mock.return_value = FUCHSIA_INTERFACES
-        self.assertEqual(
-            utils.get_interface_ip_addresses(
-                FuchsiaDevice({"ip": "192.168.1.1"}), "eno1"
-            ),
-            CORRECT_FULL_IP_LIST,
-        )
-
-    @mock.patch(
-        "antlion.controllers.fuchsia_device.FuchsiaDevice.sl4f",
-        new_callable=mock.PropertyMock,
-    )
-    @mock.patch(
-        "antlion.controllers.fuchsia_device.FuchsiaDevice.ffx",
-        new_callable=mock.PropertyMock,
-    )
-    @mock.patch("antlion.controllers.fuchsia_lib.sl4f.wait_for_port")
-    @mock.patch("antlion.controllers.fuchsia_lib.ssh.FuchsiaSSHProvider.run")
-    @mock.patch("antlion.capabilities.ssh.SSHProvider.wait_until_reachable")
-    @mock.patch(
-        "antlion.controllers.fuchsia_device." "FuchsiaDevice._generate_ssh_config"
-    )
-    @mock.patch(
-        "antlion.controllers."
-        "fuchsia_lib.netstack.netstack_lib."
-        "FuchsiaNetstackLib.netstackListInterfaces"
-    )
-    def test_fuchsia_get_interface_ip_addresses_empty(
-        self,
-        list_interfaces_mock,
-        generate_ssh_config_mock,
-        ssh_wait_until_reachable_mock,
-        ssh_run_mock,
-        wait_for_port_mock,
-        ffx_mock,
-        sl4f_mock,
-    ):
-        # Configure the log path which is required by ACTS logger.
-        logging.log_path = "/tmp/unit_test_garbage"
-
-        ssh = FuchsiaSSHProvider(SSHConfig("192.168.1.1", 22, "/dev/null"))
-        ssh_run_mock.return_value = SSHResult(
-            subprocess.CompletedProcess([], 0, stdout=b"", stderr=b"")
-        )
-
-        # Don't try to wait for the SL4F server to start; it's not being used.
-        wait_for_port_mock.return_value = None
-        ssh_wait_until_reachable_mock.return_value = None
-        sl4f_mock.return_value = SL4F(ssh, "http://192.168.1.1:80")
-
-        list_interfaces_mock.return_value = FUCHSIA_INTERFACES
-        self.assertEqual(
-            utils.get_interface_ip_addresses(
-                FuchsiaDevice({"ip": "192.168.1.1"}), "wlan1"
-            ),
-            CORRECT_EMPTY_IP_LIST,
-        )
-
-
-class GetDeviceTest(unittest.TestCase):
-    class TestDevice:
-        def __init__(self, id, device_type=None) -> None:
-            self.id = id
-            if device_type:
-                self.device_type = device_type
-
-    def test_get_device_none(self):
-        devices = []
-        self.assertRaises(ValueError, utils.get_device, devices, "DUT")
-
-    def test_get_device_default_one(self):
-        devices = [self.TestDevice(0)]
-        self.assertEqual(utils.get_device(devices, "DUT").id, 0)
-
-    def test_get_device_default_many(self):
-        devices = [self.TestDevice(0), self.TestDevice(1)]
-        self.assertEqual(utils.get_device(devices, "DUT").id, 0)
-
-    def test_get_device_specified_one(self):
-        devices = [self.TestDevice(0), self.TestDevice(1, "DUT")]
-        self.assertEqual(utils.get_device(devices, "DUT").id, 1)
-
-    def test_get_device_specified_many(self):
-        devices = [self.TestDevice(0, "DUT"), self.TestDevice(1, "DUT")]
-        self.assertRaises(ValueError, utils.get_device, devices, "DUT")
-
-
-if __name__ == "__main__":
-    unittest.main()
diff --git a/src/antlion/unit_tests/event/decorators_test.py b/src/antlion/unit_tests/event/decorators_test.py
deleted file mode 100755
index 078e013..0000000
--- a/src/antlion/unit_tests/event/decorators_test.py
+++ /dev/null
@@ -1,143 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import unittest
-from unittest import TestCase
-
-from antlion.event import event_bus
-from antlion.event.decorators import register_instance_subscriptions
-from antlion.event.decorators import register_static_subscriptions
-from antlion.event.decorators import subscribe
-from antlion.event.decorators import subscribe_static
-from antlion.event.event import Event
-from antlion.event.subscription_handle import SubscriptionHandle
-from mock import Mock
-
-
-class DecoratorsTest(TestCase):
-    """Tests the decorators found in antlion.event.decorators."""
-
-    def test_subscribe_static_return_type(self):
-        """Tests that the subscribe_static is the correct type."""
-        mock = Mock()
-
-        @subscribe_static(type)
-        def test(_):
-            return mock
-
-        self.assertTrue(isinstance(test, SubscriptionHandle))
-
-    def test_subscribe_static_calling_the_function_returns_normally(self):
-        """Tests that functions decorated by subscribe_static can be called."""
-        static_mock = Mock()
-
-        @subscribe_static(type)
-        def test(_):
-            return static_mock
-
-        self.assertEqual(test(Mock()), static_mock)
-
-    class DummyClass(object):
-        def __init__(self):
-            self.mock = Mock()
-
-        @subscribe(type)
-        def test(self, _):
-            return self.mock
-
-    def test_subscribe_return_type(self):
-        """Tests that subscribe returns the correct type."""
-        dummy_class = DecoratorsTest.DummyClass()
-        self.assertTrue(isinstance(dummy_class.test, SubscriptionHandle))
-
-    def test_subscribe_calling_the_function_returns_normally(self):
-        """tests that functions decorated by subscribe can be called."""
-        dummy_class = DecoratorsTest.DummyClass()
-        self.assertEqual(dummy_class.test(""), dummy_class.mock)
-
-
-class DummyEvent(Event):
-    """A dummy event used for testing registered functions."""
-
-
-class RegisterStaticSubscriptionsTest(TestCase):
-    def test_register_static_subscriptions_returns_passed_in_object(self):
-        obj = Mock()
-        returned_value = register_static_subscriptions(obj)
-        self.assertEqual(
-            obj,
-            returned_value,
-            "register_static_subscriptions returned a value other"
-            "than the object passed in.",
-        )
-
-    def test_register_static_subscriptions_registers_properly(self):
-        @register_static_subscriptions
-        class RegisterStaticSubscriptionsClass(object):
-            captured_event = None
-
-            @staticmethod
-            @subscribe_static(DummyEvent)
-            def on_static_event(evt):
-                RegisterStaticSubscriptionsClass.captured_event = evt
-
-        event = DummyEvent()
-        event_bus.post(event)
-
-        self.assertEqual(
-            event,
-            RegisterStaticSubscriptionsClass.captured_event,
-            "register_static_subscriptions did not subscribe "
-            "RegisterStaticSubscriptionsClass.on_static_event.",
-        )
-
-
-class RegisterInstanceSubscriptionsTest(TestCase):
-    def test_register_instance_subscriptions_returns_passed_in_object(self):
-        class SomeClass(object):
-            pass
-
-        returned_value = register_instance_subscriptions(SomeClass)
-        self.assertEqual(
-            SomeClass,
-            returned_value,
-            "register_instance_subscriptions returned a value "
-            "other than the object passed in.",
-        )
-
-    def test_register_instance_subscriptions_registers_properly(self):
-        @register_instance_subscriptions
-        class RegisterInstanceSubscriptionsClass(object):
-            def __init__(self):
-                self.captured_event = None
-
-            @subscribe(DummyEvent)
-            def on_instance_event(self, evt):
-                self.captured_event = evt
-
-        instance = RegisterInstanceSubscriptionsClass()
-        event = DummyEvent()
-        event_bus.post(event)
-
-        self.assertEqual(
-            event,
-            instance.captured_event,
-            "register_instance_subscriptions did not subscribe the instance "
-            "function RegisterInstanceSubscriptionsClass.on_instance_event.",
-        )
-
-
-if __name__ == "__main__":
-    unittest.main()
diff --git a/src/antlion/unit_tests/event/event_bus_integration_test.py b/src/antlion/unit_tests/event/event_bus_integration_test.py
deleted file mode 100755
index 18ba573..0000000
--- a/src/antlion/unit_tests/event/event_bus_integration_test.py
+++ /dev/null
@@ -1,124 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import tempfile
-import unittest
-from unittest import TestCase
-
-import mobly.config_parser as mobly_config_parser
-
-from antlion.base_test import BaseTestClass
-from antlion.event import event_bus, subscription_bundle
-from antlion.event.decorators import subscribe, subscribe_static
-from antlion.event.event import Event
-from antlion.test_runner import TestRunner
-
-
-class TestClass(BaseTestClass):
-    instance_event_received = []
-    static_event_received = []
-
-    def __init__(self, configs):
-        import mock
-
-        self.log = mock.Mock()
-        with mock.patch("mobly.utils.create_dir"):
-            super().__init__(configs)
-
-    @subscribe(Event)
-    def subscribed_instance_member(self, event):
-        TestClass.instance_event_received.append(event)
-
-    @staticmethod
-    @subscribe_static(Event)
-    def subscribed_static_member(event):
-        TestClass.static_event_received.append(event)
-
-    def test_post_event(self):
-        event_bus.post(Event())
-
-
-class EventBusIntegrationTest(TestCase):
-    """Tests the EventBus E2E."""
-
-    def setUp(self):
-        """Clears the event bus of all state."""
-        self.called_event = False
-        event_bus._event_bus = event_bus._EventBus()
-        TestClass.instance_event_received = []
-        TestClass.static_event_received = []
-
-    def test_test_class_subscribed_fn_receives_event(self):
-        """Tests that TestClasses have their subscribed functions called."""
-        with tempfile.TemporaryDirectory() as tmp_dir:
-            test_run_config = mobly_config_parser.TestRunConfig()
-            test_run_config.testbed_name = "SampleTestBed"
-            test_run_config.log_path = tmp_dir
-
-            TestRunner(test_run_config, [("TestClass", [])]).run(TestClass)
-
-        self.assertGreaterEqual(len(TestClass.instance_event_received), 1)
-        self.assertEqual(len(TestClass.static_event_received), 0)
-
-    def test_subscribe_static_bundles(self):
-        """Tests that @subscribe_static bundles register their listeners."""
-        bundle = subscription_bundle.create_from_static(TestClass)
-        bundle.register()
-
-        event_bus.post(Event())
-
-        self.assertEqual(len(TestClass.instance_event_received), 0)
-        self.assertEqual(len(TestClass.static_event_received), 1)
-
-    def test_subscribe_instance_bundles(self):
-        """Tests that @subscribe bundles register only instance listeners."""
-        test_run_config = mobly_config_parser.TestRunConfig()
-        test_run_config.testbed_name = ""
-        test_run_config.log_path = ""
-        test_object = TestClass(test_run_config)
-        bundle = subscription_bundle.create_from_instance(test_object)
-        bundle.register()
-
-        event_bus.post(Event())
-
-        self.assertEqual(len(TestClass.instance_event_received), 1)
-        self.assertEqual(len(TestClass.static_event_received), 0)
-
-    def test_event_register(self):
-        """Tests that event.register()'d functions can receive posted Events."""
-
-        def event_listener(_):
-            self.called_event = True
-
-        event_bus.register(Event, event_listener)
-        event_bus.post(Event())
-
-        self.assertTrue(self.called_event)
-
-    def test_event_unregister(self):
-        """Tests that an event can be registered, and then unregistered."""
-
-        def event_listener(_):
-            self.called_event = False
-
-        registration_id = event_bus.register(Event, event_listener)
-        event_bus.unregister(registration_id)
-        event_bus.post(Event())
-
-        self.assertFalse(self.called_event)
-
-
-if __name__ == "__main__":
-    unittest.main()
diff --git a/src/antlion/unit_tests/event/subscription_bundle_test.py b/src/antlion/unit_tests/event/subscription_bundle_test.py
deleted file mode 100755
index 48d6fe0..0000000
--- a/src/antlion/unit_tests/event/subscription_bundle_test.py
+++ /dev/null
@@ -1,193 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import unittest
-from unittest import TestCase
-
-import sys
-from antlion.event import subscription_bundle
-from antlion.event.decorators import subscribe
-from antlion.event.decorators import subscribe_static
-from antlion.event.event import Event
-from antlion.event.subscription_bundle import SubscriptionBundle
-from mock import Mock
-from mock import patch
-
-
-class SubscriptionBundleTest(TestCase):
-    """Tests the SubscriptionBundle class."""
-
-    def test_add_calls_add_subscription_properly(self):
-        """Tests that the convenience function add() calls add_subscription."""
-        event = object()
-        func = object()
-        event_filter = object()
-        order = object()
-        package = SubscriptionBundle()
-        package.add_subscription = Mock()
-
-        package.add(event, func, event_filter=event_filter, order=order)
-
-        self.assertEqual(package.add_subscription.call_count, 1)
-        subscription = package.add_subscription.call_args[0][0]
-        self.assertEqual(subscription._event_type, event)
-        self.assertEqual(subscription._func, func)
-        self.assertEqual(subscription._event_filter, event_filter)
-        self.assertEqual(subscription.order, order)
-
-    @patch("antlion.event.event_bus.register_subscription")
-    def test_add_subscription_registers_sub_if_package_is_registered(
-        self, register_subscription
-    ):
-        """Tests that add_subscription registers the subscription if the
-        SubscriptionBundle is already registered."""
-        package = SubscriptionBundle()
-        package._registered = True
-        mock_subscription = Mock()
-
-        package.add_subscription(mock_subscription)
-
-        self.assertEqual(register_subscription.call_count, 1)
-        register_subscription.assert_called_with(mock_subscription)
-
-    def test_add_subscription_adds_to_subscriptions(self):
-        """Tests add_subscription adds the subscription to subscriptions."""
-        mock_subscription = Mock()
-        package = SubscriptionBundle()
-
-        package.add_subscription(mock_subscription)
-
-        self.assertTrue(mock_subscription in package.subscriptions.keys())
-
-    def test_remove_subscription_removes_subscription_from_subscriptions(self):
-        """Tests remove_subscription removes the given subscription from the
-        subscriptions dictionary."""
-        mock_subscription = Mock()
-        package = SubscriptionBundle()
-        package.subscriptions[mock_subscription] = id(mock_subscription)
-
-        package.remove_subscription(mock_subscription)
-
-        self.assertTrue(mock_subscription not in package.subscriptions.keys())
-
-    @patch("antlion.event.event_bus.unregister")
-    def test_remove_subscription_unregisters_subscription(self, unregister):
-        """Tests that removing a subscription will also unregister it if the
-        SubscriptionBundle is registered."""
-        mock_subscription = Mock()
-        package = SubscriptionBundle()
-        package._registered = True
-        package.subscriptions[mock_subscription] = id(mock_subscription)
-
-        package.remove_subscription(mock_subscription)
-
-        self.assertEqual(unregister.call_count, 1)
-        unregistered_obj = unregister.call_args[0][0]
-        self.assertTrue(
-            unregistered_obj == id(mock_subscription)
-            or unregistered_obj == mock_subscription
-        )
-
-    @patch("antlion.event.event_bus.register_subscription")
-    def test_register_registers_all_subscriptions(self, register_subscription):
-        """Tests register() registers all subscriptions within the bundle."""
-        mock_subscription_list = [Mock(), Mock(), Mock()]
-        package = SubscriptionBundle()
-        package._registered = False
-        for subscription in mock_subscription_list:
-            package.subscriptions[subscription] = None
-
-        package.register()
-
-        self.assertEqual(register_subscription.call_count, len(mock_subscription_list))
-        args = {args[0] for args, _ in register_subscription.call_args_list}
-        for subscription in mock_subscription_list:
-            self.assertTrue(subscription in args or id(subscription) in args)
-
-    @patch("antlion.event.event_bus.unregister")
-    def test_register_registers_all_subscriptions(self, unregister):
-        """Tests register() registers all subscriptions within the bundle."""
-        mock_subscription_list = [Mock(), Mock(), Mock()]
-        package = SubscriptionBundle()
-        package._registered = True
-        for subscription in mock_subscription_list:
-            package.subscriptions[subscription] = id(subscription)
-
-        package.unregister()
-
-        self.assertEqual(unregister.call_count, len(mock_subscription_list))
-        args = {args[0] for args, _ in unregister.call_args_list}
-        for subscription in mock_subscription_list:
-            self.assertTrue(subscription in args or id(subscription) in args)
-
-
-class SubscriptionBundleStaticFunctions(TestCase):
-    """Tests the static functions found in subscription_bundle.py"""
-
-    @staticmethod
-    @subscribe_static(Event)
-    def static_listener_1():
-        pass
-
-    @staticmethod
-    @subscribe_static(Event)
-    def static_listener_2():
-        pass
-
-    @subscribe(Event)
-    def instance_listener_1(self):
-        pass
-
-    @subscribe(Event)
-    def instance_listener_2(self):
-        pass
-
-    def test_create_from_static(self):
-        """Tests create_from_static gets all StaticSubscriptionHandles."""
-        cls = self.__class__
-        bundle = subscription_bundle.create_from_static(cls)
-
-        self.assertEqual(len(bundle.subscriptions), 2)
-        keys = bundle.subscriptions.keys()
-        self.assertIn(cls.static_listener_1.subscription, keys)
-        self.assertIn(cls.static_listener_2.subscription, keys)
-
-    def test_create_from_instance(self):
-        """Tests create_from_instance gets all InstanceSubscriptionHandles."""
-        bundle = subscription_bundle.create_from_instance(self)
-
-        self.assertEqual(len(bundle.subscriptions), 2)
-        keys = bundle.subscriptions.keys()
-        self.assertIn(self.instance_listener_1.subscription, keys)
-        self.assertIn(self.instance_listener_2.subscription, keys)
-
-
-@subscribe_static(Event)
-def static_listener_1():
-    pass
-
-
-class SubscribeStaticModuleLevelTest(TestCase):
-    def test_create_from_static(self):
-        """Tests create_from_static gets all StaticSubscriptionHandles."""
-        bundle = subscription_bundle.create_from_static(sys.modules[self.__module__])
-
-        self.assertEqual(len(bundle.subscriptions), 1)
-        keys = bundle.subscriptions.keys()
-        self.assertIn(static_listener_1.subscription, keys)
-
-
-if __name__ == "__main__":
-    unittest.main()
diff --git a/src/antlion/unit_tests/test_runner_test.py b/src/antlion/unit_tests/test_runner_test.py
deleted file mode 100755
index 4bdc456..0000000
--- a/src/antlion/unit_tests/test_runner_test.py
+++ /dev/null
@@ -1,122 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import os
-import shutil
-import tempfile
-import unittest
-
-from mobly.config_parser import TestRunConfig
-from mock import Mock
-from mock import patch
-
-from antlion import test_runner
-
-
-class TestRunnerTest(unittest.TestCase):
-    def setUp(self):
-        self.tmp_dir = tempfile.mkdtemp()
-        self.base_mock_test_config = TestRunConfig()
-        self.base_mock_test_config.testbed_name = "SampleTestBed"
-        self.base_mock_test_config.log_path = self.tmp_dir
-        self.base_mock_test_config.controller_configs = {"testpaths": ["./"]}
-        self.base_mock_test_config.user_params = {"icecream": 42, "extra_param": "haha"}
-
-    def tearDown(self):
-        shutil.rmtree(self.tmp_dir)
-
-    @staticmethod
-    def create_test_classes(class_names):
-        return {class_name: Mock() for class_name in class_names}
-
-    @patch("antlion.records.TestResult")
-    @patch.object(test_runner.TestRunner, "_write_results_to_file")
-    def test_class_name_pattern_single(self, *_):
-        class_names = ["test_class_1", "test_class_2"]
-        pattern = "test*1"
-        tr = test_runner.TestRunner(self.base_mock_test_config, [(pattern, None)])
-
-        test_classes = self.create_test_classes(class_names)
-        tr.import_test_modules = Mock(return_value=test_classes)
-        tr.run()
-        self.assertTrue(test_classes[class_names[0]].called)
-        self.assertFalse(test_classes[class_names[1]].called)
-
-    @patch("antlion.records.TestResult")
-    @patch.object(test_runner.TestRunner, "_write_results_to_file")
-    def test_class_name_pattern_multi(self, *_):
-        class_names = ["test_class_1", "test_class_2", "other_name"]
-        pattern = "test_class*"
-        tr = test_runner.TestRunner(self.base_mock_test_config, [(pattern, None)])
-
-        test_classes = self.create_test_classes(class_names)
-        tr.import_test_modules = Mock(return_value=test_classes)
-        tr.run()
-        self.assertTrue(test_classes[class_names[0]].called)
-        self.assertTrue(test_classes[class_names[1]].called)
-        self.assertFalse(test_classes[class_names[2]].called)
-
-    @patch("antlion.records.TestResult")
-    @patch.object(test_runner.TestRunner, "_write_results_to_file")
-    def test_class_name_pattern_question_mark(self, *_):
-        class_names = ["test_class_1", "test_class_12"]
-        pattern = "test_class_?"
-        tr = test_runner.TestRunner(self.base_mock_test_config, [(pattern, None)])
-
-        test_classes = self.create_test_classes(class_names)
-        tr.import_test_modules = Mock(return_value=test_classes)
-        tr.run()
-        self.assertTrue(test_classes[class_names[0]].called)
-        self.assertFalse(test_classes[class_names[1]].called)
-
-    @patch("antlion.records.TestResult")
-    @patch.object(test_runner.TestRunner, "_write_results_to_file")
-    def test_class_name_pattern_char_seq(self, *_):
-        class_names = ["test_class_1", "test_class_2", "test_class_3"]
-        pattern = "test_class_[1357]"
-        tr = test_runner.TestRunner(self.base_mock_test_config, [(pattern, None)])
-
-        test_classes = self.create_test_classes(class_names)
-        tr.import_test_modules = Mock(return_value=test_classes)
-        tr.run()
-        self.assertTrue(test_classes[class_names[0]].called)
-        self.assertFalse(test_classes[class_names[1]].called)
-        self.assertTrue(test_classes[class_names[2]].called)
-
-    @patch("antlion.records.TestResult")
-    @patch.object(test_runner.TestRunner, "dump_config")
-    @patch.object(test_runner.TestRunner, "_write_results_to_file")
-    @patch("antlion.test_runner.logger")
-    def test_class_logpath_contains_proper_directory(self, logger_mock, *_):
-        expected_timestamp = "1970-01-01_00-00-00-00-000000"
-        logger_mock.get_log_file_timestamp.return_value = expected_timestamp
-        tr = test_runner.TestRunner(self.base_mock_test_config, [("MockTest", None)])
-        mock_class = Mock()
-        tr.import_test_modules = Mock(return_value={"MockTest": mock_class})
-        tr.run()
-
-        self.assertEqual(
-            mock_class.call_args_list[0][0][0].log_path,
-            os.path.join(
-                self.tmp_dir,
-                self.base_mock_test_config.testbed_name,
-                expected_timestamp,
-            ),
-        )
-
-
-if __name__ == "__main__":
-    unittest.main()
diff --git a/src/antlion/utils.py b/src/antlion/utils.py
deleted file mode 100755
index 2e6a03c..0000000
--- a/src/antlion/utils.py
+++ /dev/null
@@ -1,2046 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 The Fuchsia Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import base64
-import concurrent.futures
-import copy
-import datetime
-import functools
-import ipaddress
-import json
-import logging
-import os
-import platform
-import random
-import re
-import signal
-import string
-import socket
-import subprocess
-import time
-import threading
-import traceback
-import zipfile
-from concurrent.futures import ThreadPoolExecutor
-
-import mobly.keys as mobly_keys
-from typing import Any
-import yaml
-
-from antlion import signals, keys
-from antlion.controllers.adb_lib.error import AdbError
-from antlion.libs.proc import job
-
-# File name length is limited to 255 chars on some OS, so we need to make sure
-# the file names we output fits within the limit.
-MAX_FILENAME_LEN = 255
-
-# All Fuchsia devices use this suffix for link-local mDNS host names.
-FUCHSIA_MDNS_TYPE = "_fuchsia._udp.local."
-
-# Default max seconds it takes to Duplicate Address Detection to finish before
-# assigning an IPv6 address.
-DAD_TIMEOUT_SEC = 30
-
-
-class ActsUtilsError(Exception):
-    """Generic error raised for exceptions in ACTS utils."""
-
-
-class NexusModelNames:
-    # TODO(angli): This will be fixed later by angli.
-    ONE = "sprout"
-    N5 = "hammerhead"
-    N5v2 = "bullhead"
-    N6 = "shamu"
-    N6v2 = "angler"
-    N6v3 = "marlin"
-    N5v3 = "sailfish"
-
-
-class DozeModeStatus:
-    ACTIVE = "ACTIVE"
-    IDLE = "IDLE"
-
-
-ascii_letters_and_digits = string.ascii_letters + string.digits
-valid_filename_chars = "-_." + ascii_letters_and_digits
-
-models = (
-    "sprout",
-    "occam",
-    "hammerhead",
-    "bullhead",
-    "razor",
-    "razorg",
-    "shamu",
-    "angler",
-    "volantis",
-    "volantisg",
-    "mantaray",
-    "fugu",
-    "ryu",
-    "marlin",
-    "sailfish",
-)
-
-manufacture_name_to_model = {
-    "flo": "razor",
-    "flo_lte": "razorg",
-    "flounder": "volantis",
-    "flounder_lte": "volantisg",
-    "dragon": "ryu",
-}
-
-GMT_to_olson = {
-    "GMT-9": "America/Anchorage",
-    "GMT-8": "US/Pacific",
-    "GMT-7": "US/Mountain",
-    "GMT-6": "US/Central",
-    "GMT-5": "US/Eastern",
-    "GMT-4": "America/Barbados",
-    "GMT-3": "America/Buenos_Aires",
-    "GMT-2": "Atlantic/South_Georgia",
-    "GMT-1": "Atlantic/Azores",
-    "GMT+0": "Africa/Casablanca",
-    "GMT+1": "Europe/Amsterdam",
-    "GMT+2": "Europe/Athens",
-    "GMT+3": "Europe/Moscow",
-    "GMT+4": "Asia/Baku",
-    "GMT+5": "Asia/Oral",
-    "GMT+6": "Asia/Almaty",
-    "GMT+7": "Asia/Bangkok",
-    "GMT+8": "Asia/Hong_Kong",
-    "GMT+9": "Asia/Tokyo",
-    "GMT+10": "Pacific/Guam",
-    "GMT+11": "Pacific/Noumea",
-    "GMT+12": "Pacific/Fiji",
-    "GMT+13": "Pacific/Tongatapu",
-    "GMT-11": "Pacific/Midway",
-    "GMT-10": "Pacific/Honolulu",
-}
-
-
-def abs_path(path):
-    """Resolve the '.' and '~' in a path to get the absolute path.
-
-    Args:
-        path: The path to expand.
-
-    Returns:
-        The absolute path of the input path.
-    """
-    return os.path.abspath(os.path.expanduser(path))
-
-
-def get_current_epoch_time():
-    """Current epoch time in milliseconds.
-
-    Returns:
-        An integer representing the current epoch time in milliseconds.
-    """
-    return int(round(time.time() * 1000))
-
-
-def get_current_human_time():
-    """Returns the current time in human readable format.
-
-    Returns:
-        The current time stamp in Month-Day-Year Hour:Min:Sec format.
-    """
-    return time.strftime("%m-%d-%Y %H:%M:%S ")
-
-
-def epoch_to_human_time(epoch_time):
-    """Converts an epoch timestamp to human readable time.
-
-    This essentially converts an output of get_current_epoch_time to an output
-    of get_current_human_time
-
-    Args:
-        epoch_time: An integer representing an epoch timestamp in milliseconds.
-
-    Returns:
-        A time string representing the input time.
-        None if input param is invalid.
-    """
-    if isinstance(epoch_time, int):
-        try:
-            d = datetime.datetime.fromtimestamp(epoch_time / 1000)
-            return d.strftime("%m-%d-%Y %H:%M:%S ")
-        except ValueError:
-            return None
-
-
-def get_timezone_olson_id():
-    """Return the Olson ID of the local (non-DST) timezone.
-
-    Returns:
-        A string representing one of the Olson IDs of the local (non-DST)
-        timezone.
-    """
-    tzoffset = int(time.timezone / 3600)
-    gmt = None
-    if tzoffset <= 0:
-        gmt = "GMT+{}".format(-tzoffset)
-    else:
-        gmt = "GMT-{}".format(tzoffset)
-    return GMT_to_olson[gmt]
-
-
-def get_next_device(test_bed_controllers, used_devices):
-    """Gets the next device in a list of testbed controllers
-
-    Args:
-        test_bed_controllers: A list of testbed controllers of a particular
-            type, for example a list ACTS Android devices.
-        used_devices: A list of devices that have been used.  This can be a
-            mix of devices, for example a fuchsia device and an Android device.
-    Returns:
-        The next device in the test_bed_controllers list or None if there are
-        no items that are not in the used devices list.
-    """
-    if test_bed_controllers:
-        device_list = test_bed_controllers
-    else:
-        raise ValueError("test_bed_controllers is empty.")
-    for used_device in used_devices:
-        if used_device in device_list:
-            device_list.remove(used_device)
-    if device_list:
-        return device_list[0]
-    else:
-        return None
-
-
-def find_files(paths, file_predicate):
-    """Locate files whose names and extensions match the given predicate in
-    the specified directories.
-
-    Args:
-        paths: A list of directory paths where to find the files.
-        file_predicate: A function that returns True if the file name and
-          extension are desired.
-
-    Returns:
-        A list of files that match the predicate.
-    """
-    file_list = []
-    if not isinstance(paths, list):
-        paths = [paths]
-    for path in paths:
-        p = abs_path(path)
-        for dirPath, subdirList, fileList in os.walk(p):
-            for fname in fileList:
-                name, ext = os.path.splitext(fname)
-                if file_predicate(name, ext):
-                    file_list.append((dirPath, name, ext))
-    return file_list
-
-
-def load_config(file_full_path, log_errors=True):
-    """Loads a JSON config file.
-
-    Returns:
-        A JSON object.
-    """
-    with open(file_full_path, "r") as f:
-        try:
-            return json.load(f)
-        except Exception as e:
-            if log_errors:
-                logging.error("Exception error to load %s: %s", f, e)
-            raise
-
-
-def acts_json_to_mobly_yaml(json_path: str) -> str:
-    acts_config = load_config(json_path)
-    mobly_config = acts_to_mobly_config(acts_config)
-    mobly_yaml_path = json_path.rsplit(".json", 1)[0] + ".yaml"
-    with open(mobly_yaml_path, "w") as f:
-        yaml.safe_dump(mobly_config, f)
-    return mobly_yaml_path
-
-
-def acts_to_mobly_config(acts_config: Any) -> Any:
-    """Convert ACTS JSON config to Mobly YAML config.
-
-    Args:
-        acts_config: Full ACTS config as an object
-
-    Returns:
-        Mobly config as an object.
-    """
-    if not acts_config:
-        return acts_config
-
-    mobly_config = {}
-    if keys.Config.key_log_path.value in acts_config:
-        mobly_config[mobly_keys.Config.key_mobly_params.value] = {
-            mobly_keys.Config.key_log_path.value: acts_config[
-                keys.Config.key_log_path.value
-            ],
-        }
-
-    if keys.Config.key_testbed.value in acts_config:
-        testbeds = []
-        for acts_testbed in acts_config[keys.Config.key_testbed.value]:
-            mobly_testbed = {}
-            if keys.Config.key_testbed_name.value in acts_testbed:
-                name = acts_testbed.pop(keys.Config.key_testbed_name.value)
-                mobly_testbed[mobly_keys.Config.key_testbed_name.value] = name
-            mobly_testbed[mobly_keys.Config.key_testbed_test_params.value] = {
-                **{
-                    k: acts_config[k]
-                    for k in acts_config
-                    if k not in keys.Config.reserved_keys.value
-                },
-                **acts_testbed.pop(mobly_keys.Config.key_testbed_test_params.value, {}),
-            }
-            mobly_testbed[
-                mobly_keys.Config.key_testbed_controllers.value
-            ] = acts_testbed
-            testbeds.append(mobly_testbed)
-
-        mobly_config[mobly_keys.Config.key_testbed.value] = testbeds
-
-    return mobly_config
-
-
-def load_file_to_base64_str(f_path):
-    """Loads the content of a file into a base64 string.
-
-    Args:
-        f_path: full path to the file including the file name.
-
-    Returns:
-        A base64 string representing the content of the file in utf-8 encoding.
-    """
-    path = abs_path(f_path)
-    with open(path, "rb") as f:
-        f_bytes = f.read()
-        base64_str = base64.b64encode(f_bytes).decode("utf-8")
-        return base64_str
-
-
-def dump_string_to_file(content, file_path, mode="w"):
-    """Dump content of a string to
-
-    Args:
-        content: content to be dumped to file
-        file_path: full path to the file including the file name.
-        mode: file open mode, 'w' (truncating file) by default
-    :return:
-    """
-    full_path = abs_path(file_path)
-    with open(full_path, mode) as f:
-        f.write(content)
-
-
-def list_of_dict_to_dict_of_dict(list_of_dicts, dict_key):
-    """Transforms a list of dicts to a dict of dicts.
-
-    For instance:
-    >>> list_of_dict_to_dict_of_dict([{'a': '1', 'b':'2'},
-    >>>                               {'a': '3', 'b':'4'}],
-    >>>                              'b')
-
-    returns:
-
-    >>> {'2': {'a': '1', 'b':'2'},
-    >>>  '4': {'a': '3', 'b':'4'}}
-
-    Args:
-        list_of_dicts: A list of dictionaries.
-        dict_key: The key in the inner dict to be used as the key for the
-                  outer dict.
-    Returns:
-        A dict of dicts.
-    """
-    return {d[dict_key]: d for d in list_of_dicts}
-
-
-def dict_purge_key_if_value_is_none(dictionary):
-    """Removes all pairs with value None from dictionary."""
-    for k, v in dict(dictionary).items():
-        if v is None:
-            del dictionary[k]
-    return dictionary
-
-
-def find_field(item_list, cond, comparator, target_field):
-    """Finds the value of a field in a dict object that satisfies certain
-    conditions.
-
-    Args:
-        item_list: A list of dict objects.
-        cond: A param that defines the condition.
-        comparator: A function that checks if an dict satisfies the condition.
-        target_field: Name of the field whose value to be returned if an item
-            satisfies the condition.
-
-    Returns:
-        Target value or None if no item satisfies the condition.
-    """
-    for item in item_list:
-        if comparator(item, cond) and target_field in item:
-            return item[target_field]
-    return None
-
-
-def rand_ascii_str(length):
-    """Generates a random string of specified length, composed of ascii letters
-    and digits.
-
-    Args:
-        length: The number of characters in the string.
-
-    Returns:
-        The random string generated.
-    """
-    letters = [random.choice(ascii_letters_and_digits) for i in range(length)]
-    return "".join(letters)
-
-
-def rand_hex_str(length):
-    """Generates a random string of specified length, composed of hex digits
-
-    Args:
-        length: The number of characters in the string.
-
-    Returns:
-        The random string generated.
-    """
-    letters = [random.choice(string.hexdigits) for i in range(length)]
-    return "".join(letters)
-
-
-# Thead/Process related functions.
-def concurrent_exec(func, param_list):
-    """Executes a function with different parameters pseudo-concurrently.
-
-    This is basically a map function. Each element (should be an iterable) in
-    the param_list is unpacked and passed into the function. Due to Python's
-    GIL, there's no true concurrency. This is suited for IO-bound tasks.
-
-    Args:
-        func: The function that parforms a task.
-        param_list: A list of iterables, each being a set of params to be
-            passed into the function.
-
-    Returns:
-        A list of return values from each function execution. If an execution
-        caused an exception, the exception object will be the corresponding
-        result.
-    """
-    with concurrent.futures.ThreadPoolExecutor(max_workers=30) as executor:
-        # Start the load operations and mark each future with its params
-        future_to_params = {executor.submit(func, *p): p for p in param_list}
-        return_vals = []
-        for future in concurrent.futures.as_completed(future_to_params):
-            params = future_to_params[future]
-            try:
-                return_vals.append(future.result())
-            except Exception as exc:
-                print(
-                    "{} generated an exception: {}".format(
-                        params, traceback.format_exc()
-                    )
-                )
-                return_vals.append(exc)
-        return return_vals
-
-
-def exe_cmd(*cmds):
-    """Executes commands in a new shell.
-
-    Args:
-        cmds: A sequence of commands and arguments.
-
-    Returns:
-        The output of the command run.
-
-    Raises:
-        OSError is raised if an error occurred during the command execution.
-    """
-    cmd = " ".join(cmds)
-    proc = subprocess.Popen(
-        cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True
-    )
-    (out, err) = proc.communicate()
-    if not err:
-        return out
-    raise OSError(err)
-
-
-def require_sl4a(android_devices):
-    """Makes sure sl4a connection is established on the given AndroidDevice
-    objects.
-
-    Args:
-        android_devices: A list of AndroidDevice objects.
-
-    Raises:
-        AssertionError is raised if any given android device does not have SL4A
-        connection established.
-    """
-    for ad in android_devices:
-        msg = "SL4A connection not established properly on %s." % ad.serial
-        assert ad.droid, msg
-
-
-def _assert_subprocess_running(proc):
-    """Checks if a subprocess has terminated on its own.
-
-    Args:
-        proc: A subprocess returned by subprocess.Popen.
-
-    Raises:
-        ActsUtilsError is raised if the subprocess has stopped.
-    """
-    ret = proc.poll()
-    if ret is not None:
-        out, err = proc.communicate()
-        raise ActsUtilsError(
-            "Process %d has terminated. ret: %d, stderr: %s,"
-            " stdout: %s" % (proc.pid, ret, err, out)
-        )
-
-
-def start_standing_subprocess(cmd, check_health_delay=0, shell=True):
-    """Starts a long-running subprocess.
-
-    This is not a blocking call and the subprocess started by it should be
-    explicitly terminated with stop_standing_subprocess.
-
-    For short-running commands, you should use exe_cmd, which blocks.
-
-    You can specify a health check after the subprocess is started to make sure
-    it did not stop prematurely.
-
-    Args:
-        cmd: string, the command to start the subprocess with.
-        check_health_delay: float, the number of seconds to wait after the
-                            subprocess starts to check its health. Default is 0,
-                            which means no check.
-
-    Returns:
-        The subprocess that got started.
-    """
-    proc = subprocess.Popen(
-        cmd,
-        stdout=subprocess.PIPE,
-        stderr=subprocess.PIPE,
-        shell=shell,
-        preexec_fn=os.setpgrp,
-    )
-    logging.debug("Start standing subprocess with cmd: %s", cmd)
-    if check_health_delay > 0:
-        time.sleep(check_health_delay)
-        _assert_subprocess_running(proc)
-    return proc
-
-
-def stop_standing_subprocess(proc, kill_signal=signal.SIGTERM):
-    """Stops a subprocess started by start_standing_subprocess.
-
-    Before killing the process, we check if the process is running, if it has
-    terminated, ActsUtilsError is raised.
-
-    Catches and ignores the PermissionError which only happens on Macs.
-
-    Args:
-        proc: Subprocess to terminate.
-    """
-    pid = proc.pid
-    logging.debug("Stop standing subprocess %d", pid)
-    _assert_subprocess_running(proc)
-    try:
-        os.killpg(pid, kill_signal)
-    except PermissionError:
-        pass
-
-
-def wait_for_standing_subprocess(proc, timeout=None):
-    """Waits for a subprocess started by start_standing_subprocess to finish
-    or times out.
-
-    Propagates the exception raised by the subprocess.wait(.) function.
-    The subprocess.TimeoutExpired exception is raised if the process timed-out
-    rather then terminating.
-
-    If no exception is raised: the subprocess terminated on its own. No need
-    to call stop_standing_subprocess() to kill it.
-
-    If an exception is raised: the subprocess is still alive - it did not
-    terminate. Either call stop_standing_subprocess() to kill it, or call
-    wait_for_standing_subprocess() to keep waiting for it to terminate on its
-    own.
-
-    Args:
-        p: Subprocess to wait for.
-        timeout: An integer number of seconds to wait before timing out.
-    """
-    proc.wait(timeout)
-
-
-def sync_device_time(ad):
-    """Sync the time of an android device with the current system time.
-
-    Both epoch time and the timezone will be synced.
-
-    Args:
-        ad: The android device to sync time on.
-    """
-    ad.adb.shell("settings put global auto_time 0", ignore_status=True)
-    ad.adb.shell("settings put global auto_time_zone 0", ignore_status=True)
-    droid = ad.droid
-    droid.setTimeZone(get_timezone_olson_id())
-    droid.setTime(get_current_epoch_time())
-
-
-# Timeout decorator block
-class TimeoutError(Exception):
-    """Exception for timeout decorator related errors."""
-
-
-def _timeout_handler(signum, frame):
-    """Handler function used by signal to terminate a timed out function."""
-    raise TimeoutError()
-
-
-def timeout(sec):
-    """A decorator used to add time out check to a function.
-
-    This only works in main thread due to its dependency on signal module.
-    Do NOT use it if the decorated funtion does not run in the Main thread.
-
-    Args:
-        sec: Number of seconds to wait before the function times out.
-            No timeout if set to 0
-
-    Returns:
-        What the decorated function returns.
-
-    Raises:
-        TimeoutError is raised when time out happens.
-    """
-
-    def decorator(func):
-        @functools.wraps(func)
-        def wrapper(*args, **kwargs):
-            if sec:
-                signal.signal(signal.SIGALRM, _timeout_handler)
-                signal.alarm(sec)
-            try:
-                return func(*args, **kwargs)
-            except TimeoutError:
-                raise TimeoutError(
-                    ("Function {} timed out after {} " "seconds.").format(
-                        func.__name__, sec
-                    )
-                )
-            finally:
-                signal.alarm(0)
-
-        return wrapper
-
-    return decorator
-
-
-def trim_model_name(model):
-    """Trim any prefix and postfix and return the android designation of the
-    model name.
-
-    e.g. "m_shamu" will be trimmed to "shamu".
-
-    Args:
-        model: model name to be trimmed.
-
-    Returns
-        Trimmed model name if one of the known model names is found.
-        None otherwise.
-    """
-    # Directly look up first.
-    if model in models:
-        return model
-    if model in manufacture_name_to_model:
-        return manufacture_name_to_model[model]
-    # If not found, try trimming off prefix/postfix and look up again.
-    tokens = re.split("_|-", model)
-    for t in tokens:
-        if t in models:
-            return t
-        if t in manufacture_name_to_model:
-            return manufacture_name_to_model[t]
-    return None
-
-
-def force_airplane_mode(ad, new_state, timeout_value=60):
-    """Force the device to set airplane mode on or off by adb shell command.
-
-    Args:
-        ad: android device object.
-        new_state: Turn on airplane mode if True.
-            Turn off airplane mode if False.
-        timeout_value: max wait time for 'adb wait-for-device'
-
-    Returns:
-        True if success.
-        False if timeout.
-    """
-
-    # Using timeout decorator.
-    # Wait for device with timeout. If after <timeout_value> seconds, adb
-    # is still waiting for device, throw TimeoutError exception.
-    @timeout(timeout_value)
-    def wait_for_device_with_timeout(ad):
-        ad.adb.wait_for_device()
-
-    try:
-        wait_for_device_with_timeout(ad)
-        ad.adb.shell(
-            "settings put global airplane_mode_on {}".format(1 if new_state else 0)
-        )
-        ad.adb.shell("am broadcast -a android.intent.action.AIRPLANE_MODE")
-    except TimeoutError:
-        # adb wait for device timeout
-        return False
-    return True
-
-
-def get_battery_level(ad):
-    """Gets battery level from device
-
-    Returns:
-        battery_level: int indicating battery level
-    """
-    output = ad.adb.shell("dumpsys battery")
-    match = re.search(r"level: (?P<battery_level>\S+)", output)
-    battery_level = int(match.group("battery_level"))
-    return battery_level
-
-
-def get_device_usb_charging_status(ad):
-    """Returns the usb charging status of the device.
-
-    Args:
-        ad: android device object
-
-    Returns:
-        True if charging
-        False if not charging
-    """
-    adb_shell_result = ad.adb.shell("dumpsys deviceidle get charging")
-    ad.log.info("Device Charging State: {}".format(adb_shell_result))
-    return adb_shell_result == "true"
-
-
-def disable_usb_charging(ad):
-    """Unplug device from usb charging.
-
-    Args:
-        ad: android device object
-
-    Returns:
-        True if device is unplugged
-        False otherwise
-    """
-    ad.adb.shell("dumpsys battery unplug")
-    if not get_device_usb_charging_status(ad):
-        return True
-    else:
-        ad.log.info("Could not disable USB charging")
-        return False
-
-
-def enable_usb_charging(ad):
-    """Plug device to usb charging.
-
-    Args:
-        ad: android device object
-
-    Returns:
-        True if device is Plugged
-        False otherwise
-    """
-    ad.adb.shell("dumpsys battery reset")
-    if get_device_usb_charging_status(ad):
-        return True
-    else:
-        ad.log.info("Could not enable USB charging")
-        return False
-
-
-def enable_doze(ad):
-    """Force the device into doze mode.
-
-    Args:
-        ad: android device object.
-
-    Returns:
-        True if device is in doze mode.
-        False otherwise.
-    """
-    ad.adb.shell("dumpsys battery unplug")
-    ad.adb.shell("dumpsys deviceidle enable")
-    ad.adb.shell("dumpsys deviceidle force-idle")
-    ad.droid.goToSleepNow()
-    time.sleep(5)
-    adb_shell_result = ad.adb.shell("dumpsys deviceidle get deep")
-    if not adb_shell_result.startswith(DozeModeStatus.IDLE):
-        info = "dumpsys deviceidle get deep: {}".format(adb_shell_result)
-        print(info)
-        return False
-    return True
-
-
-def disable_doze(ad):
-    """Force the device not in doze mode.
-
-    Args:
-        ad: android device object.
-
-    Returns:
-        True if device is not in doze mode.
-        False otherwise.
-    """
-    ad.adb.shell("dumpsys deviceidle disable")
-    ad.adb.shell("dumpsys battery reset")
-    adb_shell_result = ad.adb.shell("dumpsys deviceidle get deep")
-    if not adb_shell_result.startswith(DozeModeStatus.ACTIVE):
-        info = "dumpsys deviceidle get deep: {}".format(adb_shell_result)
-        print(info)
-        return False
-    return True
-
-
-def enable_doze_light(ad):
-    """Force the device into doze light mode.
-
-    Args:
-        ad: android device object.
-
-    Returns:
-        True if device is in doze light mode.
-        False otherwise.
-    """
-    ad.adb.shell("dumpsys battery unplug")
-    ad.droid.goToSleepNow()
-    time.sleep(5)
-    ad.adb.shell("cmd deviceidle enable light")
-    ad.adb.shell("cmd deviceidle step light")
-    adb_shell_result = ad.adb.shell("dumpsys deviceidle get light")
-    if not adb_shell_result.startswith(DozeModeStatus.IDLE):
-        info = "dumpsys deviceidle get light: {}".format(adb_shell_result)
-        print(info)
-        return False
-    return True
-
-
-def disable_doze_light(ad):
-    """Force the device not in doze light mode.
-
-    Args:
-        ad: android device object.
-
-    Returns:
-        True if device is not in doze light mode.
-        False otherwise.
-    """
-    ad.adb.shell("dumpsys battery reset")
-    ad.adb.shell("cmd deviceidle disable light")
-    adb_shell_result = ad.adb.shell("dumpsys deviceidle get light")
-    if not adb_shell_result.startswith(DozeModeStatus.ACTIVE):
-        info = "dumpsys deviceidle get light: {}".format(adb_shell_result)
-        print(info)
-        return False
-    return True
-
-
-def set_ambient_display(ad, new_state):
-    """Set "Ambient Display" in Settings->Display
-
-    Args:
-        ad: android device object.
-        new_state: new state for "Ambient Display". True or False.
-    """
-    ad.adb.shell("settings put secure doze_enabled {}".format(1 if new_state else 0))
-
-
-def set_adaptive_brightness(ad, new_state):
-    """Set "Adaptive Brightness" in Settings->Display
-
-    Args:
-        ad: android device object.
-        new_state: new state for "Adaptive Brightness". True or False.
-    """
-    ad.adb.shell(
-        "settings put system screen_brightness_mode {}".format(1 if new_state else 0)
-    )
-
-
-def set_auto_rotate(ad, new_state):
-    """Set "Auto-rotate" in QuickSetting
-
-    Args:
-        ad: android device object.
-        new_state: new state for "Auto-rotate". True or False.
-    """
-    ad.adb.shell(
-        "settings put system accelerometer_rotation {}".format(1 if new_state else 0)
-    )
-
-
-def set_location_service(ad, new_state):
-    """Set Location service on/off in Settings->Location
-
-    Args:
-        ad: android device object.
-        new_state: new state for "Location service".
-            If new_state is False, turn off location service.
-            If new_state if True, set location service to "High accuracy".
-    """
-    ad.adb.shell(
-        "content insert --uri "
-        " content://com.google.settings/partner --bind "
-        "name:s:network_location_opt_in --bind value:s:1"
-    )
-    ad.adb.shell(
-        "content insert --uri "
-        " content://com.google.settings/partner --bind "
-        "name:s:use_location_for_services --bind value:s:1"
-    )
-    if new_state:
-        ad.adb.shell("settings put secure location_mode 3")
-    else:
-        ad.adb.shell("settings put secure location_mode 0")
-
-
-def set_mobile_data_always_on(ad, new_state):
-    """Set Mobile_Data_Always_On feature bit
-
-    Args:
-        ad: android device object.
-        new_state: new state for "mobile_data_always_on"
-            if new_state is False, set mobile_data_always_on disabled.
-            if new_state if True, set mobile_data_always_on enabled.
-    """
-    ad.adb.shell(
-        "settings put global mobile_data_always_on {}".format(1 if new_state else 0)
-    )
-
-
-def bypass_setup_wizard(ad):
-    """Bypass the setup wizard on an input Android device
-
-    Args:
-        ad: android device object.
-
-    Returns:
-        True if Android device successfully bypassed the setup wizard.
-        False if failed.
-    """
-    try:
-        ad.adb.shell(
-            'am start -n "com.google.android.setupwizard/' '.SetupWizardExitActivity"'
-        )
-        logging.debug("No error during default bypass call.")
-    except AdbError as adb_error:
-        if adb_error.stdout == "ADB_CMD_OUTPUT:0":
-            if adb_error.stderr and not adb_error.stderr.startswith("Error type 3\n"):
-                logging.error("ADB_CMD_OUTPUT:0, but error is %s " % adb_error.stderr)
-                raise adb_error
-            logging.debug(
-                "Bypass wizard call received harmless error 3: " "No setup to bypass."
-            )
-        elif adb_error.stdout == "ADB_CMD_OUTPUT:255":
-            # Run it again as root.
-            ad.adb.root_adb()
-            logging.debug("Need root access to bypass setup wizard.")
-            try:
-                ad.adb.shell(
-                    'am start -n "com.google.android.setupwizard/'
-                    '.SetupWizardExitActivity"'
-                )
-                logging.debug("No error during rooted bypass call.")
-            except AdbError as adb_error:
-                if adb_error.stdout == "ADB_CMD_OUTPUT:0":
-                    if adb_error.stderr and not adb_error.stderr.startswith(
-                        "Error type 3\n"
-                    ):
-                        logging.error(
-                            "Rooted ADB_CMD_OUTPUT:0, but error is "
-                            "%s " % adb_error.stderr
-                        )
-                        raise adb_error
-                    logging.debug(
-                        "Rooted bypass wizard call received harmless "
-                        "error 3: No setup to bypass."
-                    )
-
-    # magical sleep to wait for the gservices override broadcast to complete
-    time.sleep(3)
-
-    provisioned_state = int(ad.adb.shell("settings get global device_provisioned"))
-    if provisioned_state != 1:
-        logging.error("Failed to bypass setup wizard.")
-        return False
-    logging.debug("Setup wizard successfully bypassed.")
-    return True
-
-
-def parse_ping_ouput(ad, count, out, loss_tolerance=20):
-    """Ping Parsing util.
-
-    Args:
-        ad: Android Device Object.
-        count: Number of ICMP packets sent
-        out: shell output text of ping operation
-        loss_tolerance: Threshold after which flag test as false
-    Returns:
-        False: if packet loss is more than loss_tolerance%
-        True: if all good
-    """
-    result = re.search(
-        r"(\d+) packets transmitted, (\d+) received, (\d+)% packet loss", out
-    )
-    if not result:
-        ad.log.info("Ping failed with %s", out)
-        return False
-
-    packet_loss = int(result.group(3))
-    packet_xmit = int(result.group(1))
-    packet_rcvd = int(result.group(2))
-    min_packet_xmit_rcvd = (100 - loss_tolerance) * 0.01
-    if (
-        packet_loss > loss_tolerance
-        or packet_xmit < count * min_packet_xmit_rcvd
-        or packet_rcvd < count * min_packet_xmit_rcvd
-    ):
-        ad.log.error(
-            "%s, ping failed with loss more than tolerance %s%%",
-            result.group(0),
-            loss_tolerance,
-        )
-        return False
-    ad.log.info("Ping succeed with %s", result.group(0))
-    return True
-
-
-def adb_shell_ping(
-    ad, count=120, dest_ip="www.google.com", timeout=200, loss_tolerance=20
-):
-    """Ping utility using adb shell.
-
-    Args:
-        ad: Android Device Object.
-        count: Number of ICMP packets to send
-        dest_ip: hostname or IP address
-                 default www.google.com
-        timeout: timeout for icmp pings to complete.
-    """
-    ping_cmd = "ping -W 1"
-    if count:
-        ping_cmd += " -c %d" % count
-    if dest_ip:
-        ping_cmd += " %s" % dest_ip
-    try:
-        ad.log.info("Starting ping test to %s using adb command %s", dest_ip, ping_cmd)
-        out = ad.adb.shell(ping_cmd, timeout=timeout, ignore_status=True)
-        if not parse_ping_ouput(ad, count, out, loss_tolerance):
-            return False
-        return True
-    except Exception as e:
-        ad.log.warning("Ping Test to %s failed with exception %s", dest_ip, e)
-        return False
-
-
-def zip_directory(zip_name, src_dir):
-    """Compress a directory to a .zip file.
-
-    This implementation is thread-safe.
-
-    Args:
-        zip_name: str, name of the generated archive
-        src_dir: str, path to the source directory
-    """
-    with zipfile.ZipFile(zip_name, "w", zipfile.ZIP_DEFLATED) as zip:
-        for root, dirs, files in os.walk(src_dir):
-            for file in files:
-                path = os.path.join(root, file)
-                zip.write(path, os.path.relpath(path, src_dir))
-
-
-def unzip_maintain_permissions(zip_path, extract_location):
-    """Unzip a .zip file while maintaining permissions.
-
-    Args:
-        zip_path: The path to the zipped file.
-        extract_location: the directory to extract to.
-    """
-    with zipfile.ZipFile(zip_path, "r") as zip_file:
-        for info in zip_file.infolist():
-            _extract_file(zip_file, info, extract_location)
-
-
-def _extract_file(zip_file, zip_info, extract_location):
-    """Extracts a single entry from a ZipFile while maintaining permissions.
-
-    Args:
-        zip_file: A zipfile.ZipFile.
-        zip_info: A ZipInfo object from zip_file.
-        extract_location: The directory to extract to.
-    """
-    out_path = zip_file.extract(zip_info.filename, path=extract_location)
-    perm = zip_info.external_attr >> 16
-    os.chmod(out_path, perm)
-
-
-def get_directory_size(path):
-    """Computes the total size of the files in a directory, including subdirectories.
-
-    Args:
-        path: The path of the directory.
-    Returns:
-        The size of the provided directory.
-    """
-    total = 0
-    for dirpath, dirnames, filenames in os.walk(path):
-        for filename in filenames:
-            total += os.path.getsize(os.path.join(dirpath, filename))
-    return total
-
-
-def get_command_uptime(command_regex):
-    """Returns the uptime for a given command.
-
-    Args:
-        command_regex: A regex that matches the command line given. Must be
-            pgrep compatible.
-    """
-    pid = job.run("pgrep -f %s" % command_regex).stdout
-    runtime = ""
-    if pid:
-        runtime = job.run('ps -o etime= -p "%s"' % pid).stdout
-    return runtime
-
-
-def get_process_uptime(process):
-    """Returns the runtime in [[dd-]hh:]mm:ss, or '' if not running."""
-    pid = job.run("pidof %s" % process, ignore_status=True).stdout
-    runtime = ""
-    if pid:
-        runtime = job.run('ps -o etime= -p "%s"' % pid).stdout
-    return runtime
-
-
-def get_device_process_uptime(adb, process):
-    """Returns the uptime of a device process."""
-    pid = adb.shell("pidof %s" % process, ignore_status=True)
-    runtime = ""
-    if pid:
-        runtime = adb.shell('ps -o etime= -p "%s"' % pid)
-    return runtime
-
-
-def wait_until(func, timeout_s, condition=True, sleep_s=1.0):
-    """Executes a function repeatedly until condition is met.
-
-    Args:
-      func: The function pointer to execute.
-      timeout_s: Amount of time (in seconds) to wait before raising an
-                 exception.
-      condition: The ending condition of the WaitUntil loop.
-      sleep_s: The amount of time (in seconds) to sleep between each function
-               execution.
-
-    Returns:
-      The time in seconds before detecting a successful condition.
-
-    Raises:
-      TimeoutError: If the condition was never met and timeout is hit.
-    """
-    start_time = time.time()
-    end_time = start_time + timeout_s
-    count = 0
-    while True:
-        count += 1
-        if func() == condition:
-            return time.time() - start_time
-        if time.time() > end_time:
-            break
-        time.sleep(sleep_s)
-    raise TimeoutError(
-        "Failed to complete function %s in %d seconds having "
-        "attempted %d times." % (str(func), timeout_s, count)
-    )
-
-
-# Adapted from
-# https://en.wikibooks.org/wiki/Algorithm_Implementation/Strings/Levenshtein_distance#Python
-# Available under the Creative Commons Attribution-ShareAlike License
-def levenshtein(string1, string2):
-    """Returns the Levenshtein distance of two strings.
-    Uses Dynamic Programming approach, only keeping track of
-    two rows of the DP table at a time.
-
-    Args:
-      string1: String to compare to string2
-      string2: String to compare to string1
-
-    Returns:
-      distance: the Levenshtein distance between string1 and string2
-    """
-
-    if len(string1) < len(string2):
-        return levenshtein(string2, string1)
-
-    if len(string2) == 0:
-        return len(string1)
-
-    previous_row = range(len(string2) + 1)
-    for i, char1 in enumerate(string1):
-        current_row = [i + 1]
-        for j, char2 in enumerate(string2):
-            insertions = previous_row[j + 1] + 1
-            deletions = current_row[j] + 1
-            substitutions = previous_row[j] + (char1 != char2)
-            current_row.append(min(insertions, deletions, substitutions))
-        previous_row = current_row
-
-    return previous_row[-1]
-
-
-def string_similarity(s1, s2):
-    """Returns a similarity measurement based on Levenshtein distance.
-
-    Args:
-      s1: the string to compare to s2
-      s2: the string to compare to s1
-
-    Returns:
-      result: the similarity metric
-    """
-    lev = levenshtein(s1, s2)
-    try:
-        lev_ratio = float(lev) / max(len(s1), len(s2))
-        result = (1.0 - lev_ratio) * 100
-    except ZeroDivisionError:
-        result = 100 if not s2 else 0
-    return float(result)
-
-
-def run_concurrent_actions_no_raise(*calls):
-    """Concurrently runs all callables passed in using multithreading.
-
-    Example:
-
-    >>> def test_function_1(arg1, arg2):
-    >>>     return arg1, arg2
-    >>>
-    >>> def test_function_2(arg1, kwarg='kwarg'):
-    >>>     raise arg1(kwarg)
-    >>>
-    >>> run_concurrent_actions_no_raise(
-    >>>     lambda: test_function_1('arg1', 'arg2'),
-    >>>     lambda: test_function_2(IndexError, kwarg='kwarg'),
-    >>> )
-    >>> # Output:
-    >>> [('arg1', 'arg2'), IndexError('kwarg')]
-
-    Args:
-        *calls: A *args list of argumentless callable objects to be called. Note
-            that if a function has arguments it can be turned into an
-            argumentless function via the lambda keyword or functools.partial.
-
-    Returns:
-        An array of the returned values or exceptions received from calls,
-        respective of the order given.
-    """
-    with ThreadPoolExecutor(max_workers=len(calls)) as executor:
-        futures = [executor.submit(call) for call in calls]
-
-    results = []
-    for future in futures:
-        try:
-            results.append(future.result())
-        except Exception as e:
-            results.append(e)
-    return results
-
-
-def run_concurrent_actions(*calls):
-    """Runs all callables passed in concurrently using multithreading.
-
-    Examples:
-
-    >>> def test_function_1(arg1, arg2):
-    >>>     print(arg1, arg2)
-    >>>
-    >>> def test_function_2(arg1, kwarg='kwarg'):
-    >>>     raise arg1(kwarg)
-    >>>
-    >>> run_concurrent_actions(
-    >>>     lambda: test_function_1('arg1', 'arg2'),
-    >>>     lambda: test_function_2(IndexError, kwarg='kwarg'),
-    >>> )
-    >>> 'The above line raises IndexError("kwarg")'
-
-    Args:
-        *calls: A *args list of argumentless callable objects to be called. Note
-            that if a function has arguments it can be turned into an
-            argumentless function via the lambda keyword or functools.partial.
-
-    Returns:
-        An array of the returned values respective of the order of the calls
-        argument.
-
-    Raises:
-        If an exception is raised in any of the calls, the first exception
-        caught will be raised.
-    """
-    first_exception = None
-
-    class WrappedException(Exception):
-        """Raised when a passed-in callable raises an exception."""
-
-    def call_wrapper(call):
-        nonlocal first_exception
-
-        try:
-            return call()
-        except Exception as e:
-            logging.exception(e)
-            # Note that there is a potential race condition between two
-            # exceptions setting first_exception. Even if a locking mechanism
-            # was added to prevent this from happening, it is still possible
-            # that we capture the second exception as the first exception, as
-            # the active thread can swap to the thread that raises the second
-            # exception. There is no way to solve this with the tools we have
-            # here, so we do not bother. The effects this issue has on the
-            # system as a whole are negligible.
-            if first_exception is None:
-                first_exception = e
-            raise WrappedException(e)
-
-    with ThreadPoolExecutor(max_workers=len(calls)) as executor:
-        futures = [executor.submit(call_wrapper, call) for call in calls]
-
-    results = []
-    for future in futures:
-        try:
-            results.append(future.result())
-        except WrappedException:
-            # We do not need to raise here, since first_exception will already
-            # be set to the first exception raised by these callables.
-            break
-
-    if first_exception:
-        raise first_exception
-
-    return results
-
-
-def test_concurrent_actions(*calls, failure_exceptions=(Exception,)):
-    """Concurrently runs all passed in calls using multithreading.
-
-    If any callable raises an Exception found within failure_exceptions, the
-    test case is marked as a failure.
-
-    Example:
-    >>> def test_function_1(arg1, arg2):
-    >>>     print(arg1, arg2)
-    >>>
-    >>> def test_function_2(kwarg='kwarg'):
-    >>>     raise IndexError(kwarg)
-    >>>
-    >>> test_concurrent_actions(
-    >>>     lambda: test_function_1('arg1', 'arg2'),
-    >>>     lambda: test_function_2(kwarg='kwarg'),
-    >>>     failure_exceptions=IndexError
-    >>> )
-    >>> 'raises signals.TestFailure due to IndexError being raised.'
-
-    Args:
-        *calls: A *args list of argumentless callable objects to be called. Note
-            that if a function has arguments it can be turned into an
-            argumentless function via the lambda keyword or functools.partial.
-        failure_exceptions: A tuple of all possible Exceptions that will mark
-            the test as a FAILURE. Any exception that is not in this list will
-            mark the tests as UNKNOWN.
-
-    Returns:
-        An array of the returned values respective of the order of the calls
-        argument.
-
-    Raises:
-        signals.TestFailure if any call raises an Exception.
-    """
-    try:
-        return run_concurrent_actions(*calls)
-    except signals.TestFailure:
-        # Do not modify incoming test failures
-        raise
-    except failure_exceptions as e:
-        raise signals.TestFailure(e)
-
-
-class SuppressLogOutput(object):
-    """Context manager used to suppress all logging output for the specified
-    logger and level(s).
-    """
-
-    def __init__(self, logger=logging.getLogger(), log_levels=None):
-        """Create a SuppressLogOutput context manager
-
-        Args:
-            logger: The logger object to suppress
-            log_levels: Levels of log handlers to disable.
-        """
-
-        self._logger = logger
-        self._log_levels = log_levels or [
-            logging.DEBUG,
-            logging.INFO,
-            logging.WARNING,
-            logging.ERROR,
-            logging.CRITICAL,
-        ]
-        if isinstance(self._log_levels, int):
-            self._log_levels = [self._log_levels]
-        self._handlers = copy.copy(self._logger.handlers)
-
-    def __enter__(self):
-        for handler in self._handlers:
-            if handler.level in self._log_levels:
-                self._logger.removeHandler(handler)
-        return self
-
-    def __exit__(self, *_):
-        for handler in self._handlers:
-            self._logger.addHandler(handler)
-
-
-class BlockingTimer(object):
-    """Context manager used to block until a specified amount of time has
-    elapsed.
-    """
-
-    def __init__(self, secs):
-        """Initializes a BlockingTimer
-
-        Args:
-            secs: Number of seconds to wait before exiting
-        """
-        self._thread = threading.Timer(secs, lambda: None)
-
-    def __enter__(self):
-        self._thread.start()
-        return self
-
-    def __exit__(self, *_):
-        self._thread.join()
-
-
-def is_valid_ipv4_address(address):
-    try:
-        socket.inet_pton(socket.AF_INET, address)
-    except AttributeError:  # no inet_pton here, sorry
-        try:
-            socket.inet_aton(address)
-        except socket.error:
-            return False
-        return address.count(".") == 3
-    except socket.error:  # not a valid address
-        return False
-
-    return True
-
-
-def is_valid_ipv6_address(address):
-    if "%" in address:
-        address = address.split("%")[0]
-    try:
-        socket.inet_pton(socket.AF_INET6, address)
-    except socket.error:  # not a valid address
-        return False
-    return True
-
-
-def merge_dicts(*dict_args):
-    """Merges args list of dictionaries into a single dictionary.
-
-    Args:
-        dict_args: an args list of dictionaries to be merged. If multiple
-            dictionaries share a key, the last in the list will appear in the
-            final result.
-    """
-    result = {}
-    for dictionary in dict_args:
-        result.update(dictionary)
-    return result
-
-
-def ascii_string(uc_string):
-    """Converts unicode string to ascii"""
-    return str(uc_string).encode("ASCII")
-
-
-def get_interface_ip_addresses(comm_channel, interface):
-    """Gets all of the ip addresses, ipv4 and ipv6, associated with a
-       particular interface name.
-
-    Args:
-        comm_channel: How to send commands to a device.  Can be ssh, adb serial,
-            etc.  Must have the run function implemented.
-        interface: The interface name on the device, ie eth0
-
-    Returns:
-        A list of dictionaries of the the various IP addresses:
-            ipv4_private: Any 192.168, 172.16, 10, or 169.254 addresses
-            ipv4_public: Any IPv4 public addresses
-            ipv6_link_local: Any fe80:: addresses
-            ipv6_private_local: Any fd00:: addresses
-            ipv6_public: Any publicly routable addresses
-    """
-    # Local imports are used here to prevent cyclic dependency.
-    from antlion.controllers.android_device import AndroidDevice
-    from antlion.controllers.fuchsia_device import FuchsiaDevice
-    from antlion.controllers.utils_lib.ssh.connection import SshConnection
-
-    is_local = comm_channel == job
-    if type(comm_channel) is AndroidDevice:
-        addrs = comm_channel.adb.shell(
-            f'ip -o addr show {interface} | awk \'{{gsub("/", " "); print $4}}\''
-        ).splitlines()
-    elif type(comm_channel) is SshConnection or is_local:
-        addrs = comm_channel.run(
-            f'ip -o addr show {interface} | awk \'{{gsub("/", " "); print $4}}\''
-        ).stdout.splitlines()
-    elif type(comm_channel) is FuchsiaDevice:
-        interfaces = comm_channel.sl4f.netstack_lib.netstackListInterfaces()
-        err = interfaces.get("error")
-        if err is not None:
-            raise ActsUtilsError(f"Failed get_interface_ip_addresses: {err}")
-        addrs = []
-        for item in interfaces.get("result"):
-            if item["name"] != interface:
-                continue
-            for ipv4_address in item["ipv4_addresses"]:
-                ipv4_address = ".".join(map(str, ipv4_address))
-                addrs.append(ipv4_address)
-            for ipv6_address in item["ipv6_addresses"]:
-                converted_ipv6_address = []
-                for octet in ipv6_address:
-                    converted_ipv6_address.append(format(octet, "x").zfill(2))
-                ipv6_address = "".join(converted_ipv6_address)
-                ipv6_address = ":".join(
-                    ipv6_address[i : i + 4] for i in range(0, len(ipv6_address), 4)
-                )
-                addrs.append(str(ipaddress.ip_address(ipv6_address)))
-    else:
-        raise ValueError("Unsupported method to send command to device.")
-
-    ipv4_private_local_addresses = []
-    ipv4_public_addresses = []
-    ipv6_link_local_addresses = []
-    ipv6_private_local_addresses = []
-    ipv6_public_addresses = []
-
-    for addr in addrs:
-        on_device_ip = ipaddress.ip_address(addr)
-        if on_device_ip.version == 4:
-            if on_device_ip.is_private:
-                ipv4_private_local_addresses.append(str(on_device_ip))
-            elif on_device_ip.is_global or (
-                # Carrier private doesn't have a property, so we check if
-                # all other values are left unset.
-                not on_device_ip.is_reserved
-                and not on_device_ip.is_unspecified
-                and not on_device_ip.is_link_local
-                and not on_device_ip.is_loopback
-                and not on_device_ip.is_multicast
-            ):
-                ipv4_public_addresses.append(str(on_device_ip))
-        elif on_device_ip.version == 6:
-            if on_device_ip.is_link_local:
-                ipv6_link_local_addresses.append(str(on_device_ip))
-            elif on_device_ip.is_private:
-                ipv6_private_local_addresses.append(str(on_device_ip))
-            elif on_device_ip.is_global:
-                ipv6_public_addresses.append(str(on_device_ip))
-
-    return {
-        "ipv4_private": ipv4_private_local_addresses,
-        "ipv4_public": ipv4_public_addresses,
-        "ipv6_link_local": ipv6_link_local_addresses,
-        "ipv6_private_local": ipv6_private_local_addresses,
-        "ipv6_public": ipv6_public_addresses,
-    }
-
-
-class AddressTimeout(signals.TestError):
-    pass
-
-
-class MultipleAddresses(signals.TestError):
-    pass
-
-
-def get_addr(comm_channel, interface, addr_type="ipv4_private", timeout_sec=None):
-    """Get the requested type of IP address for an interface; if an address is
-    not available, retry until the timeout has been reached.
-
-    Args:
-        addr_type: Type of address to get as defined by the return value of
-            utils.get_interface_ip_addresses.
-        timeout_sec: Seconds to wait to acquire an address if there isn't one
-            already available. If fetching an IPv4 address, the default is 3
-            seconds. If IPv6, the default is 30 seconds for Duplicate Address
-            Detection.
-
-    Returns:
-        A string containing the requested address.
-
-    Raises:
-        TestAbortClass: timeout_sec is None and invalid addr_type
-        AddressTimeout: No address is available after timeout_sec
-        MultipleAddresses: Several addresses are available
-    """
-    if not timeout_sec:
-        if "ipv4" in addr_type:
-            timeout_sec = 3
-        elif "ipv6" in addr_type:
-            timeout_sec = DAD_TIMEOUT_SEC
-        else:
-            raise signals.TestAbortClass(f'Unknown addr_type "{addr_type}"')
-
-    start = time.time()
-    elapsed = 0
-
-    while elapsed <= timeout_sec:
-        ip_addrs = get_interface_ip_addresses(comm_channel, interface)[addr_type]
-        if len(ip_addrs) > 1:
-            raise MultipleAddresses(
-                f'Expected only one "{addr_type}" address, got {ip_addrs}'
-            )
-        elif len(ip_addrs) == 1:
-            return ip_addrs[0]
-        elapsed = time.time() - start
-
-    raise AddressTimeout(f'No available "{addr_type}" address after {timeout_sec}s')
-
-
-def get_interface_based_on_ip(comm_channel, desired_ip_address):
-    """Gets the interface for a particular IP
-
-    Args:
-        comm_channel: How to send commands to a device.  Can be ssh, adb serial,
-            etc.  Must have the run function implemented.
-        desired_ip_address: The IP address that is being looked for on a device.
-
-    Returns:
-        The name of the test interface.
-    """
-
-    desired_ip_address = desired_ip_address.split("%", 1)[0]
-    all_ips_and_interfaces = comm_channel.run(
-        "(ip -o -4 addr show; ip -o -6 addr show) | " "awk '{print $2\" \"$4}'"
-    ).stdout
-    for ip_address_and_interface in all_ips_and_interfaces.split("\n"):
-        if desired_ip_address in ip_address_and_interface:
-            return ip_address_and_interface.split()[1][:-1]
-    return None
-
-
-def renew_linux_ip_address(comm_channel, interface):
-    comm_channel.run("sudo ip link set %s down" % interface)
-    comm_channel.run("sudo ip link set %s up" % interface)
-    comm_channel.run("sudo dhclient -r %s" % interface)
-    comm_channel.run("sudo dhclient %s" % interface)
-
-
-def get_ping_command(
-    dest_ip,
-    count=3,
-    interval=1000,
-    timeout=1000,
-    size=56,
-    os_type="Linux",
-    additional_ping_params=None,
-):
-    """Builds ping command string based on address type, os, and params.
-
-    Args:
-        dest_ip: string, address to ping (ipv4 or ipv6)
-        count: int, number of requests to send
-        interval: int, time in seconds between requests
-        timeout: int, time in seconds to wait for response
-        size: int, number of bytes to send,
-        os_type: string, os type of the source device (supports 'Linux',
-            'Darwin')
-        additional_ping_params: string, command option flags to
-            append to the command string
-
-    Returns:
-        List of string, represetning the ping command.
-    """
-    if is_valid_ipv4_address(dest_ip):
-        ping_binary = "ping"
-    elif is_valid_ipv6_address(dest_ip):
-        ping_binary = "ping6"
-    else:
-        raise ValueError("Invalid ip addr: %s" % dest_ip)
-
-    if os_type == "Darwin":
-        if is_valid_ipv6_address(dest_ip):
-            # ping6 on MacOS doesn't support timeout
-            logging.debug("Ignoring timeout, as ping6 on MacOS does not support it.")
-            timeout_flag = []
-        else:
-            timeout_flag = ["-t", str(timeout / 1000)]
-    elif os_type == "Linux":
-        timeout_flag = ["-W", str(timeout / 1000)]
-    else:
-        raise ValueError("Invalid OS.  Only Linux and MacOS are supported.")
-
-    if not additional_ping_params:
-        additional_ping_params = ""
-
-    ping_cmd = [
-        ping_binary,
-        *timeout_flag,
-        "-c",
-        str(count),
-        "-i",
-        str(interval / 1000),
-        "-s",
-        str(size),
-        additional_ping_params,
-        dest_ip,
-    ]
-    return " ".join(ping_cmd)
-
-
-def ping(
-    comm_channel,
-    dest_ip,
-    count=3,
-    interval=1000,
-    timeout=1000,
-    size=56,
-    additional_ping_params=None,
-):
-    """Generic linux ping function, supports local (acts.libs.proc.job) and
-    SshConnections (acts.libs.proc.job over ssh) to Linux based OSs and MacOS.
-
-    NOTES: This will work with Android over SSH, but does not function over ADB
-    as that has a unique return format.
-
-    Args:
-        comm_channel: communication channel over which to send ping command.
-            Must have 'run' function that returns at least command, stdout,
-            stderr, and exit_status (see acts.libs.proc.job)
-        dest_ip: address to ping (ipv4 or ipv6)
-        count: int, number of packets to send
-        interval: int, time in milliseconds between pings
-        timeout: int, time in milliseconds to wait for response
-        size: int, size of packets in bytes
-        additional_ping_params: string, command option flags to
-            append to the command string
-
-    Returns:
-        Dict containing:
-            command: string
-            exit_status: int (0 or 1)
-            stdout: string
-            stderr: string
-            transmitted: int, number of packets transmitted
-            received: int, number of packets received
-            packet_loss: int, percentage packet loss
-            time: int, time of ping command execution (in milliseconds)
-            rtt_min: float, minimum round trip time
-            rtt_avg: float, average round trip time
-            rtt_max: float, maximum round trip time
-            rtt_mdev: float, round trip time standard deviation
-
-        Any values that cannot be parsed are left as None
-    """
-    from antlion.controllers.utils_lib.ssh.connection import SshConnection
-
-    is_local = comm_channel == job
-    os_type = platform.system() if is_local else "Linux"
-    ping_cmd = get_ping_command(
-        dest_ip,
-        count=count,
-        interval=interval,
-        timeout=timeout,
-        size=size,
-        os_type=os_type,
-        additional_ping_params=additional_ping_params,
-    )
-
-    if type(comm_channel) is SshConnection or is_local:
-        logging.debug(
-            "Running ping with parameters (count: %s, interval: %s, timeout: "
-            "%s, size: %s)" % (count, interval, timeout, size)
-        )
-        ping_result = comm_channel.run(ping_cmd, ignore_status=True)
-    else:
-        raise ValueError("Unsupported comm_channel: %s" % type(comm_channel))
-
-    if isinstance(ping_result, job.Error):
-        ping_result = ping_result.result
-
-    transmitted = None
-    received = None
-    packet_loss = None
-    time = None
-    rtt_min = None
-    rtt_avg = None
-    rtt_max = None
-    rtt_mdev = None
-
-    summary = re.search(
-        "([0-9]+) packets transmitted.*?([0-9]+) received.*?([0-9]+)% packet "
-        "loss.*?time ([0-9]+)",
-        ping_result.stdout,
-    )
-    if summary:
-        transmitted = summary[1]
-        received = summary[2]
-        packet_loss = summary[3]
-        time = summary[4]
-
-    rtt_stats = re.search(
-        "= ([0-9.]+)/([0-9.]+)/([0-9.]+)/([0-9.]+)", ping_result.stdout
-    )
-    if rtt_stats:
-        rtt_min = rtt_stats[1]
-        rtt_avg = rtt_stats[2]
-        rtt_max = rtt_stats[3]
-        rtt_mdev = rtt_stats[4]
-
-    return {
-        "command": ping_result.command,
-        "exit_status": ping_result.exit_status,
-        "stdout": ping_result.stdout,
-        "stderr": ping_result.stderr,
-        "transmitted": transmitted,
-        "received": received,
-        "packet_loss": packet_loss,
-        "time": time,
-        "rtt_min": rtt_min,
-        "rtt_avg": rtt_avg,
-        "rtt_max": rtt_max,
-        "rtt_mdev": rtt_mdev,
-    }
-
-
-def can_ping(
-    comm_channel,
-    dest_ip,
-    count=3,
-    interval=1000,
-    timeout=1000,
-    size=56,
-    additional_ping_params=None,
-):
-    """Returns whether device connected via comm_channel can ping a dest
-    address"""
-    ping_results = ping(
-        comm_channel,
-        dest_ip,
-        count=count,
-        interval=interval,
-        timeout=timeout,
-        size=size,
-        additional_ping_params=additional_ping_params,
-    )
-
-    return ping_results["exit_status"] == 0
-
-
-def ip_in_subnet(ip, subnet):
-    """Validate that ip is in a given subnet.
-
-    Args:
-        ip: string, ip address to verify (eg. '192.168.42.158')
-        subnet: string, subnet to check (eg. '192.168.42.0/24')
-
-    Returns:
-        True, if ip in subnet, else False
-    """
-    return ipaddress.ip_address(ip) in ipaddress.ip_network(subnet)
-
-
-def mac_address_str_to_list(mac_addr_str):
-    """Converts mac address string to list of decimal octets.
-
-    Args:
-        mac_addr_string: string, mac address
-            e.g. '12:34:56:78:9a:bc'
-
-    Returns
-        list, representing mac address octets in decimal
-            e.g. [18, 52, 86, 120, 154, 188]
-    """
-    return [int(octet, 16) for octet in mac_addr_str.split(":")]
-
-
-def mac_address_list_to_str(mac_addr_list):
-    """Converts list of decimal octets represeting mac address to string.
-
-    Args:
-        mac_addr_list: list, representing mac address octets in decimal
-            e.g. [18, 52, 86, 120, 154, 188]
-
-    Returns:
-        string, mac address
-            e.g. '12:34:56:78:9a:bc'
-    """
-    hex_list = []
-    for octet in mac_addr_list:
-        hex_octet = hex(octet)[2:]
-        if octet < 16:
-            hex_list.append("0%s" % hex_octet)
-        else:
-            hex_list.append(hex_octet)
-
-    return ":".join(hex_list)
-
-
-def get_fuchsia_mdns_ipv6_address(device_mdns_name):
-    """Finds the IPv6 link-local address of a Fuchsia device matching a mDNS
-    name.
-
-    Args:
-        device_mdns_name: name of Fuchsia device (e.g. gig-clone-sugar-slash)
-
-    Returns:
-        string, IPv6 link-local address
-    """
-    import psutil
-    from zeroconf import IPVersion, Zeroconf
-
-    if not device_mdns_name:
-        return None
-
-    def mdns_query(interface, address):
-        logging.info(
-            f'Sending mDNS query for device "{device_mdns_name}" using "{address}"'
-        )
-        try:
-            zeroconf = Zeroconf(ip_version=IPVersion.V6Only, interfaces=[address])
-        except RuntimeError as e:
-            if "No adapter found for IP address" in e.args[0]:
-                # Most likely, a device went offline and its control
-                # interface was deleted. This is acceptable since the
-                # device that went offline isn't guaranteed to be the
-                # device we're searching for.
-                logging.warning('No adapter found for "%s"' % address)
-                return None
-            raise
-
-        device_records = zeroconf.get_service_info(
-            FUCHSIA_MDNS_TYPE, device_mdns_name + "." + FUCHSIA_MDNS_TYPE
-        )
-
-        if device_records:
-            for device_address in device_records.parsed_addresses():
-                device_ip_address = ipaddress.ip_address(device_address)
-                scoped_address = "%s%%%s" % (device_address, interface)
-                if (
-                    device_ip_address.version == 6
-                    and device_ip_address.is_link_local
-                    and can_ping(job, dest_ip=scoped_address)
-                ):
-                    logging.info(
-                        'Found device "%s" at "%s"' % (device_mdns_name, scoped_address)
-                    )
-                    zeroconf.close()
-                    del zeroconf
-                    return scoped_address
-
-        zeroconf.close()
-        del zeroconf
-        return None
-
-    with ThreadPoolExecutor() as executor:
-        futures = []
-
-        interfaces = psutil.net_if_addrs()
-        for interface in interfaces:
-            for addr in interfaces[interface]:
-                address = addr.address.split("%")[0]
-                if (
-                    addr.family == socket.AF_INET6
-                    and ipaddress.ip_address(address).is_link_local
-                    and address != "fe80::1"
-                ):
-                    futures.append(executor.submit(mdns_query, interface, address))
-
-        for future in futures:
-            addr = future.result()
-            if addr:
-                return addr
-
-    logging.error('Unable to find IP address for device "%s"' % device_mdns_name)
-    return None
-
-
-def get_device(devices, device_type):
-    """Finds a unique device with the specified "device_type" attribute from a
-    list. If none is found, defaults to the first device in the list.
-
-    Example:
-        get_device(android_devices, device_type="DUT")
-        get_device(fuchsia_devices, device_type="DUT")
-        get_device(android_devices + fuchsia_devices, device_type="DUT")
-
-    Args:
-        devices: A list of device controller objects.
-        device_type: (string) Type of device to find, specified by the
-            "device_type" attribute.
-
-    Returns:
-        The matching device controller object, or the first device in the list
-        if not found.
-
-    Raises:
-        ValueError is raised if none or more than one device is
-        matched.
-    """
-    if not devices:
-        raise ValueError("No devices available")
-
-    matches = [
-        d for d in devices if hasattr(d, "device_type") and d.device_type == device_type
-    ]
-
-    if len(matches) == 0:
-        # No matches for the specified "device_type", use the first device
-        # declared.
-        return devices[0]
-    if len(matches) > 1:
-        # Specifing multiple devices with the same "device_type" is a
-        # configuration error.
-        raise ValueError(
-            'More than one device matching "device_type" == "{}"'.format(device_type)
-        )
-
-    return matches[0]
diff --git a/stubs/README.md b/stubs/README.md
new file mode 100644
index 0000000..07ec6ae
--- /dev/null
+++ b/stubs/README.md
@@ -0,0 +1,11 @@
+# Python typing stubs
+
+Contains typing stubs for Python packages that do not expose typing of their
+own. Generated initially with [`stubgen`][stubgen] then manually modified to
+satisfy [`mypy`][mypy].
+
+> TODO(http://b/285005406): Contribute type annotations to Mobly, bump the
+> version of Mobly, then remove these type stubs.
+
+[stubgen]: https://mypy.readthedocs.io/en/stable/stubgen.html
+[mypy]: https://mypy.readthedocs.io/en/stable/
diff --git a/src/antlion/__init__.py b/stubs/mobly/__init__.pyi
similarity index 100%
copy from src/antlion/__init__.py
copy to stubs/mobly/__init__.pyi
diff --git a/stubs/mobly/asserts.pyi b/stubs/mobly/asserts.pyi
new file mode 100644
index 0000000..0fa0557
--- /dev/null
+++ b/stubs/mobly/asserts.pyi
@@ -0,0 +1,104 @@
+from _typeshed import Incomplete
+from mobly import signals as signals
+
+def assert_equal(
+    first, second, msg: Incomplete | None = ..., extras: Incomplete | None = ...
+) -> None: ...
+def assert_not_equal(
+    first, second, msg: Incomplete | None = ..., extras: Incomplete | None = ...
+) -> None: ...
+def assert_almost_equal(
+    first,
+    second,
+    places: Incomplete | None = ...,
+    msg: Incomplete | None = ...,
+    delta: Incomplete | None = ...,
+    extras: Incomplete | None = ...,
+) -> None: ...
+def assert_not_almost_equal(
+    first,
+    second,
+    places: Incomplete | None = ...,
+    msg: Incomplete | None = ...,
+    delta: Incomplete | None = ...,
+    extras: Incomplete | None = ...,
+) -> None: ...
+def assert_in(
+    member, container, msg: Incomplete | None = ..., extras: Incomplete | None = ...
+) -> None: ...
+def assert_not_in(
+    member, container, msg: Incomplete | None = ..., extras: Incomplete | None = ...
+) -> None: ...
+def assert_is(
+    expr1, expr2, msg: Incomplete | None = ..., extras: Incomplete | None = ...
+) -> None: ...
+def assert_is_not(
+    expr1, expr2, msg: Incomplete | None = ..., extras: Incomplete | None = ...
+) -> None: ...
+def assert_count_equal(
+    first, second, msg: Incomplete | None = ..., extras: Incomplete | None = ...
+) -> None: ...
+def assert_less(
+    a, b, msg: Incomplete | None = ..., extras: Incomplete | None = ...
+) -> None: ...
+def assert_less_equal(
+    a, b, msg: Incomplete | None = ..., extras: Incomplete | None = ...
+) -> None: ...
+def assert_greater(
+    a, b, msg: Incomplete | None = ..., extras: Incomplete | None = ...
+) -> None: ...
+def assert_greater_equal(
+    a, b, msg: Incomplete | None = ..., extras: Incomplete | None = ...
+) -> None: ...
+def assert_is_none(
+    obj, msg: Incomplete | None = ..., extras: Incomplete | None = ...
+) -> None: ...
+def assert_is_not_none(
+    obj, msg: Incomplete | None = ..., extras: Incomplete | None = ...
+) -> None: ...
+def assert_is_instance(
+    obj, cls, msg: Incomplete | None = ..., extras: Incomplete | None = ...
+) -> None: ...
+def assert_not_is_instance(
+    obj, cls, msg: Incomplete | None = ..., extras: Incomplete | None = ...
+) -> None: ...
+def assert_regex(
+    text, expected_regex, msg: Incomplete | None = ..., extras: Incomplete | None = ...
+) -> None: ...
+def assert_not_regex(
+    text,
+    unexpected_regex,
+    msg: Incomplete | None = ...,
+    extras: Incomplete | None = ...,
+) -> None: ...
+def assert_raises(
+    expected_exception, extras: Incomplete | None = ..., *args, **kwargs
+): ...
+def assert_raises_regex(
+    expected_exception, expected_regex, extras: Incomplete | None = ..., *args, **kwargs
+): ...
+def assert_true(expr, msg, extras: Incomplete | None = ...) -> None: ...
+def assert_false(expr, msg, extras: Incomplete | None = ...) -> None: ...
+def skip(reason, extras: Incomplete | None = ...) -> None: ...
+def skip_if(expr, reason, extras: Incomplete | None = ...) -> None: ...
+def abort_class(reason, extras: Incomplete | None = ...) -> None: ...
+def abort_class_if(expr, reason, extras: Incomplete | None = ...) -> None: ...
+def abort_all(reason, extras: Incomplete | None = ...) -> None: ...
+def abort_all_if(expr, reason, extras: Incomplete | None = ...) -> None: ...
+def fail(msg, extras: Incomplete | None = ...) -> None: ...
+def explicit_pass(msg, extras: Incomplete | None = ...) -> None: ...
+
+class _AssertRaisesContext:
+    expected: Incomplete
+    failureException: Incomplete
+    expected_regexp: Incomplete
+    extras: Incomplete
+    def __init__(
+        self,
+        expected,
+        expected_regexp: Incomplete | None = ...,
+        extras: Incomplete | None = ...,
+    ) -> None: ...
+    def __enter__(self): ...
+    exception: Incomplete
+    def __exit__(self, exc_type, exc_value, tb): ...
diff --git a/stubs/mobly/base_instrumentation_test.pyi b/stubs/mobly/base_instrumentation_test.pyi
new file mode 100644
index 0000000..b06f307
--- /dev/null
+++ b/stubs/mobly/base_instrumentation_test.pyi
@@ -0,0 +1,97 @@
+from enum import Enum
+
+from _typeshed import Incomplete
+from mobly import base_test as base_test
+from mobly import records as records
+from mobly import signals as signals
+from mobly import utils as utils
+
+class _InstrumentationStructurePrefixes:
+    STATUS: str
+    STATUS_CODE: str
+    RESULT: str
+    CODE: str
+    FAILED: str
+
+class _InstrumentationKnownStatusKeys:
+    CLASS: str
+    ERROR: str
+    STACK: str
+    TEST: str
+    STREAM: str
+
+class _InstrumentationStatusCodes:
+    UNKNOWN: Incomplete
+    OK: str
+    START: str
+    IN_PROGRESS: str
+    ERROR: str
+    FAILURE: str
+    IGNORED: str
+    ASSUMPTION_FAILURE: str
+
+class _InstrumentationStatusCodeCategories:
+    TIMING: Incomplete
+    PASS: Incomplete
+    FAIL: Incomplete
+    SKIPPED: Incomplete
+
+class _InstrumentationKnownResultKeys:
+    LONGMSG: str
+    SHORTMSG: str
+
+class _InstrumentationResultSignals:
+    FAIL: str
+    PASS: str
+
+class _InstrumentationBlockStates(Enum):
+    UNKNOWN: int
+    METHOD: int
+    RESULT: int
+
+class _InstrumentationBlock:
+    state: Incomplete
+    prefix: Incomplete
+    previous_instrumentation_block: Incomplete
+    error_message: str
+    status_code: Incomplete
+    current_key: Incomplete
+    known_keys: Incomplete
+    unknown_keys: Incomplete
+    begin_time: Incomplete
+    def __init__(
+        self,
+        state=...,
+        prefix: Incomplete | None = ...,
+        previous_instrumentation_block: Incomplete | None = ...,
+    ) -> None: ...
+    @property
+    def is_empty(self): ...
+    def set_error_message(self, error_message) -> None: ...
+    def set_status_code(self, status_code_line) -> None: ...
+    def set_key(self, structure_prefix, key_line) -> None: ...
+    def add_value(self, line) -> None: ...
+    def transition_state(self, new_state): ...
+
+class _InstrumentationBlockFormatter:
+    DEFAULT_INSTRUMENTATION_METHOD_NAME: str
+    def __init__(self, instrumentation_block) -> None: ...
+    def create_test_record(self, mobly_test_class): ...
+    def has_completed_result_block_format(self, error_message): ...
+
+class InstrumentationTestMixin:
+    DEFAULT_INSTRUMENTATION_OPTION_PREFIX: str
+    DEFAULT_INSTRUMENTATION_ERROR_MESSAGE: str
+    def parse_instrumentation_options(self, parameters: Incomplete | None = ...): ...
+    def run_instrumentation_test(
+        self,
+        device,
+        package,
+        options: Incomplete | None = ...,
+        prefix: Incomplete | None = ...,
+        runner: Incomplete | None = ...,
+    ): ...
+
+class BaseInstrumentationTestClass(
+    InstrumentationTestMixin, base_test.BaseTestClass
+): ...
diff --git a/stubs/mobly/base_suite.pyi b/stubs/mobly/base_suite.pyi
new file mode 100644
index 0000000..48912a5
--- /dev/null
+++ b/stubs/mobly/base_suite.pyi
@@ -0,0 +1,18 @@
+import abc
+
+from _typeshed import Incomplete
+
+class BaseSuite(abc.ABC, metaclass=abc.ABCMeta):
+    def __init__(self, runner, config) -> None: ...
+    @property
+    def user_params(self): ...
+    def add_test_class(
+        self,
+        clazz,
+        config: Incomplete | None = ...,
+        tests: Incomplete | None = ...,
+        name_suffix: Incomplete | None = ...,
+    ) -> None: ...
+    @abc.abstractmethod
+    def setup_suite(self, config): ...
+    def teardown_suite(self) -> None: ...
diff --git a/stubs/mobly/base_test.pyi b/stubs/mobly/base_test.pyi
new file mode 100644
index 0000000..1f1d7d1
--- /dev/null
+++ b/stubs/mobly/base_test.pyi
@@ -0,0 +1,68 @@
+from _typeshed import Incomplete
+from mobly import controller_manager as controller_manager
+from mobly import expects as expects
+from mobly import records as records
+from mobly import runtime_test_info as runtime_test_info
+from mobly import signals as signals
+from mobly import utils as utils
+
+TEST_CASE_TOKEN: str
+RESULT_LINE_TEMPLATE: Incomplete
+TEST_STAGE_BEGIN_LOG_TEMPLATE: str
+TEST_STAGE_END_LOG_TEMPLATE: str
+STAGE_NAME_PRE_RUN: str
+STAGE_NAME_SETUP_GENERATED_TESTS: str
+STAGE_NAME_SETUP_CLASS: str
+STAGE_NAME_SETUP_TEST: str
+STAGE_NAME_TEARDOWN_TEST: str
+STAGE_NAME_TEARDOWN_CLASS: str
+STAGE_NAME_CLEAN_UP: str
+ATTR_REPEAT_CNT: str
+ATTR_MAX_RETRY_CNT: str
+ATTR_MAX_CONSEC_ERROR: str
+
+class Error(Exception): ...
+
+def repeat(count, max_consecutive_error: Incomplete | None = ...): ...
+def retry(max_count): ...
+
+class BaseTestClass:
+    TAG: Incomplete
+    tests: Incomplete
+    root_output_path: Incomplete
+    log_path: Incomplete
+    test_bed_name: Incomplete
+    testbed_name: Incomplete
+    user_params: Incomplete
+    results: Incomplete
+    summary_writer: Incomplete
+    controller_configs: Incomplete
+    def __init__(self, configs) -> None: ...
+    def unpack_userparams(
+        self,
+        req_param_names: Incomplete | None = ...,
+        opt_param_names: Incomplete | None = ...,
+        **kwargs,
+    ) -> None: ...
+    def register_controller(
+        self, module, required: bool = ..., min_number: int = ...
+    ): ...
+    def pre_run(self) -> None: ...
+    def setup_generated_tests(self) -> None: ...
+    def setup_class(self) -> None: ...
+    def teardown_class(self) -> None: ...
+    def setup_test(self) -> None: ...
+    def teardown_test(self) -> None: ...
+    def on_fail(self, record) -> None: ...
+    def on_pass(self, record) -> None: ...
+    def on_skip(self, record) -> None: ...
+    def record_data(self, content) -> None: ...
+    current_test_info: Incomplete
+    def exec_one_test(
+        self, test_name, test_method, record: Incomplete | None = ...
+    ): ...
+    def generate_tests(
+        self, test_logic, name_func, arg_sets, uid_func: Incomplete | None = ...
+    ) -> None: ...
+    def get_existing_test_names(self): ...
+    def run(self, test_names: Incomplete | None = ...): ...
diff --git a/stubs/mobly/config_parser.pyi b/stubs/mobly/config_parser.pyi
new file mode 100644
index 0000000..f9f74ff
--- /dev/null
+++ b/stubs/mobly/config_parser.pyi
@@ -0,0 +1,20 @@
+from _typeshed import Incomplete
+from mobly import keys as keys
+from mobly import utils as utils
+
+ENV_MOBLY_LOGPATH: str
+
+class MoblyConfigError(Exception): ...
+
+def load_test_config_file(test_config_path, tb_filters: Incomplete | None = ...): ...
+
+class TestRunConfig:
+    log_path: str
+    test_bed_name: Incomplete
+    testbed_name: Incomplete
+    controller_configs: Incomplete
+    user_params: Incomplete
+    summary_writer: Incomplete
+    test_class_name_suffix: Incomplete
+    def __init__(self) -> None: ...
+    def copy(self): ...
diff --git a/stubs/mobly/controller_manager.pyi b/stubs/mobly/controller_manager.pyi
new file mode 100644
index 0000000..6e59a30
--- /dev/null
+++ b/stubs/mobly/controller_manager.pyi
@@ -0,0 +1,15 @@
+from _typeshed import Incomplete
+from mobly import expects as expects
+from mobly import records as records
+from mobly import signals as signals
+
+def verify_controller_module(module) -> None: ...
+
+class ControllerManager:
+    controller_configs: Incomplete
+    def __init__(self, class_name, controller_configs) -> None: ...
+    def register_controller(
+        self, module, required: bool = ..., min_number: int = ...
+    ): ...
+    def unregister_controllers(self) -> None: ...
+    def get_controller_info_records(self): ...
diff --git a/src/antlion/controllers/__init__.py b/stubs/mobly/controllers/__init__.pyi
similarity index 100%
rename from src/antlion/controllers/__init__.py
rename to stubs/mobly/controllers/__init__.pyi
diff --git a/stubs/mobly/controllers/android_device.pyi b/stubs/mobly/controllers/android_device.pyi
new file mode 100644
index 0000000..80a352b
--- /dev/null
+++ b/stubs/mobly/controllers/android_device.pyi
@@ -0,0 +1,141 @@
+import enum
+import logging
+from collections.abc import Generator
+
+from _typeshed import Incomplete
+from mobly import runtime_test_info as runtime_test_info
+from mobly import utils as utils
+from mobly.controllers.android_device_lib import adb as adb
+from mobly.controllers.android_device_lib import errors as errors
+from mobly.controllers.android_device_lib import fastboot as fastboot
+from mobly.controllers.android_device_lib import service_manager as service_manager
+from mobly.controllers.android_device_lib.services import logcat as logcat
+from mobly.controllers.android_device_lib.services import (
+    snippet_management_service as snippet_management_service,
+)
+
+MBS_PACKAGE: str
+MOBLY_CONTROLLER_CONFIG_NAME: str
+ANDROID_DEVICE_PICK_ALL_TOKEN: str
+ANDROID_DEVICE_ADB_LOGCAT_PARAM_KEY: str
+ANDROID_DEVICE_EMPTY_CONFIG_MSG: str
+ANDROID_DEVICE_NOT_LIST_CONFIG_MSG: str
+CACHED_SYSTEM_PROPS: Incomplete
+KEY_DEVICE_REQUIRED: str
+DEFAULT_VALUE_DEVICE_REQUIRED: bool
+KEY_SKIP_LOGCAT: str
+DEFAULT_VALUE_SKIP_LOGCAT: bool
+SERVICE_NAME_LOGCAT: str
+DEFAULT_BUG_REPORT_NAME: str
+DEFAULT_TIMEOUT_BOOT_COMPLETION_SECOND: Incomplete
+TAKE_SCREENSHOT_TIMEOUT_SECOND: int
+Error = errors.Error
+DeviceError = errors.DeviceError
+SnippetError = snippet_management_service.Error
+EMULATOR_SERIAL_REGEX: Incomplete
+
+def create(configs): ...
+def destroy(ads) -> None: ...
+def get_info(ads): ...
+def parse_device_list(device_list_str, key): ...
+def list_adb_devices(): ...
+def list_adb_devices_by_usb_id(): ...
+def list_fastboot_devices(): ...
+def get_instances(serials): ...
+def get_instances_with_configs(configs): ...
+def get_all_instances(include_fastboot: bool = ...): ...
+def filter_devices(ads, func): ...
+def get_devices(ads, **kwargs): ...
+def get_device(ads, **kwargs): ...
+def take_bug_reports(
+    ads,
+    test_name: Incomplete | None = ...,
+    begin_time: Incomplete | None = ...,
+    destination: Incomplete | None = ...,
+) -> None: ...
+
+class BuildInfoConstants(enum.Enum):
+    BUILD_ID: Incomplete
+    BUILD_TYPE: Incomplete
+    BUILD_FINGERPRINT: Incomplete
+    BUILD_VERSION_CODENAME: Incomplete
+    BUILD_VERSION_INCREMENTAL: Incomplete
+    BUILD_VERSION_SDK: Incomplete
+    BUILD_PRODUCT: Incomplete
+    BUILD_CHARACTERISTICS: Incomplete
+    DEBUGGABLE: Incomplete
+    PRODUCT_NAME: Incomplete
+    HARDWARE: Incomplete
+    build_info_key: Incomplete
+    system_prop_key: Incomplete
+    def __init__(self, build_info_key, system_prop_key) -> None: ...
+
+class AndroidDevice:
+    log: Incomplete
+    adb: Incomplete
+    fastboot: Incomplete
+    services: Incomplete
+    def __init__(self, serial: str = ...) -> None: ...
+    @property
+    def adb_logcat_file_path(self): ...
+    @property
+    def device_info(self): ...
+    def add_device_info(self, name, info) -> None: ...
+    @property
+    def sl4a(self): ...
+    @property
+    def ed(self): ...
+    @property
+    def debug_tag(self): ...
+    @debug_tag.setter
+    def debug_tag(self, tag) -> None: ...
+    @property
+    def has_active_service(self): ...
+    @property
+    def log_path(self): ...
+    @log_path.setter
+    def log_path(self, new_path) -> None: ...
+    @property
+    def serial(self): ...
+    def update_serial(self, new_serial) -> None: ...
+    def handle_reboot(self) -> Generator[None, None, None]: ...
+    def handle_usb_disconnect(self) -> Generator[None, None, None]: ...
+    @property
+    def build_info(self): ...
+    @property
+    def is_bootloader(self): ...
+    @property
+    def is_adb_root(self): ...
+    @property
+    def is_rootable(self): ...
+    @property
+    def model(self): ...
+    @property
+    def is_emulator(self): ...
+    def load_config(self, config) -> None: ...
+    def root_adb(self) -> None: ...
+    def load_snippet(self, name, package) -> None: ...
+    def unload_snippet(self, name) -> None: ...
+    def generate_filename(
+        self,
+        file_type,
+        time_identifier: Incomplete | None = ...,
+        extension_name: Incomplete | None = ...,
+    ): ...
+    def take_bug_report(
+        self,
+        test_name: Incomplete | None = ...,
+        begin_time: Incomplete | None = ...,
+        timeout: int = ...,
+        destination: Incomplete | None = ...,
+    ): ...
+    def take_screenshot(self, destination, prefix: str = ...): ...
+    def run_iperf_client(self, server_host, extra_args: str = ...): ...
+    def wait_for_boot_completion(self, timeout=...) -> None: ...
+    def is_boot_completed(self): ...
+    def is_adb_detectable(self): ...
+    def reboot(self) -> None: ...
+    def __getattr__(self, name): ...
+
+class AndroidDeviceLoggerAdapter(logging.LoggerAdapter):
+    def process(self, msg, kwargs): ...
diff --git a/src/antlion/controllers/adb_lib/__init__.py b/stubs/mobly/controllers/android_device_lib/__init__.pyi
similarity index 100%
copy from src/antlion/controllers/adb_lib/__init__.py
copy to stubs/mobly/controllers/android_device_lib/__init__.pyi
diff --git a/stubs/mobly/controllers/android_device_lib/adb.pyi b/stubs/mobly/controllers/android_device_lib/adb.pyi
new file mode 100644
index 0000000..473537d
--- /dev/null
+++ b/stubs/mobly/controllers/android_device_lib/adb.pyi
@@ -0,0 +1,51 @@
+from _typeshed import Incomplete
+from mobly import utils as utils
+
+ADB: str
+ADB_PORT_LOCK: Incomplete
+ADB_ROOT_RETRY_ATTMEPTS: int
+ADB_ROOT_RETRY_ATTEMPT_INTERVAL_SEC: int
+DEFAULT_INSTRUMENTATION_RUNNER: str
+DEFAULT_GETPROP_TIMEOUT_SEC: int
+DEFAULT_GETPROPS_ATTEMPTS: int
+DEFAULT_GETPROPS_RETRY_SLEEP_SEC: int
+PATTERN_ADB_CONNECT_SUCCESS: Incomplete
+
+class Error(Exception): ...
+
+class AdbError(Error):
+    cmd: Incomplete
+    stdout: Incomplete
+    stderr: Incomplete
+    ret_code: Incomplete
+    serial: Incomplete
+    def __init__(self, cmd, stdout, stderr, ret_code, serial: str = ...) -> None: ...
+
+class AdbTimeoutError(Error):
+    cmd: Incomplete
+    timeout: Incomplete
+    serial: Incomplete
+    def __init__(self, cmd, timeout, serial: str = ...) -> None: ...
+
+def is_adb_available(): ...
+def list_occupied_adb_ports(): ...
+
+class AdbProxy:
+    serial: Incomplete
+    def __init__(self, serial: str = ...) -> None: ...
+    @property
+    def current_user_id(self) -> int: ...
+    def connect(self, address) -> bytes: ...
+    def getprop(self, prop_name): ...
+    def getprops(self, prop_names): ...
+    def has_shell_command(self, command) -> bool: ...
+    def forward(self, args: Incomplete | None = ..., shell: bool = ...) -> bytes: ...
+    def instrument(
+        self,
+        package,
+        options: Incomplete | None = ...,
+        runner: Incomplete | None = ...,
+        handler: Incomplete | None = ...,
+    ) -> bytes: ...
+    def root(self) -> bytes: ...
+    def __getattr__(self, name): ...
diff --git a/stubs/mobly/controllers/android_device_lib/callback_handler.pyi b/stubs/mobly/controllers/android_device_lib/callback_handler.pyi
new file mode 100644
index 0000000..0fb9383
--- /dev/null
+++ b/stubs/mobly/controllers/android_device_lib/callback_handler.pyi
@@ -0,0 +1,19 @@
+from _typeshed import Incomplete
+from mobly.controllers.android_device_lib import snippet_event as snippet_event
+from mobly.snippet import errors as errors
+
+MAX_TIMEOUT: Incomplete
+DEFAULT_TIMEOUT: int
+Error = errors.CallbackHandlerBaseError
+TimeoutError = errors.CallbackHandlerTimeoutError
+
+class CallbackHandler:
+    ret_value: Incomplete
+    def __init__(
+        self, callback_id, event_client, ret_value, method_name, ad
+    ) -> None: ...
+    @property
+    def callback_id(self): ...
+    def waitAndGet(self, event_name, timeout=...): ...
+    def waitForEvent(self, event_name, predicate, timeout=...): ...
+    def getAll(self, event_name): ...
diff --git a/stubs/mobly/controllers/android_device_lib/callback_handler_v2.pyi b/stubs/mobly/controllers/android_device_lib/callback_handler_v2.pyi
new file mode 100644
index 0000000..a24f38f
--- /dev/null
+++ b/stubs/mobly/controllers/android_device_lib/callback_handler_v2.pyi
@@ -0,0 +1,8 @@
+from mobly.snippet import callback_handler_base as callback_handler_base
+from mobly.snippet import errors as errors
+
+TIMEOUT_ERROR_MESSAGE: str
+
+class CallbackHandlerV2(callback_handler_base.CallbackHandlerBase):
+    def callEventWaitAndGetRpc(self, callback_id, event_name, timeout_sec): ...
+    def callEventGetAllRpc(self, callback_id, event_name): ...
diff --git a/stubs/mobly/controllers/android_device_lib/errors.pyi b/stubs/mobly/controllers/android_device_lib/errors.pyi
new file mode 100644
index 0000000..562da05
--- /dev/null
+++ b/stubs/mobly/controllers/android_device_lib/errors.pyi
@@ -0,0 +1,13 @@
+from _typeshed import Incomplete
+from mobly import signals as signals
+
+HIERARCHY_TOKEN: str
+
+class Error(signals.ControllerError): ...
+
+class DeviceError(Error):
+    def __init__(self, ad, msg) -> None: ...
+
+class ServiceError(DeviceError):
+    SERVICE_TYPE: Incomplete
+    def __init__(self, device, msg) -> None: ...
diff --git a/stubs/mobly/controllers/android_device_lib/event_dispatcher.pyi b/stubs/mobly/controllers/android_device_lib/event_dispatcher.pyi
new file mode 100644
index 0000000..4f63d2a
--- /dev/null
+++ b/stubs/mobly/controllers/android_device_lib/event_dispatcher.pyi
@@ -0,0 +1,36 @@
+from _typeshed import Incomplete
+
+class EventDispatcherError(Exception): ...
+class IllegalStateError(EventDispatcherError): ...
+class DuplicateError(EventDispatcherError): ...
+
+class EventDispatcher:
+    DEFAULT_TIMEOUT: int
+    started: bool
+    executor: Incomplete
+    poller: Incomplete
+    event_dict: Incomplete
+    handlers: Incomplete
+    lock: Incomplete
+    def __init__(self, sl4a) -> None: ...
+    def poll_events(self) -> None: ...
+    def register_handler(self, handler, event_name, args) -> None: ...
+    def start(self) -> None: ...
+    def clean_up(self) -> None: ...
+    def pop_event(self, event_name, timeout=...): ...
+    def wait_for_event(self, event_name, predicate, timeout=..., *args, **kwargs): ...
+    def pop_events(self, regex_pattern, timeout): ...
+    def get_event_q(self, event_name): ...
+    def handle_subscribed_event(self, event_obj, event_name) -> None: ...
+    def handle_event(
+        self,
+        event_handler,
+        event_name,
+        user_args,
+        event_timeout: Incomplete | None = ...,
+        cond: Incomplete | None = ...,
+        cond_timeout: Incomplete | None = ...,
+    ): ...
+    def pop_all(self, event_name): ...
+    def clear_events(self, event_name) -> None: ...
+    def clear_all_events(self) -> None: ...
diff --git a/stubs/mobly/controllers/android_device_lib/fastboot.pyi b/stubs/mobly/controllers/android_device_lib/fastboot.pyi
new file mode 100644
index 0000000..e734c1a
--- /dev/null
+++ b/stubs/mobly/controllers/android_device_lib/fastboot.pyi
@@ -0,0 +1,10 @@
+from _typeshed import Incomplete
+
+def exe_cmd(*cmds): ...
+
+class FastbootProxy:
+    serial: Incomplete
+    fastboot_str: Incomplete
+    def __init__(self, serial: str = ...) -> None: ...
+    def args(self, *args): ...
+    def __getattr__(self, name): ...
diff --git a/stubs/mobly/controllers/android_device_lib/jsonrpc_client_base.pyi b/stubs/mobly/controllers/android_device_lib/jsonrpc_client_base.pyi
new file mode 100644
index 0000000..550126d
--- /dev/null
+++ b/stubs/mobly/controllers/android_device_lib/jsonrpc_client_base.pyi
@@ -0,0 +1,36 @@
+import abc
+
+from _typeshed import Incomplete
+from mobly.controllers.android_device_lib import callback_handler as callback_handler
+from mobly.snippet import errors as errors
+
+UNKNOWN_UID: int
+Error = errors.Error
+AppStartError = errors.ServerStartError
+AppRestoreConnectionError = errors.ServerRestoreConnectionError
+ApiError = errors.ApiError
+ProtocolError = errors.ProtocolError
+
+class JsonRpcCommand:
+    INIT: str
+    CONTINUE: str
+
+class JsonRpcClientBase(abc.ABC):
+    host_port: Incomplete
+    device_port: Incomplete
+    app_name: Incomplete
+    log: Incomplete
+    uid: Incomplete
+    verbose_logging: bool
+    def __init__(self, app_name, ad) -> None: ...
+    def __del__(self) -> None: ...
+    def start_app_and_connect(self) -> None: ...
+    def stop_app(self) -> None: ...
+    def restore_app_connection(self, port: Incomplete | None = ...) -> None: ...
+    def connect(self, uid=..., cmd=...) -> None: ...
+    def disconnect(self) -> None: ...
+    def close_socket_connection(self) -> None: ...
+    def clear_host_port(self) -> None: ...
+    def disable_hidden_api_blacklist(self) -> None: ...
+    def __getattr__(self, name): ...
+    def set_snippet_client_verbose_logging(self, verbose) -> None: ...
diff --git a/stubs/mobly/controllers/android_device_lib/jsonrpc_shell_base.pyi b/stubs/mobly/controllers/android_device_lib/jsonrpc_shell_base.pyi
new file mode 100644
index 0000000..6033e90
--- /dev/null
+++ b/stubs/mobly/controllers/android_device_lib/jsonrpc_shell_base.pyi
@@ -0,0 +1,9 @@
+from _typeshed import Incomplete
+from mobly.controllers import android_device as android_device
+
+class Error(Exception): ...
+
+class JsonRpcShellBase:
+    def load_device(self, serial: Incomplete | None = ...) -> None: ...
+    def start_console(self) -> None: ...
+    def main(self, serial: Incomplete | None = ...) -> None: ...
diff --git a/stubs/mobly/controllers/android_device_lib/service_manager.pyi b/stubs/mobly/controllers/android_device_lib/service_manager.pyi
new file mode 100644
index 0000000..06aad4e
--- /dev/null
+++ b/stubs/mobly/controllers/android_device_lib/service_manager.pyi
@@ -0,0 +1,31 @@
+from _typeshed import Incomplete
+from mobly import expects as expects
+from mobly.controllers.android_device_lib import errors as errors
+from mobly.controllers.android_device_lib.services import base_service as base_service
+
+class Error(errors.DeviceError): ...
+
+class ServiceManager:
+    def __init__(self, device) -> None: ...
+    def has_service_by_name(self, name): ...
+    @property
+    def is_any_alive(self): ...
+    def register(
+        self,
+        alias,
+        service_class,
+        configs: Incomplete | None = ...,
+        start_service: bool = ...,
+    ) -> None: ...
+    def unregister(self, alias) -> None: ...
+    def for_each(self, func) -> None: ...
+    def list_live_services(self): ...
+    def create_output_excerpts_all(self, test_info): ...
+    def unregister_all(self) -> None: ...
+    def start_all(self) -> None: ...
+    def start_services(self, service_alises) -> None: ...
+    def stop_all(self) -> None: ...
+    def pause_all(self) -> None: ...
+    def resume_all(self) -> None: ...
+    def resume_services(self, service_alises) -> None: ...
+    def __getattr__(self, name): ...
diff --git a/src/antlion/test_utils/abstract_devices/__init__.py b/stubs/mobly/controllers/android_device_lib/services/__init__.pyi
similarity index 100%
copy from src/antlion/test_utils/abstract_devices/__init__.py
copy to stubs/mobly/controllers/android_device_lib/services/__init__.pyi
diff --git a/stubs/mobly/controllers/android_device_lib/services/base_service.pyi b/stubs/mobly/controllers/android_device_lib/services/base_service.pyi
new file mode 100644
index 0000000..c99f0e7
--- /dev/null
+++ b/stubs/mobly/controllers/android_device_lib/services/base_service.pyi
@@ -0,0 +1,17 @@
+import abc
+
+from _typeshed import Incomplete
+
+class BaseService(abc.ABC):
+    def __init__(self, device, configs: Incomplete | None = ...) -> None: ...
+    @property
+    def alias(self): ...
+    @alias.setter
+    def alias(self, alias) -> None: ...
+    @property
+    def is_alive(self) -> None: ...
+    def start(self) -> None: ...
+    def stop(self) -> None: ...
+    def pause(self) -> None: ...
+    def resume(self) -> None: ...
+    def create_output_excerpts(self, test_info): ...
diff --git a/stubs/mobly/controllers/android_device_lib/services/logcat.pyi b/stubs/mobly/controllers/android_device_lib/services/logcat.pyi
new file mode 100644
index 0000000..e21d22e
--- /dev/null
+++ b/stubs/mobly/controllers/android_device_lib/services/logcat.pyi
@@ -0,0 +1,35 @@
+from _typeshed import Incomplete
+from mobly import utils as utils
+from mobly.controllers.android_device_lib import adb as adb
+from mobly.controllers.android_device_lib import errors as errors
+from mobly.controllers.android_device_lib.services import base_service as base_service
+
+CREATE_LOGCAT_FILE_TIMEOUT_SEC: int
+
+class Error(errors.ServiceError):
+    SERVICE_TYPE: str
+
+class Config:
+    clear_log: Incomplete
+    logcat_params: Incomplete
+    output_file_path: Incomplete
+    def __init__(
+        self,
+        logcat_params: Incomplete | None = ...,
+        clear_log: bool = ...,
+        output_file_path: Incomplete | None = ...,
+    ) -> None: ...
+
+class Logcat(base_service.BaseService):
+    OUTPUT_FILE_TYPE: str
+    adb_logcat_file_path: Incomplete
+    def __init__(self, android_device, configs: Incomplete | None = ...) -> None: ...
+    def create_output_excerpts(self, test_info): ...
+    @property
+    def is_alive(self): ...
+    def clear_adb_log(self) -> None: ...
+    def update_config(self, new_config) -> None: ...
+    def start(self) -> None: ...
+    def stop(self) -> None: ...
+    def pause(self) -> None: ...
+    def resume(self) -> None: ...
diff --git a/stubs/mobly/controllers/android_device_lib/services/sl4a_service.pyi b/stubs/mobly/controllers/android_device_lib/services/sl4a_service.pyi
new file mode 100644
index 0000000..1a0f295
--- /dev/null
+++ b/stubs/mobly/controllers/android_device_lib/services/sl4a_service.pyi
@@ -0,0 +1,13 @@
+from _typeshed import Incomplete
+from mobly.controllers.android_device_lib import sl4a_client as sl4a_client
+from mobly.controllers.android_device_lib.services import base_service as base_service
+
+class Sl4aService(base_service.BaseService):
+    def __init__(self, device, configs: Incomplete | None = ...) -> None: ...
+    @property
+    def is_alive(self): ...
+    def start(self) -> None: ...
+    def stop(self) -> None: ...
+    def pause(self) -> None: ...
+    def resume(self) -> None: ...
+    def __getattr__(self, name): ...
diff --git a/stubs/mobly/controllers/android_device_lib/services/snippet_management_service.pyi b/stubs/mobly/controllers/android_device_lib/services/snippet_management_service.pyi
new file mode 100644
index 0000000..7ca5056
--- /dev/null
+++ b/stubs/mobly/controllers/android_device_lib/services/snippet_management_service.pyi
@@ -0,0 +1,22 @@
+from _typeshed import Incomplete
+from mobly.controllers.android_device_lib import errors as errors
+from mobly.controllers.android_device_lib import snippet_client_v2 as snippet_client_v2
+from mobly.controllers.android_device_lib.services import base_service as base_service
+
+MISSING_SNIPPET_CLIENT_MSG: str
+
+class Error(errors.ServiceError):
+    SERVICE_TYPE: str
+
+class SnippetManagementService(base_service.BaseService):
+    def __init__(self, device, configs: Incomplete | None = ...) -> None: ...
+    @property
+    def is_alive(self): ...
+    def get_snippet_client(self, name): ...
+    def add_snippet_client(self, name, package) -> None: ...
+    def remove_snippet_client(self, name) -> None: ...
+    def start(self) -> None: ...
+    def stop(self) -> None: ...
+    def pause(self) -> None: ...
+    def resume(self) -> None: ...
+    def __getattr__(self, name): ...
diff --git a/stubs/mobly/controllers/android_device_lib/sl4a_client.pyi b/stubs/mobly/controllers/android_device_lib/sl4a_client.pyi
new file mode 100644
index 0000000..9b12fc2
--- /dev/null
+++ b/stubs/mobly/controllers/android_device_lib/sl4a_client.pyi
@@ -0,0 +1,16 @@
+from _typeshed import Incomplete
+from mobly import utils as utils
+from mobly.controllers.android_device_lib import event_dispatcher as event_dispatcher
+from mobly.controllers.android_device_lib import (
+    jsonrpc_client_base as jsonrpc_client_base,
+)
+
+class Sl4aClient(jsonrpc_client_base.JsonRpcClientBase):
+    ed: Incomplete
+    def __init__(self, ad) -> None: ...
+    device_port: Incomplete
+    def start_app_and_connect(self) -> None: ...
+    host_port: Incomplete
+    def restore_app_connection(self, port: Incomplete | None = ...) -> None: ...
+    def stop_app(self) -> None: ...
+    def stop_event_dispatcher(self) -> None: ...
diff --git a/stubs/mobly/controllers/android_device_lib/snippet_client.pyi b/stubs/mobly/controllers/android_device_lib/snippet_client.pyi
new file mode 100644
index 0000000..96f0a88
--- /dev/null
+++ b/stubs/mobly/controllers/android_device_lib/snippet_client.pyi
@@ -0,0 +1,24 @@
+from _typeshed import Incomplete
+from mobly import utils as utils
+from mobly.controllers.android_device_lib import adb as adb
+from mobly.controllers.android_device_lib import errors as errors
+from mobly.controllers.android_device_lib import (
+    jsonrpc_client_base as jsonrpc_client_base,
+)
+from mobly.snippet import errors as snippet_errors
+
+AppStartPreCheckError = snippet_errors.ServerStartPreCheckError
+ProtocolVersionError = snippet_errors.ServerStartProtocolError
+
+class SnippetClient(jsonrpc_client_base.JsonRpcClientBase):
+    package: Incomplete
+    def __init__(self, package, ad) -> None: ...
+    @property
+    def is_alive(self): ...
+    @property
+    def user_id(self): ...
+    def start_app_and_connect(self) -> None: ...
+    host_port: Incomplete
+    def restore_app_connection(self, port: Incomplete | None = ...) -> None: ...
+    def stop_app(self) -> None: ...
+    def help(self, print_output: bool = ...): ...
diff --git a/stubs/mobly/controllers/android_device_lib/snippet_client_v2.pyi b/stubs/mobly/controllers/android_device_lib/snippet_client_v2.pyi
new file mode 100644
index 0000000..92fd966
--- /dev/null
+++ b/stubs/mobly/controllers/android_device_lib/snippet_client_v2.pyi
@@ -0,0 +1,41 @@
+import enum
+
+from _typeshed import Incomplete
+from mobly import utils as utils
+from mobly.controllers.android_device_lib import adb as adb
+from mobly.controllers.android_device_lib import (
+    callback_handler_v2 as callback_handler_v2,
+)
+from mobly.snippet import client_base as client_base
+from mobly.snippet import errors as errors
+
+UNKNOWN_UID: int
+
+class ConnectionHandshakeCommand(enum.Enum):
+    INIT: str
+    CONTINUE: str
+
+class SnippetClientV2(client_base.ClientBase):
+    host_port: Incomplete
+    device_port: Incomplete
+    uid: Incomplete
+    def __init__(self, package, ad) -> None: ...
+    @property
+    def user_id(self): ...
+    @property
+    def is_alive(self): ...
+    def before_starting_server(self) -> None: ...
+    def start_server(self) -> None: ...
+    def make_connection(self) -> None: ...
+    def create_socket_connection(self) -> None: ...
+    def send_handshake_request(self, uid=..., cmd=...) -> None: ...
+    def check_server_proc_running(self) -> None: ...
+    def send_rpc_request(self, request): ...
+    def handle_callback(self, callback_id, ret_value, rpc_func_name): ...
+    def make_connection_with_forwarded_port(
+        self, host_port, device_port, uid=..., cmd=...
+    ) -> None: ...
+    def stop(self) -> None: ...
+    def close_connection(self) -> None: ...
+    def restore_server_connection(self, port: Incomplete | None = ...) -> None: ...
+    def help(self, print_output: bool = ...): ...
diff --git a/stubs/mobly/controllers/android_device_lib/snippet_event.pyi b/stubs/mobly/controllers/android_device_lib/snippet_event.pyi
new file mode 100644
index 0000000..5d99106
--- /dev/null
+++ b/stubs/mobly/controllers/android_device_lib/snippet_event.pyi
@@ -0,0 +1,10 @@
+from _typeshed import Incomplete
+
+def from_dict(event_dict): ...
+
+class SnippetEvent:
+    callback_id: Incomplete
+    name: Incomplete
+    creation_time: Incomplete
+    data: Incomplete
+    def __init__(self, callback_id, name, creation_time, data) -> None: ...
diff --git a/stubs/mobly/controllers/attenuator.pyi b/stubs/mobly/controllers/attenuator.pyi
new file mode 100644
index 0000000..e07d7b9
--- /dev/null
+++ b/stubs/mobly/controllers/attenuator.pyi
@@ -0,0 +1,24 @@
+from _typeshed import Incomplete
+
+MOBLY_CONTROLLER_CONFIG_NAME: str
+KEY_ADDRESS: str
+KEY_PORT: str
+KEY_MODEL: str
+KEY_PATHS: str
+PACKAGE_PATH_TEMPLATE: str
+
+def create(configs): ...
+def destroy(objs) -> None: ...
+
+class Error(Exception): ...
+
+class AttenuatorPath:
+    model: Incomplete
+    attenuation_device: Incomplete
+    idx: Incomplete
+    def __init__(
+        self, attenuation_device, idx: int = ..., name: Incomplete | None = ...
+    ) -> None: ...
+    def set_atten(self, value) -> None: ...
+    def get_atten(self): ...
+    def get_max_atten(self): ...
diff --git a/src/antlion/controllers/attenuator_lib/__init__.py b/stubs/mobly/controllers/attenuator_lib/__init__.pyi
similarity index 100%
copy from src/antlion/controllers/attenuator_lib/__init__.py
copy to stubs/mobly/controllers/attenuator_lib/__init__.pyi
diff --git a/stubs/mobly/controllers/attenuator_lib/minicircuits.pyi b/stubs/mobly/controllers/attenuator_lib/minicircuits.pyi
new file mode 100644
index 0000000..5dfa6e4
--- /dev/null
+++ b/stubs/mobly/controllers/attenuator_lib/minicircuits.pyi
@@ -0,0 +1,15 @@
+from _typeshed import Incomplete
+from mobly.controllers import attenuator as attenuator
+from mobly.controllers.attenuator_lib import telnet_scpi_client as telnet_scpi_client
+
+class AttenuatorDevice:
+    path_count: Incomplete
+    def __init__(self, path_count: int = ...) -> None: ...
+    @property
+    def is_open(self): ...
+    properties: Incomplete
+    max_atten: Incomplete
+    def open(self, host, port: int = ...) -> None: ...
+    def close(self) -> None: ...
+    def set_atten(self, idx, value) -> None: ...
+    def get_atten(self, idx: int = ...): ...
diff --git a/stubs/mobly/controllers/attenuator_lib/telnet_scpi_client.pyi b/stubs/mobly/controllers/attenuator_lib/telnet_scpi_client.pyi
new file mode 100644
index 0000000..3ebb042
--- /dev/null
+++ b/stubs/mobly/controllers/attenuator_lib/telnet_scpi_client.pyi
@@ -0,0 +1,20 @@
+from _typeshed import Incomplete
+from mobly.controllers import attenuator as attenuator
+
+class TelnetScpiClient:
+    tx_cmd_separator: Incomplete
+    rx_cmd_separator: Incomplete
+    prompt: Incomplete
+    host: Incomplete
+    port: Incomplete
+    def __init__(
+        self,
+        tx_cmd_separator: str = ...,
+        rx_cmd_separator: str = ...,
+        prompt: str = ...,
+    ) -> None: ...
+    def open(self, host, port: int = ...) -> None: ...
+    @property
+    def is_open(self): ...
+    def close(self) -> None: ...
+    def cmd(self, cmd_str, wait_ret: bool = ...): ...
diff --git a/stubs/mobly/controllers/iperf_server.pyi b/stubs/mobly/controllers/iperf_server.pyi
new file mode 100644
index 0000000..29fd940
--- /dev/null
+++ b/stubs/mobly/controllers/iperf_server.pyi
@@ -0,0 +1,31 @@
+from _typeshed import Incomplete
+from mobly import utils as utils
+
+MOBLY_CONTROLLER_CONFIG_NAME: str
+
+def create(configs): ...
+def destroy(objs) -> None: ...
+
+class IPerfResult:
+    result: Incomplete
+    def __init__(self, result_path) -> None: ...
+    def get_json(self): ...
+    @property
+    def error(self): ...
+    @property
+    def avg_rate(self): ...
+    @property
+    def avg_receive_rate(self): ...
+    @property
+    def avg_send_rate(self): ...
+
+class IPerfServer:
+    port: Incomplete
+    log_path: Incomplete
+    iperf_str: Incomplete
+    iperf_process: Incomplete
+    log_files: Incomplete
+    started: bool
+    def __init__(self, port, log_path) -> None: ...
+    def start(self, extra_args: str = ..., tag: str = ...) -> None: ...
+    def stop(self) -> None: ...
diff --git a/stubs/mobly/controllers/sniffer.pyi b/stubs/mobly/controllers/sniffer.pyi
new file mode 100644
index 0000000..7ee9062
--- /dev/null
+++ b/stubs/mobly/controllers/sniffer.pyi
@@ -0,0 +1,36 @@
+from _typeshed import Incomplete
+
+MOBLY_CONTROLLER_CONFIG_NAME: str
+
+def create(configs): ...
+def destroy(objs) -> None: ...
+
+class SnifferError(Exception): ...
+class InvalidDataError(Exception): ...
+class ExecutionError(SnifferError): ...
+class InvalidOperationError(SnifferError): ...
+
+class Sniffer:
+    CONFIG_KEY_CHANNEL: str
+    def __init__(
+        self, interface, logger, base_configs: Incomplete | None = ...
+    ) -> None: ...
+    def get_descriptor(self) -> None: ...
+    def get_type(self) -> None: ...
+    def get_subtype(self) -> None: ...
+    def get_interface(self) -> None: ...
+    def get_capture_file(self) -> None: ...
+    def start_capture(
+        self,
+        override_configs: Incomplete | None = ...,
+        additional_args: Incomplete | None = ...,
+        duration: Incomplete | None = ...,
+        packet_count: Incomplete | None = ...,
+    ) -> None: ...
+    def stop_capture(self) -> None: ...
+    def wait_for_capture(self, timeout: Incomplete | None = ...) -> None: ...
+
+class ActiveCaptureContext:
+    def __init__(self, sniffer, timeout: Incomplete | None = ...) -> None: ...
+    def __enter__(self) -> None: ...
+    def __exit__(self, type, value, traceback) -> None: ...
diff --git a/src/antlion/controllers/sniffer_lib/__init__.py b/stubs/mobly/controllers/sniffer_lib/__init__.pyi
similarity index 100%
copy from src/antlion/controllers/sniffer_lib/__init__.py
copy to stubs/mobly/controllers/sniffer_lib/__init__.pyi
diff --git a/src/antlion/controllers/sniffer_lib/local/__init__.py b/stubs/mobly/controllers/sniffer_lib/local/__init__.pyi
similarity index 100%
copy from src/antlion/controllers/sniffer_lib/local/__init__.py
copy to stubs/mobly/controllers/sniffer_lib/local/__init__.pyi
diff --git a/stubs/mobly/controllers/sniffer_lib/local/local_base.pyi b/stubs/mobly/controllers/sniffer_lib/local/local_base.pyi
new file mode 100644
index 0000000..4e56926
--- /dev/null
+++ b/stubs/mobly/controllers/sniffer_lib/local/local_base.pyi
@@ -0,0 +1,21 @@
+from _typeshed import Incomplete
+from mobly import logger as logger
+from mobly import utils as utils
+from mobly.controllers import sniffer as sniffer
+
+class SnifferLocalBase(sniffer.Sniffer):
+    def __init__(
+        self, interface, logger, base_configs: Incomplete | None = ...
+    ) -> None: ...
+    def get_interface(self): ...
+    def get_type(self): ...
+    def get_capture_file(self): ...
+    def start_capture(
+        self,
+        override_configs: Incomplete | None = ...,
+        additional_args: Incomplete | None = ...,
+        duration: Incomplete | None = ...,
+        packet_count: Incomplete | None = ...,
+    ): ...
+    def stop_capture(self) -> None: ...
+    def wait_for_capture(self, timeout: Incomplete | None = ...) -> None: ...
diff --git a/stubs/mobly/controllers/sniffer_lib/local/tcpdump.pyi b/stubs/mobly/controllers/sniffer_lib/local/tcpdump.pyi
new file mode 100644
index 0000000..2cc12b3
--- /dev/null
+++ b/stubs/mobly/controllers/sniffer_lib/local/tcpdump.pyi
@@ -0,0 +1,10 @@
+from _typeshed import Incomplete
+from mobly.controllers import sniffer as sniffer
+from mobly.controllers.sniffer_lib.local import local_base as local_base
+
+class Sniffer(local_base.SnifferLocalBase):
+    def __init__(
+        self, config_path, logger, base_configs: Incomplete | None = ...
+    ) -> None: ...
+    def get_descriptor(self): ...
+    def get_subtype(self): ...
diff --git a/stubs/mobly/controllers/sniffer_lib/local/tshark.pyi b/stubs/mobly/controllers/sniffer_lib/local/tshark.pyi
new file mode 100644
index 0000000..2cc12b3
--- /dev/null
+++ b/stubs/mobly/controllers/sniffer_lib/local/tshark.pyi
@@ -0,0 +1,10 @@
+from _typeshed import Incomplete
+from mobly.controllers import sniffer as sniffer
+from mobly.controllers.sniffer_lib.local import local_base as local_base
+
+class Sniffer(local_base.SnifferLocalBase):
+    def __init__(
+        self, config_path, logger, base_configs: Incomplete | None = ...
+    ) -> None: ...
+    def get_descriptor(self): ...
+    def get_subtype(self): ...
diff --git a/stubs/mobly/expects.pyi b/stubs/mobly/expects.pyi
new file mode 100644
index 0000000..e7aa36c
--- /dev/null
+++ b/stubs/mobly/expects.pyi
@@ -0,0 +1,28 @@
+from collections.abc import Generator
+
+from _typeshed import Incomplete
+from mobly import asserts as asserts
+from mobly import records as records
+from mobly import signals as signals
+
+DEFAULT_TEST_RESULT_RECORD: Incomplete
+
+class _ExpectErrorRecorder:
+    def __init__(self, record: Incomplete | None = ...) -> None: ...
+    def reset_internal_states(self, record: Incomplete | None = ...) -> None: ...
+    @property
+    def has_error(self): ...
+    @property
+    def error_count(self): ...
+    def add_error(self, error) -> None: ...
+
+def expect_true(condition, msg, extras: Incomplete | None = ...) -> None: ...
+def expect_false(condition, msg, extras: Incomplete | None = ...) -> None: ...
+def expect_equal(
+    first, second, msg: Incomplete | None = ..., extras: Incomplete | None = ...
+) -> None: ...
+def expect_no_raises(
+    message: Incomplete | None = ..., extras: Incomplete | None = ...
+) -> Generator[None, None, None]: ...
+
+recorder: Incomplete
diff --git a/stubs/mobly/keys.pyi b/stubs/mobly/keys.pyi
new file mode 100644
index 0000000..9007329
--- /dev/null
+++ b/stubs/mobly/keys.pyi
@@ -0,0 +1,9 @@
+import enum
+
+class Config(enum.Enum):
+    key_mobly_params: str
+    key_log_path: str
+    key_testbed: str
+    key_testbed_name: str
+    key_testbed_controllers: str
+    key_testbed_test_params: str
diff --git a/stubs/mobly/logger.pyi b/stubs/mobly/logger.pyi
new file mode 100644
index 0000000..86ccf16
--- /dev/null
+++ b/stubs/mobly/logger.pyi
@@ -0,0 +1,35 @@
+import logging
+
+from _typeshed import Incomplete
+from mobly import records as records
+from mobly import utils as utils
+
+LINUX_MAX_FILENAME_LENGTH: int
+WINDOWS_MAX_FILENAME_LENGTH: int
+WINDOWS_RESERVED_CHARACTERS_REPLACEMENTS: Incomplete
+WINDOWS_RESERVED_FILENAME_REGEX: Incomplete
+WINDOWS_RESERVED_FILENAME_PREFIX: str
+log_line_format: str
+log_line_time_format: str
+log_line_timestamp_len: int
+logline_timestamp_re: Incomplete
+
+def is_valid_logline_timestamp(timestamp): ...
+def logline_timestamp_comparator(t1, t2): ...
+def epoch_to_log_line_timestamp(epoch_time, time_zone: Incomplete | None = ...): ...
+def get_log_line_timestamp(delta: Incomplete | None = ...): ...
+def get_log_file_timestamp(delta: Incomplete | None = ...): ...
+def kill_test_logger(logger) -> None: ...
+def create_latest_log_alias(actual_path, alias) -> None: ...
+def setup_test_logger(
+    log_path, prefix: Incomplete | None = ..., alias: str = ..., console_level=...
+) -> None: ...
+def sanitize_filename(filename): ...
+def normalize_log_line_timestamp(log_line_timestamp): ...
+
+class PrefixLoggerAdapter(logging.LoggerAdapter):
+    EXTRA_KEY_LOG_PREFIX: str
+    _KWARGS_TYPE: Incomplete
+    _PROCESS_RETURN_TYPE: Incomplete
+    extra: _KWARGS_TYPE
+    def process(self, msg: str, kwargs: _KWARGS_TYPE) -> _PROCESS_RETURN_TYPE: ...
diff --git a/stubs/mobly/records.pyi b/stubs/mobly/records.pyi
new file mode 100644
index 0000000..2ae6905
--- /dev/null
+++ b/stubs/mobly/records.pyi
@@ -0,0 +1,118 @@
+import enum
+
+from _typeshed import Incomplete
+from mobly import signals as signals
+from mobly import utils as utils
+
+OUTPUT_FILE_INFO_LOG: str
+OUTPUT_FILE_DEBUG_LOG: str
+OUTPUT_FILE_SUMMARY: str
+
+class Error(Exception): ...
+
+def uid(uid): ...
+
+class TestSummaryEntryType(enum.Enum):
+    TEST_NAME_LIST: str
+    RECORD: str
+    SUMMARY: str
+    CONTROLLER_INFO: str
+    USER_DATA: str
+
+class TestSummaryWriter:
+    def __init__(self, path) -> None: ...
+    def __copy__(self): ...
+    def __deepcopy__(self, *args): ...
+    def dump(self, content, entry_type) -> None: ...
+
+class TestResultEnums:
+    RECORD_NAME: str
+    RECORD_CLASS: str
+    RECORD_BEGIN_TIME: str
+    RECORD_END_TIME: str
+    RECORD_RESULT: str
+    RECORD_UID: str
+    RECORD_EXTRAS: str
+    RECORD_EXTRA_ERRORS: str
+    RECORD_DETAILS: str
+    RECORD_TERMINATION_SIGNAL_TYPE: str
+    RECORD_STACKTRACE: str
+    RECORD_SIGNATURE: str
+    RECORD_RETRY_PARENT: str
+    RECORD_POSITION: str
+    TEST_RESULT_PASS: str
+    TEST_RESULT_FAIL: str
+    TEST_RESULT_SKIP: str
+    TEST_RESULT_ERROR: str
+
+class ControllerInfoRecord:
+    KEY_TEST_CLASS: Incomplete
+    KEY_CONTROLLER_NAME: str
+    KEY_CONTROLLER_INFO: str
+    KEY_TIMESTAMP: str
+    test_class: Incomplete
+    controller_name: Incomplete
+    controller_info: Incomplete
+    timestamp: Incomplete
+    def __init__(self, test_class, controller_name, info) -> None: ...
+    def to_dict(self): ...
+
+class ExceptionRecord:
+    exception: Incomplete
+    type: Incomplete
+    stacktrace: Incomplete
+    extras: Incomplete
+    position: Incomplete
+    is_test_signal: Incomplete
+    def __init__(self, e, position: Incomplete | None = ...) -> None: ...
+    def to_dict(self): ...
+    def __deepcopy__(self, memo): ...
+
+class TestResultRecord:
+    test_name: Incomplete
+    test_class: Incomplete
+    begin_time: Incomplete
+    end_time: Incomplete
+    uid: Incomplete
+    signature: Incomplete
+    retry_parent: Incomplete
+    termination_signal: Incomplete
+    extra_errors: Incomplete
+    result: Incomplete
+    def __init__(self, t_name, t_class: Incomplete | None = ...) -> None: ...
+    @property
+    def details(self): ...
+    @property
+    def termination_signal_type(self): ...
+    @property
+    def stacktrace(self): ...
+    @property
+    def extras(self): ...
+    def test_begin(self) -> None: ...
+    def update_record(self) -> None: ...
+    def test_pass(self, e: Incomplete | None = ...) -> None: ...
+    def test_fail(self, e: Incomplete | None = ...) -> None: ...
+    def test_skip(self, e: Incomplete | None = ...) -> None: ...
+    def test_error(self, e: Incomplete | None = ...) -> None: ...
+    def add_error(self, position, e) -> None: ...
+    def to_dict(self): ...
+
+class TestResult:
+    requested: Incomplete
+    failed: Incomplete
+    executed: Incomplete
+    passed: Incomplete
+    skipped: Incomplete
+    error: Incomplete
+    controller_info: Incomplete
+    def __init__(self) -> None: ...
+    def __add__(self, r): ...
+    def add_record(self, record) -> None: ...
+    def add_controller_info_record(self, controller_info_record) -> None: ...
+    def add_class_error(self, test_record) -> None: ...
+    def is_test_executed(self, test_name): ...
+    @property
+    def is_all_pass(self): ...
+    def requested_test_names_dict(self): ...
+    def summary_str(self): ...
+    def summary_dict(self): ...
diff --git a/stubs/mobly/runtime_test_info.pyi b/stubs/mobly/runtime_test_info.pyi
new file mode 100644
index 0000000..9dd1733
--- /dev/null
+++ b/stubs/mobly/runtime_test_info.pyi
@@ -0,0 +1,12 @@
+from mobly import utils as utils
+
+class RuntimeTestInfo:
+    def __init__(self, test_name, log_path, record) -> None: ...
+    @property
+    def name(self): ...
+    @property
+    def signature(self): ...
+    @property
+    def record(self): ...
+    @property
+    def output_path(self): ...
diff --git a/stubs/mobly/signals.pyi b/stubs/mobly/signals.pyi
new file mode 100644
index 0000000..f4fbe53
--- /dev/null
+++ b/stubs/mobly/signals.pyi
@@ -0,0 +1,17 @@
+from _typeshed import Incomplete
+
+class TestSignalError(Exception): ...
+
+class TestSignal(Exception):
+    details: Incomplete
+    extras: Incomplete
+    def __init__(self, details, extras: Incomplete | None = ...) -> None: ...
+
+class TestError(TestSignal): ...
+class TestFailure(TestSignal): ...
+class TestPass(TestSignal): ...
+class TestSkip(TestSignal): ...
+class TestAbortSignal(TestSignal): ...
+class TestAbortClass(TestAbortSignal): ...
+class TestAbortAll(TestAbortSignal): ...
+class ControllerError(Exception): ...
diff --git a/src/antlion/event/__init__.py b/stubs/mobly/snippet/__init__.pyi
similarity index 100%
copy from src/antlion/event/__init__.py
copy to stubs/mobly/snippet/__init__.pyi
diff --git a/stubs/mobly/snippet/callback_event.pyi b/stubs/mobly/snippet/callback_event.pyi
new file mode 100644
index 0000000..f3dfbb9
--- /dev/null
+++ b/stubs/mobly/snippet/callback_event.pyi
@@ -0,0 +1,10 @@
+from _typeshed import Incomplete
+
+def from_dict(event_dict): ...
+
+class CallbackEvent:
+    callback_id: Incomplete
+    name: Incomplete
+    creation_time: Incomplete
+    data: Incomplete
+    def __init__(self, callback_id, name, creation_time, data) -> None: ...
diff --git a/stubs/mobly/snippet/callback_handler_base.pyi b/stubs/mobly/snippet/callback_handler_base.pyi
new file mode 100644
index 0000000..11502d0
--- /dev/null
+++ b/stubs/mobly/snippet/callback_handler_base.pyi
@@ -0,0 +1,31 @@
+import abc
+
+from _typeshed import Incomplete
+from mobly.snippet import callback_event as callback_event
+from mobly.snippet import errors as errors
+
+class CallbackHandlerBase(abc.ABC, metaclass=abc.ABCMeta):
+    ret_value: Incomplete
+    def __init__(
+        self,
+        callback_id,
+        event_client,
+        ret_value,
+        method_name,
+        device,
+        rpc_max_timeout_sec,
+        default_timeout_sec: int = ...,
+    ) -> None: ...
+    @property
+    def rpc_max_timeout_sec(self): ...
+    @property
+    def default_timeout_sec(self): ...
+    @property
+    def callback_id(self): ...
+    @abc.abstractmethod
+    def callEventWaitAndGetRpc(self, callback_id, event_name, timeout_sec): ...
+    @abc.abstractmethod
+    def callEventGetAllRpc(self, callback_id, event_name): ...
+    def waitAndGet(self, event_name, timeout: Incomplete | None = ...): ...
+    def waitForEvent(self, event_name, predicate, timeout: Incomplete | None = ...): ...
+    def getAll(self, event_name): ...
diff --git a/stubs/mobly/snippet/client_base.pyi b/stubs/mobly/snippet/client_base.pyi
new file mode 100644
index 0000000..92d4b9c
--- /dev/null
+++ b/stubs/mobly/snippet/client_base.pyi
@@ -0,0 +1,34 @@
+import abc
+
+from _typeshed import Incomplete
+from mobly.snippet import errors as errors
+
+RPC_RESPONSE_REQUIRED_FIELDS: Incomplete
+
+class ClientBase(abc.ABC, metaclass=abc.ABCMeta):
+    package: Incomplete
+    log: Incomplete
+    verbose_logging: bool
+    def __init__(self, package, device) -> None: ...
+    def __del__(self) -> None: ...
+    def initialize(self) -> None: ...
+    @abc.abstractmethod
+    def before_starting_server(self): ...
+    @abc.abstractmethod
+    def start_server(self): ...
+    @abc.abstractmethod
+    def make_connection(self): ...
+    def __getattr__(self, name): ...
+    def set_snippet_client_verbose_logging(self, verbose) -> None: ...
+    @abc.abstractmethod
+    def restore_server_connection(self, port: Incomplete | None = ...): ...
+    @abc.abstractmethod
+    def check_server_proc_running(self): ...
+    @abc.abstractmethod
+    def send_rpc_request(self, request): ...
+    @abc.abstractmethod
+    def handle_callback(self, callback_id, ret_value, rpc_func_name): ...
+    @abc.abstractmethod
+    def stop(self): ...
+    @abc.abstractmethod
+    def close_connection(self): ...
diff --git a/stubs/mobly/snippet/errors.pyi b/stubs/mobly/snippet/errors.pyi
new file mode 100644
index 0000000..2c2ac2a
--- /dev/null
+++ b/stubs/mobly/snippet/errors.pyi
@@ -0,0 +1,18 @@
+from mobly.controllers.android_device_lib import errors as errors
+
+class Error(errors.DeviceError): ...
+class ServerRestoreConnectionError(Error): ...
+class ServerStartError(Error): ...
+class ServerStartProtocolError(ServerStartError): ...
+class ServerStartPreCheckError(Error): ...
+class ApiError(Error): ...
+
+class ProtocolError(Error):
+    NO_RESPONSE_FROM_HANDSHAKE: str
+    NO_RESPONSE_FROM_SERVER: str
+    MISMATCHED_API_ID: str
+    RESPONSE_MISSING_FIELD: str
+
+class ServerDiedError(Error): ...
+class CallbackHandlerBaseError(errors.DeviceError): ...
+class CallbackHandlerTimeoutError(Error): ...
diff --git a/stubs/mobly/suite_runner.pyi b/stubs/mobly/suite_runner.pyi
new file mode 100644
index 0000000..415ed32
--- /dev/null
+++ b/stubs/mobly/suite_runner.pyi
@@ -0,0 +1,12 @@
+from _typeshed import Incomplete
+from mobly import base_suite as base_suite
+from mobly import base_test as base_test
+from mobly import config_parser as config_parser
+from mobly import signals as signals
+from mobly import test_runner as test_runner
+
+class Error(Exception): ...
+
+def run_suite_class(argv: Incomplete | None = ...) -> None: ...
+def run_suite(test_classes, argv: Incomplete | None = ...) -> None: ...
+def compute_selected_tests(test_classes, selected_tests): ...
diff --git a/stubs/mobly/test_runner.pyi b/stubs/mobly/test_runner.pyi
new file mode 100644
index 0000000..f2aee14
--- /dev/null
+++ b/stubs/mobly/test_runner.pyi
@@ -0,0 +1,53 @@
+from collections.abc import Generator
+
+from _typeshed import Incomplete
+from mobly import base_test as base_test
+from mobly import config_parser as config_parser
+from mobly import logger as logger
+from mobly import records as records
+from mobly import signals as signals
+from mobly import utils as utils
+
+class Error(Exception): ...
+
+def main(argv: Incomplete | None = ...) -> None: ...
+def parse_mobly_cli_args(argv): ...
+
+class TestRunner:
+    class _TestRunInfo:
+        config: Incomplete
+        test_class: Incomplete
+        test_class_name_suffix: Incomplete
+        tests: Incomplete
+        def __init__(
+            self,
+            config,
+            test_class,
+            tests: Incomplete | None = ...,
+            test_class_name_suffix: Incomplete | None = ...,
+        ) -> None: ...
+
+    class _TestRunMetaData:
+        root_output_path: Incomplete
+        def __init__(self, log_dir, testbed_name) -> None: ...
+        def generate_test_run_log_path(self): ...
+        def set_start_point(self) -> None: ...
+        def set_end_point(self) -> None: ...
+        @property
+        def run_id(self): ...
+        @property
+        def time_elapsed_sec(self): ...
+
+    results: Incomplete
+    def __init__(self, log_dir, testbed_name) -> None: ...
+    def mobly_logger(
+        self, alias: str = ..., console_level=...
+    ) -> Generator[Incomplete, None, None]: ...
+    def add_test_class(
+        self,
+        config,
+        test_class,
+        tests: Incomplete | None = ...,
+        name_suffix: Incomplete | None = ...,
+    ) -> None: ...
+    def run(self) -> None: ...
diff --git a/stubs/mobly/utils.pyi b/stubs/mobly/utils.pyi
new file mode 100644
index 0000000..7024b47
--- /dev/null
+++ b/stubs/mobly/utils.pyi
@@ -0,0 +1,43 @@
+from _typeshed import Incomplete
+
+MAX_FILENAME_LEN: int
+MAX_PORT_ALLOCATION_RETRY: int
+ascii_letters_and_digits: Incomplete
+valid_filename_chars: Incomplete
+GMT_to_olson: Incomplete
+
+class Error(Exception): ...
+
+def abs_path(path): ...
+def create_dir(path) -> None: ...
+def create_alias(target_path, alias_path) -> None: ...
+def get_current_epoch_time(): ...
+def get_current_human_time(): ...
+def epoch_to_human_time(epoch_time): ...
+def get_timezone_olson_id(): ...
+def find_files(paths, file_predicate): ...
+def load_file_to_base64_str(f_path): ...
+def find_field(item_list, cond, comparator, target_field): ...
+def rand_ascii_str(length): ...
+def concurrent_exec(
+    func, param_list, max_workers: int = ..., raise_on_exception: bool = ...
+): ...
+def run_command(
+    cmd,
+    stdout=...,
+    stderr=...,
+    shell=...,
+    timeout=...,
+    cwd=...,
+    env=...,
+    universal_newlines: bool = ...,
+) -> tuple[int, bytes, bytes] | tuple[int, str, str]: ...
+def start_standing_subprocess(cmd, shell: bool = ..., env: Incomplete | None = ...): ...
+def stop_standing_subprocess(proc) -> None: ...
+def wait_for_standing_subprocess(proc, timeout: Incomplete | None = ...) -> None: ...
+def get_available_host_port(): ...
+def grep(regex, output): ...
+def cli_cmd_to_string(args): ...
+def get_settable_properties(cls): ...
+def find_subclasses_in_module(base_classes, module): ...
+def find_subclass_in_module(base_class, module): ...
diff --git a/src/antlion/tests/BUILD.gn b/tests/BUILD.gn
similarity index 87%
rename from src/antlion/tests/BUILD.gn
rename to tests/BUILD.gn
index e0d98ba..e17a202 100644
--- a/src/antlion/tests/BUILD.gn
+++ b/tests/BUILD.gn
@@ -19,13 +19,11 @@
   public_deps = [
     "examples:e2e_tests_quick",
     "wlan:e2e_tests_quick",
-    "wlan_policy:e2e_tests",
+    "wlan_policy:e2e_tests_quick",
   ]
 }
 
 group("e2e_tests_manual") {
   testonly = true
-  public_deps = [
-    "wlan:e2e_tests_manual",
-  ]
+  public_deps = [ "wlan:e2e_tests_manual" ]
 }
diff --git a/src/antlion/tests/dhcp/BUILD.gn b/tests/dhcp/BUILD.gn
similarity index 80%
rename from src/antlion/tests/dhcp/BUILD.gn
rename to tests/dhcp/BUILD.gn
index c3acdd3..adf78e0 100644
--- a/src/antlion/tests/dhcp/BUILD.gn
+++ b/tests/dhcp/BUILD.gn
@@ -30,9 +30,9 @@
 group("e2e_tests") {
   testonly = true
   public_deps = [
-    ":dhcpv4_duplicate_address_test($host_toolchain)",
-    ":dhcpv4_interop_basic_test($host_toolchain)",
-    ":dhcpv4_interop_combinatorial_options_test($host_toolchain)",
-    ":dhcpv4_interop_fixture_test($host_toolchain)",
+    ":dhcpv4_duplicate_address_test",
+    ":dhcpv4_interop_basic_test",
+    ":dhcpv4_interop_combinatorial_options_test",
+    ":dhcpv4_interop_fixture_test",
   ]
 }
diff --git a/src/antlion/tests/dhcp/Dhcpv4DuplicateAddressTest.py b/tests/dhcp/Dhcpv4DuplicateAddressTest.py
similarity index 83%
rename from src/antlion/tests/dhcp/Dhcpv4DuplicateAddressTest.py
rename to tests/dhcp/Dhcpv4DuplicateAddressTest.py
index 4614e59..f564011 100644
--- a/src/antlion/tests/dhcp/Dhcpv4DuplicateAddressTest.py
+++ b/tests/dhcp/Dhcpv4DuplicateAddressTest.py
@@ -14,28 +14,30 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+import ipaddress
 import re
+from ipaddress import IPv4Address
+
+from mobly import asserts, signals, test_runner
 
 from antlion.controllers.ap_lib import dhcp_config
 from antlion.controllers.utils_lib.commands import ip
 from antlion.test_utils.dhcp import base_test
 
-from mobly import asserts, test_runner
-
 
 class Dhcpv4DuplicateAddressTest(base_test.Dhcpv4InteropFixture):
-    def setup_test(self):
+    def setup_test(self) -> None:
         super().setup_test()
-        self.extra_addresses = []
+        self.extra_addresses: list[IPv4Address] = []
         self.ap_params = self.setup_ap()
         self.ap_ip_cmd = ip.LinuxIpCommand(self.access_point.ssh)
 
-    def teardown_test(self):
+    def teardown_test(self) -> None:
         super().teardown_test()
         for ip in self.extra_addresses:
-            self.ap_ip_cmd.remove_ipv4_address(self.ap_params["id"], ip)
+            self.ap_ip_cmd.remove_ipv4_address(self.ap_params.id, ip)
 
-    def test_duplicate_address_assignment(self):
+    def test_duplicate_address_assignment(self) -> None:
         """It's possible for a DHCP server to assign an address that already exists on the network.
         DHCP clients are expected to perform a "gratuitous ARP" of the to-be-assigned address, and
         refuse to assign that address. Clients should also recover by asking for a different
@@ -43,10 +45,10 @@
         """
         # Modify subnet to hold fewer addresses.
         # A '/29' has 8 addresses (6 usable excluding router / broadcast)
-        subnet = next(self.ap_params["network"].subnets(new_prefix=29))
+        subnet = next(self.ap_params.network.subnets(new_prefix=29))
         subnet_conf = dhcp_config.Subnet(
             subnet=subnet,
-            router=self.ap_params["ip"],
+            router=self.ap_params.ip,
             # When the DHCP server is considering dynamically allocating an IP address to a client,
             # it first sends an ICMP Echo request (a ping) to the address being assigned. It waits
             # for a second, and if no ICMP Echo response has been heard, it assigns the address.
@@ -62,7 +64,9 @@
         # Add each of the usable IPs as an alias for the router's interface, such that the router
         # will respond to any pings on it.
         for ip in subnet.hosts():
-            self.ap_ip_cmd.add_ipv4_address(self.ap_params["id"], ip)
+            self.ap_ip_cmd.add_ipv4_address(
+                self.ap_params.id, ipaddress.IPv4Interface(f"{ip}/{ip.max_prefixlen}")
+            )
             # Ensure we remove the address in self.teardown_test() even if the test fails
             self.extra_addresses.append(ip)
 
@@ -73,6 +77,9 @@
         # Per spec, the flow should be:
         # Discover -> Offer -> Request -> Ack -> client optionally performs DAD
         dhcp_logs = self.access_point.get_dhcp_logs()
+        if dhcp_logs is None:
+            raise signals.TestError("DHCP logs not found; was the DHCP server started?")
+
         for expected_message in [
             r"DHCPDISCOVER from \S+",
             r"DHCPOFFER on [0-9.]+ to \S+",
@@ -92,12 +99,14 @@
         # router after this will not work.
         while self.extra_addresses:
             self.ap_ip_cmd.remove_ipv4_address(
-                self.ap_params["id"], self.extra_addresses.pop()
+                self.ap_params.id, self.extra_addresses.pop()
             )
 
         # Now, we should get an address successfully
         ip = self.get_device_ipv4_addr()
         dhcp_logs = self.access_point.get_dhcp_logs()
+        if dhcp_logs is None:
+            raise signals.TestError("DHCP logs not found; was the DHCP server started?")
 
         expected_string = f"DHCPREQUEST for {ip}"
         asserts.assert_true(
diff --git a/src/antlion/tests/dhcp/Dhcpv4InteropBasicTest.py b/tests/dhcp/Dhcpv4InteropBasicTest.py
similarity index 73%
rename from src/antlion/tests/dhcp/Dhcpv4InteropBasicTest.py
rename to tests/dhcp/Dhcpv4InteropBasicTest.py
index b3d1ce9..ab07b52 100644
--- a/src/antlion/tests/dhcp/Dhcpv4InteropBasicTest.py
+++ b/tests/dhcp/Dhcpv4InteropBasicTest.py
@@ -14,38 +14,35 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import time
 import re
+import time
+
+from mobly import asserts, signals, test_runner
 
 from antlion.controllers.ap_lib import dhcp_config
 from antlion.test_utils.dhcp import base_test
 
-from mobly import asserts, test_runner
-
 
 class Dhcpv4InteropBasicTest(base_test.Dhcpv4InteropFixture):
     """DhcpV4 tests which validate basic DHCP client/server interactions."""
 
-    def test_basic_dhcp_assignment(self):
+    def test_basic_dhcp_assignment(self) -> None:
         self.run_test_case_expect_dhcp_success(
-            "basic_dhcp_assignment",
-            settings={"dhcp_options": {}, "dhcp_parameters": {}},
+            dhcp_options={},
+            dhcp_parameters={},
         )
 
-    def test_pool_allows_unknown_clients(self):
+    def test_pool_allows_unknown_clients(self) -> None:
         self.run_test_case_expect_dhcp_success(
-            "pool_allows_unknown_clients",
-            settings={
-                "dhcp_options": {},
-                "dhcp_parameters": {"allow": "unknown-clients"},
-            },
+            dhcp_options={},
+            dhcp_parameters={"allow": "unknown-clients"},
         )
 
-    def test_pool_disallows_unknown_clients(self):
+    def test_pool_disallows_unknown_clients(self) -> None:
         ap_params = self.setup_ap()
         subnet_conf = dhcp_config.Subnet(
-            subnet=ap_params["network"],
-            router=ap_params["ip"],
+            subnet=ap_params.network,
+            router=ap_params.ip,
             additional_parameters={"deny": "unknown-clients"},
         )
         dhcp_conf = dhcp_config.DhcpConfig(subnets=[subnet_conf])
@@ -56,18 +53,19 @@
             self.get_device_ipv4_addr()
 
         dhcp_logs = self.access_point.get_dhcp_logs()
+        if dhcp_logs is None:
+            raise signals.TestError("DHCP logs not found; was the DHCP server started?")
+
         asserts.assert_true(
             re.search(r"DHCPDISCOVER from .*no free leases", dhcp_logs),
             "Did not find expected message in dhcp logs: " + dhcp_logs + "\n",
         )
 
-    def test_lease_renewal(self):
+    def test_lease_renewal(self) -> None:
         """Validates that a client renews their DHCP lease."""
         LEASE_TIME = 30
         ap_params = self.setup_ap()
-        subnet_conf = dhcp_config.Subnet(
-            subnet=ap_params["network"], router=ap_params["ip"]
-        )
+        subnet_conf = dhcp_config.Subnet(subnet=ap_params.network, router=ap_params.ip)
         dhcp_conf = dhcp_config.DhcpConfig(
             subnets=[subnet_conf],
             default_lease_time=LEASE_TIME,
@@ -82,10 +80,13 @@
         time.sleep(SLEEP_TIME)
 
         dhcp_logs = self.access_point.get_dhcp_logs()
+        if dhcp_logs is None:
+            raise signals.TestError("DHCP logs not found; was the DHCP server started?")
+
         # Fuchsia renews at LEASE_TIME / 2, so there should be at least 2 DHCPREQUESTs in logs.
         # The log lines look like:
-        # INFO dhcpd[17385]: DHCPREQUEST for 192.168.9.2 from f8:0f:f9:3d:ce:d1 via wlan1
-        # INFO dhcpd[17385]: DHCPACK on 192.168.9.2 to f8:0f:f9:3d:ce:d1 via wlan1
+        # INFO dhcpd[17385]: DHCPREQUEST for 192.168.9.2 from 01:23:45:67:89:ab via wlan1
+        # INFO dhcpd[17385]: DHCPACK on 192.168.9.2 to 01:23:45:67:89:ab via wlan1
         expected_string = f"DHCPREQUEST for {ip}"
         asserts.assert_true(
             dhcp_logs.count(expected_string) >= 2,
diff --git a/src/antlion/tests/dhcp/Dhcpv4InteropCombinatorialOptionsTest.py b/tests/dhcp/Dhcpv4InteropCombinatorialOptionsTest.py
similarity index 67%
rename from src/antlion/tests/dhcp/Dhcpv4InteropCombinatorialOptionsTest.py
rename to tests/dhcp/Dhcpv4InteropCombinatorialOptionsTest.py
index 7e7b379..ddb0aa3 100644
--- a/src/antlion/tests/dhcp/Dhcpv4InteropCombinatorialOptionsTest.py
+++ b/tests/dhcp/Dhcpv4InteropCombinatorialOptionsTest.py
@@ -15,79 +15,80 @@
 # limitations under the License.
 
 import random
-
-from collections import namedtuple
-from typing import Dict, Union
-
-from antlion.test_utils.dhcp import base_test
+from dataclasses import dataclass
 
 from mobly import asserts, test_runner
 
+from antlion.test_utils.dhcp import base_test
+
 OPT_NUM_DOMAIN_SEARCH = 119
 OPT_NUM_DOMAIN_NAME = 15
 
-Test = namedtuple(typename="Args", field_names=["name", "settings"])
+
+@dataclass
+class Test:
+    name: str
+    dhcp_options: dict[str, int | str]
+    dhcp_parameters: dict[str, str]
 
 
 class Dhcpv4InteropCombinatorialOptionsTest(base_test.Dhcpv4InteropFixture):
     """DhcpV4 tests which validate combinations of DHCP options."""
+    def pre_run(self) -> None:
+        def test_logic(t: Test) -> None:
+            self.run_test_case_expect_dhcp_success(t.dhcp_parameters, t.dhcp_options)
 
-    def setup_generated_tests(self) -> None:
+        def name_func(t: Test) -> str:
+            return f"test_{t.name}"
+
         self.generate_tests(
-            self.run_test_case_expect_dhcp_success,
-            lambda name, *_: f"test_{name}",
-            [
-                Test(
-                    "domain_name_valid",
-                    {
-                        "dhcp_options": {
+            test_logic=test_logic,
+            name_func=name_func,
+            arg_sets=[
+                (t,)
+                for t in [
+                    Test(
+                        name="domain_name_valid",
+                        dhcp_options={
                             "domain-name": '"example.test"',
                             "dhcp-parameter-request-list": OPT_NUM_DOMAIN_NAME,
                         },
-                        "dhcp_parameters": {},
-                    },
-                ),
-                Test(
-                    "domain_name_invalid",
-                    {
-                        "dhcp_options": {
+                        dhcp_parameters={},
+                    ),
+                    Test(
+                        name="domain_name_invalid",
+                        dhcp_options={
                             "domain-name": '"example.invalid"',
                             "dhcp-parameter-request-list": OPT_NUM_DOMAIN_NAME,
                         },
-                        "dhcp_parameters": {},
-                    },
-                ),
-                Test(
-                    "domain_search_valid",
-                    {
-                        "dhcp_options": {
+                        dhcp_parameters={},
+                    ),
+                    Test(
+                        name="domain_search_valid",
+                        dhcp_options={
                             "domain-name": '"example.test"',
                             "dhcp-parameter-request-list": OPT_NUM_DOMAIN_SEARCH,
                         },
-                        "dhcp_parameters": {},
-                    },
-                ),
-                Test(
-                    "domain_search_invalid",
-                    {
-                        "dhcp_options": {
+                        dhcp_parameters={},
+                    ),
+                    Test(
+                        name="domain_search_invalid",
+                        dhcp_options={
                             "domain-name": '"example.invalid"',
                             "dhcp-parameter-request-list": OPT_NUM_DOMAIN_SEARCH,
                         },
-                        "dhcp_parameters": {},
-                    },
-                ),
-                Test(
-                    "max_sized_message",
-                    {
-                        "dhcp_options": self._generate_max_sized_message_dhcp_options(),
-                        "dhcp_parameters": {},
-                    },
-                ),
+                        dhcp_parameters={},
+                    ),
+                    Test(
+                        name="max_sized_message",
+                        dhcp_options=self._generate_max_sized_message_dhcp_options(),
+                        dhcp_parameters={},
+                    ),
+                ]
             ],
         )
 
-    def _generate_max_sized_message_dhcp_options(self) -> Dict[str, Union[int, str]]:
+    def _generate_max_sized_message_dhcp_options(self) -> dict[str, int | str]:
         """Generates the DHCP options for max sized message test.
 
         The RFC limits DHCP payloads to 576 bytes unless the client signals it
diff --git a/src/antlion/tests/dhcp/Dhcpv4InteropFixtureTest.py b/tests/dhcp/Dhcpv4InteropFixtureTest.py
similarity index 87%
rename from src/antlion/tests/dhcp/Dhcpv4InteropFixtureTest.py
rename to tests/dhcp/Dhcpv4InteropFixtureTest.py
index ebbf866..7303052 100644
--- a/src/antlion/tests/dhcp/Dhcpv4InteropFixtureTest.py
+++ b/tests/dhcp/Dhcpv4InteropFixtureTest.py
@@ -14,11 +14,11 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+from mobly import asserts, test_runner
+
 from antlion.controllers.ap_lib import dhcp_config
 from antlion.test_utils.dhcp import base_test
 
-from mobly import asserts, test_runner
-
 
 class Dhcpv4InteropFixtureTest(base_test.Dhcpv4InteropFixture):
     """Tests which validate the behavior of the Dhcpv4InteropFixture.
@@ -27,31 +27,31 @@
     since they interact with hardware (specifically, the AP), we have to
     write and run them like the rest of the ACTS tests."""
 
-    def test_invalid_options_not_accepted(self):
+    def test_invalid_options_not_accepted(self) -> None:
         """Ensures the DHCP server doesn't accept invalid options"""
         ap_params = self.setup_ap()
         subnet_conf = dhcp_config.Subnet(
-            subnet=ap_params["network"],
-            router=ap_params["ip"],
+            subnet=ap_params.network,
+            router=ap_params.ip,
             additional_options={"foo": "bar"},
         )
         dhcp_conf = dhcp_config.DhcpConfig(subnets=[subnet_conf])
         with asserts.assert_raises_regex(Exception, r"failed to start"):
             self.access_point.start_dhcp(dhcp_conf=dhcp_conf)
 
-    def test_invalid_parameters_not_accepted(self):
+    def test_invalid_parameters_not_accepted(self) -> None:
         """Ensures the DHCP server doesn't accept invalid parameters"""
         ap_params = self.setup_ap()
         subnet_conf = dhcp_config.Subnet(
-            subnet=ap_params["network"],
-            router=ap_params["ip"],
+            subnet=ap_params.network,
+            router=ap_params.ip,
             additional_parameters={"foo": "bar"},
         )
         dhcp_conf = dhcp_config.DhcpConfig(subnets=[subnet_conf])
         with asserts.assert_raises_regex(Exception, r"failed to start"):
             self.access_point.start_dhcp(dhcp_conf=dhcp_conf)
 
-    def test_no_dhcp_server_started(self):
+    def test_no_dhcp_server_started(self) -> None:
         """Validates that the test fixture does not start a DHCP server."""
         ap_params = self.setup_ap()
         self.connect(ap_params=ap_params)
diff --git a/src/antlion/tests/examples/BUILD.gn b/tests/examples/BUILD.gn
similarity index 80%
rename from src/antlion/tests/examples/BUILD.gn
rename to tests/examples/BUILD.gn
index 066d515..f42d767 100644
--- a/src/antlion/tests/examples/BUILD.gn
+++ b/tests/examples/BUILD.gn
@@ -14,14 +14,10 @@
 
 group("e2e_tests_quick") {
   testonly = true
-  public_deps = [
-    ":sl4f_sanity_test($host_toolchain)",
-  ]
+  public_deps = [ ":sl4f_sanity_test" ]
 }
 
 group("e2e_tests") {
   testonly = true
-  public_deps = [
-    ":sl4f_sanity_test($host_toolchain)",
-  ]
+  public_deps = [ ":sl4f_sanity_test" ]
 }
diff --git a/src/antlion/tests/examples/Sl4fSanityTest.py b/tests/examples/Sl4fSanityTest.py
similarity index 78%
rename from src/antlion/tests/examples/Sl4fSanityTest.py
rename to tests/examples/Sl4fSanityTest.py
index 82c04f3..d97c7be 100644
--- a/src/antlion/tests/examples/Sl4fSanityTest.py
+++ b/tests/examples/Sl4fSanityTest.py
@@ -19,18 +19,18 @@
 """
 
 import logging
-from typing import List
 
+from mobly import asserts, test_runner
+
+from antlion import base_test
 from antlion.controllers import fuchsia_device
 from antlion.controllers.fuchsia_device import FuchsiaDevice
 
-from mobly import asserts, test_runner, base_test
 
-
-class Sl4fSanityTest(base_test.BaseTestClass):
-    def setup_class(self):
+class Sl4fSanityTest(base_test.AntlionBaseTest):
+    def setup_class(self) -> None:
         self.log = logging.getLogger()
-        self.fuchsia_devices: List[FuchsiaDevice] = self.register_controller(
+        self.fuchsia_devices: list[FuchsiaDevice] = self.register_controller(
             fuchsia_device
         )
 
@@ -38,9 +38,9 @@
             len(self.fuchsia_devices) == 0, "Requires at least one Fuchsia device"
         )
 
-    def test_example(self):
+    def test_example(self) -> None:
         for fuchsia_device in self.fuchsia_devices:
-            res = fuchsia_device.sl4f.netstack_lib.netstackListInterfaces()
+            res = fuchsia_device.honeydew_fd.netstack.list_interfaces()
             self.log.info(res)
 
 
diff --git a/src/antlion/tests/wlan/BUILD.gn b/tests/wlan/BUILD.gn
similarity index 86%
rename from src/antlion/tests/wlan/BUILD.gn
rename to tests/wlan/BUILD.gn
index 717fed9..a79ca51 100644
--- a/src/antlion/tests/wlan/BUILD.gn
+++ b/tests/wlan/BUILD.gn
@@ -18,13 +18,16 @@
 group("e2e_tests_quick") {
   testonly = true
   public_deps = [
+    "compliance:e2e_tests_quick",
     "functional:e2e_tests_quick",
+    "performance:e2e_tests_quick",
   ]
 }
 
 group("e2e_tests_manual") {
   testonly = true
   public_deps = [
+    "compliance:e2e_tests_manual",
     "functional:e2e_tests_manual",
     "performance:e2e_tests_manual",
   ]
diff --git a/tests/wlan/compliance/BUILD.gn b/tests/wlan/compliance/BUILD.gn
new file mode 100644
index 0000000..da0c7ab
--- /dev/null
+++ b/tests/wlan/compliance/BUILD.gn
@@ -0,0 +1,611 @@
+# Copyright 2023 The Fuchsia Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//third_party/antlion/antlion_host_test.gni")
+import("//third_party/antlion/environments.gni")
+
+assert(is_host, "antlion tests only supported for host testing")
+
+# wlan phy n compliance tests
+antlion_host_test("wlan_phy_compliance_11n_2g_ht20_open_test") {
+  main_source = "WlanPhyCompliance11NTest.py"
+  environments = display_ap_envs
+  test_cases = [ "re:test_11n_2\.4GHz_HT20_open_.*" ]
+}
+
+antlion_host_test("wlan_phy_compliance_11n_2g_ht40lower_open_test") {
+  main_source = "WlanPhyCompliance11NTest.py"
+  environments = display_ap_envs
+  test_cases = [ "re:test_11n_2\.4GHz_HT40Lower_open_.*" ]
+}
+
+antlion_host_test("wlan_phy_compliance_11n_2g_ht40upper_open_test") {
+  main_source = "WlanPhyCompliance11NTest.py"
+  environments = display_ap_envs
+  test_cases = [ "re:test_11n_2\.4GHz_HT40Upper_open_.*" ]
+}
+
+antlion_host_test("wlan_phy_compliance_11n_5g_ht20_open_test") {
+  main_source = "WlanPhyCompliance11NTest.py"
+  environments = display_ap_envs
+  test_cases = [ "re:test_11n_5GHz_HT20_open_.*" ]
+}
+
+antlion_host_test("wlan_phy_compliance_11n_5g_ht40lower_open_test") {
+  main_source = "WlanPhyCompliance11NTest.py"
+  environments = display_ap_envs
+  test_cases = [ "re:test_11n_5GHz_HT40Lower_open_.*" ]
+}
+
+antlion_host_test("wlan_phy_compliance_11n_5g_ht40upper_open_test") {
+  main_source = "WlanPhyCompliance11NTest.py"
+  environments = display_ap_envs
+  test_cases = [ "re:test_11n_5GHz_HT40Upper_open_.*" ]
+}
+
+antlion_host_test("wlan_phy_compliance_11n_2g_ht20_wpa2_test") {
+  main_source = "WlanPhyCompliance11NTest.py"
+  environments = display_ap_envs
+  test_cases = [ "re:test_11n_2\.4GHz_HT20_wpa2_.*" ]
+}
+
+antlion_host_test("wlan_phy_compliance_11n_2g_ht40lower_wpa2_test") {
+  main_source = "WlanPhyCompliance11NTest.py"
+  environments = display_ap_envs
+  test_cases = [ "re:test_11n_2\.4GHz_HT40Lower_wpa2_.*" ]
+}
+
+antlion_host_test("wlan_phy_compliance_11n_2g_ht40upper_wpa2_test") {
+  main_source = "WlanPhyCompliance11NTest.py"
+  environments = display_ap_envs
+  test_cases = [ "re:test_11n_2\.4GHz_HT40Upper_wpa2_.*" ]
+}
+
+antlion_host_test("wlan_phy_compliance_11n_5g_ht20_wpa2_test") {
+  main_source = "WlanPhyCompliance11NTest.py"
+  environments = display_ap_envs
+  test_cases = [ "re:test_11n_5GHz_HT20_wpa2_.*" ]
+}
+
+antlion_host_test("wlan_phy_compliance_11n_5g_ht40lower_wpa2_test") {
+  main_source = "WlanPhyCompliance11NTest.py"
+  environments = display_ap_envs
+  test_cases = [ "re:test_11n_5GHz_HT40Lower_wpa2_.*" ]
+}
+
+antlion_host_test("wlan_phy_compliance_11n_5g_ht40upper_wpa2_test") {
+  main_source = "WlanPhyCompliance11NTest.py"
+  environments = display_ap_envs
+  test_cases = [ "re:test_11n_5GHz_HT40Upper_wpa2_.*" ]
+}
+
+# wlan phy ac compliance tests
+# 20mhz open
+
+antlion_host_test(
+    "wlan_phy_compliance_ac_5g_20mhz_open_max_a_mpdu_len_exp0_test") {
+  main_source = "WlanPhyCompliance11ACTest.py"
+  environments = display_ap_envs
+  test_cases = [ "re:test_11ac_20mhz_open_(?:.*MAX-A-MPDU-LEN-EXP0.*)" ]
+}
+
+antlion_host_test(
+    "wlan_phy_compliance_ac_5g_20mhz_open_max_a_mpdu_len_exp1_test") {
+  main_source = "WlanPhyCompliance11ACTest.py"
+  environments = display_ap_envs
+  test_cases = [ "re:test_11ac_20mhz_open_(?:.*MAX-A-MPDU-LEN-EXP1.*)" ]
+}
+
+antlion_host_test(
+    "wlan_phy_compliance_ac_5g_20mhz_open_max_a_mpdu_len_exp2_test") {
+  main_source = "WlanPhyCompliance11ACTest.py"
+  environments = display_ap_envs
+  test_cases = [ "re:test_11ac_20mhz_open_(?:.*MAX-A-MPDU-LEN-EXP2.*)" ]
+}
+
+antlion_host_test(
+    "wlan_phy_compliance_ac_5g_20mhz_open_max_a_mpdu_len_exp3_test") {
+  main_source = "WlanPhyCompliance11ACTest.py"
+  environments = display_ap_envs
+  test_cases = [ "re:test_11ac_20mhz_open_(?:.*MAX-A-MPDU-LEN-EXP3.*)" ]
+}
+
+antlion_host_test(
+    "wlan_phy_compliance_ac_5g_20mhz_open_max_a_mpdu_len_exp4_test") {
+  main_source = "WlanPhyCompliance11ACTest.py"
+  environments = display_ap_envs
+  test_cases = [ "re:test_11ac_20mhz_open_(?:.*MAX-A-MPDU-LEN-EXP4.*)" ]
+}
+
+antlion_host_test(
+    "wlan_phy_compliance_ac_5g_20mhz_open_max_a_mpdu_len_exp5_test") {
+  main_source = "WlanPhyCompliance11ACTest.py"
+  environments = display_ap_envs
+  test_cases = [ "re:test_11ac_20mhz_open_(?:.*MAX-A-MPDU-LEN-EXP5.*)" ]
+}
+
+antlion_host_test(
+    "wlan_phy_compliance_ac_5g_20mhz_open_max_a_mpdu_len_exp6_test") {
+  main_source = "WlanPhyCompliance11ACTest.py"
+  environments = display_ap_envs
+  test_cases = [ "re:test_11ac_20mhz_open_(?:.*MAX-A-MPDU-LEN-EXP6.*)" ]
+}
+
+antlion_host_test(
+    "wlan_phy_compliance_ac_5g_20mhz_open_max_a_mpdu_len_exp7_test") {
+  main_source = "WlanPhyCompliance11ACTest.py"
+  environments = display_ap_envs
+  test_cases = [ "re:test_11ac_20mhz_open_(?:.*MAX-A-MPDU-LEN-EXP7.*)" ]
+}
+
+antlion_host_test(
+    "wlan_phy_compliance_ac_5g_20mhz_open_max_a_mpdu_len_disabled_test") {
+  main_source = "WlanPhyCompliance11ACTest.py"
+  environments = display_ap_envs
+  test_cases = [ "re:test_11ac_20mhz_open((?!MAX-A-MPDU-LEN-EXP[\d]).)*\$" ]
+}
+
+# 20mhz wpa2
+
+antlion_host_test(
+    "wlan_phy_compliance_ac_5g_20mhz_wpa2_max_a_mpdu_len_exp0_test") {
+  main_source = "WlanPhyCompliance11ACTest.py"
+  environments = display_ap_envs
+  test_cases = [ "re:test_11ac_20mhz_wpa2_(?:.*MAX-A-MPDU-LEN-EXP0.*)" ]
+}
+
+antlion_host_test(
+    "wlan_phy_compliance_ac_5g_20mhz_wpa2_max_a_mpdu_len_exp1_test") {
+  main_source = "WlanPhyCompliance11ACTest.py"
+  environments = display_ap_envs
+  test_cases = [ "re:test_11ac_20mhz_wpa2_(?:.*MAX-A-MPDU-LEN-EXP1.*)" ]
+}
+
+antlion_host_test(
+    "wlan_phy_compliance_ac_5g_20mhz_wpa2_max_a_mpdu_len_exp2_test") {
+  main_source = "WlanPhyCompliance11ACTest.py"
+  environments = display_ap_envs
+  test_cases = [ "re:test_11ac_20mhz_wpa2_(?:.*MAX-A-MPDU-LEN-EXP2.*)" ]
+}
+
+antlion_host_test(
+    "wlan_phy_compliance_ac_5g_20mhz_wpa2_max_a_mpdu_len_exp3_test") {
+  main_source = "WlanPhyCompliance11ACTest.py"
+  environments = display_ap_envs
+  test_cases = [ "re:test_11ac_20mhz_wpa2_(?:.*MAX-A-MPDU-LEN-EXP3.*)" ]
+}
+
+antlion_host_test(
+    "wlan_phy_compliance_ac_5g_20mhz_wpa2_max_a_mpdu_len_exp4_test") {
+  main_source = "WlanPhyCompliance11ACTest.py"
+  environments = display_ap_envs
+  test_cases = [ "re:test_11ac_20mhz_wpa2_(?:.*MAX-A-MPDU-LEN-EXP4.*)" ]
+}
+
+antlion_host_test(
+    "wlan_phy_compliance_ac_5g_20mhz_wpa2_max_a_mpdu_len_exp5_test") {
+  main_source = "WlanPhyCompliance11ACTest.py"
+  environments = display_ap_envs
+  test_cases = [ "re:test_11ac_20mhz_wpa2_(?:.*MAX-A-MPDU-LEN-EXP5.*)" ]
+}
+
+antlion_host_test(
+    "wlan_phy_compliance_ac_5g_20mhz_wpa2_max_a_mpdu_len_exp6_test") {
+  main_source = "WlanPhyCompliance11ACTest.py"
+  environments = display_ap_envs
+  test_cases = [ "re:test_11ac_20mhz_wpa2_(?:.*MAX-A-MPDU-LEN-EXP6.*)" ]
+}
+
+antlion_host_test(
+    "wlan_phy_compliance_ac_5g_20mhz_wpa2_max_a_mpdu_len_exp7_test") {
+  main_source = "WlanPhyCompliance11ACTest.py"
+  environments = display_ap_envs
+  test_cases = [ "re:test_11ac_20mhz_wpa2_(?:.*MAX-A-MPDU-LEN-EXP7.*)" ]
+}
+
+antlion_host_test(
+    "wlan_phy_compliance_ac_5g_20mhz_wpa2_max_a_mpdu_len_disabled_test") {
+  main_source = "WlanPhyCompliance11ACTest.py"
+  environments = display_ap_envs
+  test_cases = [ "re:test_11ac_20mhz_wpa2((?!MAX-A-MPDU-LEN-EXP[\d]).)*\$" ]
+}
+
+# 40mhz open
+
+antlion_host_test(
+    "wlan_phy_compliance_ac_5g_40mhz_open_max_a_mpdu_len_exp0_test") {
+  main_source = "WlanPhyCompliance11ACTest.py"
+  environments = display_ap_envs
+  test_cases = [ "re:test_11ac_40mhz_open_(?:.*MAX-A-MPDU-LEN-EXP0.*)" ]
+}
+
+antlion_host_test(
+    "wlan_phy_compliance_ac_5g_40mhz_open_max_a_mpdu_len_exp1_test") {
+  main_source = "WlanPhyCompliance11ACTest.py"
+  environments = display_ap_envs
+  test_cases = [ "re:test_11ac_40mhz_open_(?:.*MAX-A-MPDU-LEN-EXP1.*)" ]
+}
+
+antlion_host_test(
+    "wlan_phy_compliance_ac_5g_40mhz_open_max_a_mpdu_len_exp2_test") {
+  main_source = "WlanPhyCompliance11ACTest.py"
+  environments = display_ap_envs
+  test_cases = [ "re:test_11ac_40mhz_open_(?:.*MAX-A-MPDU-LEN-EXP2.*)" ]
+}
+
+antlion_host_test(
+    "wlan_phy_compliance_ac_5g_40mhz_open_max_a_mpdu_len_exp3_test") {
+  main_source = "WlanPhyCompliance11ACTest.py"
+  environments = display_ap_envs
+  test_cases = [ "re:test_11ac_40mhz_open_(?:.*MAX-A-MPDU-LEN-EXP3.*)" ]
+}
+
+antlion_host_test(
+    "wlan_phy_compliance_ac_5g_40mhz_open_max_a_mpdu_len_exp4_test") {
+  main_source = "WlanPhyCompliance11ACTest.py"
+  environments = display_ap_envs
+  test_cases = [ "re:test_11ac_40mhz_open_(?:.*MAX-A-MPDU-LEN-EXP4.*)" ]
+}
+
+antlion_host_test(
+    "wlan_phy_compliance_ac_5g_40mhz_open_max_a_mpdu_len_exp5_test") {
+  main_source = "WlanPhyCompliance11ACTest.py"
+  environments = display_ap_envs
+  test_cases = [ "re:test_11ac_40mhz_open_(?:.*MAX-A-MPDU-LEN-EXP5.*)" ]
+}
+
+antlion_host_test(
+    "wlan_phy_compliance_ac_5g_40mhz_open_max_a_mpdu_len_exp6_test") {
+  main_source = "WlanPhyCompliance11ACTest.py"
+  environments = display_ap_envs
+  test_cases = [ "re:test_11ac_40mhz_open_(?:.*MAX-A-MPDU-LEN-EXP6.*)" ]
+}
+
+antlion_host_test(
+    "wlan_phy_compliance_ac_5g_40mhz_open_max_a_mpdu_len_exp7_test") {
+  main_source = "WlanPhyCompliance11ACTest.py"
+  environments = display_ap_envs
+  test_cases = [ "re:test_11ac_40mhz_open_(?:.*MAX-A-MPDU-LEN-EXP7.*)" ]
+}
+
+antlion_host_test(
+    "wlan_phy_compliance_ac_5g_40mhz_open_max_a_mpdu_len_disabled_test") {
+  main_source = "WlanPhyCompliance11ACTest.py"
+  environments = display_ap_envs
+  test_cases = [ "re:test_11ac_40mhz_open((?!MAX-A-MPDU-LEN-EXP[\d]).)*\$" ]
+}
+
+# 40mhz wpa2
+
+antlion_host_test(
+    "wlan_phy_compliance_ac_5g_40mhz_wpa2_max_a_mpdu_len_exp0_test") {
+  main_source = "WlanPhyCompliance11ACTest.py"
+  environments = display_ap_envs
+  test_cases = [ "re:test_11ac_40mhz_wpa2_(?:.*MAX-A-MPDU-LEN-EXP0.*)" ]
+}
+
+antlion_host_test(
+    "wlan_phy_compliance_ac_5g_40mhz_wpa2_max_a_mpdu_len_exp1_test") {
+  main_source = "WlanPhyCompliance11ACTest.py"
+  environments = display_ap_envs
+  test_cases = [ "re:test_11ac_40mhz_wpa2_(?:.*MAX-A-MPDU-LEN-EXP1.*)" ]
+}
+
+antlion_host_test(
+    "wlan_phy_compliance_ac_5g_40mhz_wpa2_max_a_mpdu_len_exp2_test") {
+  main_source = "WlanPhyCompliance11ACTest.py"
+  environments = display_ap_envs
+  test_cases = [ "re:test_11ac_40mhz_wpa2_(?:.*MAX-A-MPDU-LEN-EXP2.*)" ]
+}
+
+antlion_host_test(
+    "wlan_phy_compliance_ac_5g_40mhz_wpa2_max_a_mpdu_len_exp3_test") {
+  main_source = "WlanPhyCompliance11ACTest.py"
+  environments = display_ap_envs
+  test_cases = [ "re:test_11ac_40mhz_wpa2_(?:.*MAX-A-MPDU-LEN-EXP3.*)" ]
+}
+
+antlion_host_test(
+    "wlan_phy_compliance_ac_5g_40mhz_wpa2_max_a_mpdu_len_exp4_test") {
+  main_source = "WlanPhyCompliance11ACTest.py"
+  environments = display_ap_envs
+  test_cases = [ "re:test_11ac_40mhz_wpa2_(?:.*MAX-A-MPDU-LEN-EXP4.*)" ]
+}
+
+antlion_host_test(
+    "wlan_phy_compliance_ac_5g_40mhz_wpa2_max_a_mpdu_len_exp5_test") {
+  main_source = "WlanPhyCompliance11ACTest.py"
+  environments = display_ap_envs
+  test_cases = [ "re:test_11ac_40mhz_wpa2_(?:.*MAX-A-MPDU-LEN-EXP5.*)" ]
+}
+
+antlion_host_test(
+    "wlan_phy_compliance_ac_5g_40mhz_wpa2_max_a_mpdu_len_exp6_test") {
+  main_source = "WlanPhyCompliance11ACTest.py"
+  environments = display_ap_envs
+  test_cases = [ "re:test_11ac_40mhz_wpa2_(?:.*MAX-A-MPDU-LEN-EXP6.*)" ]
+}
+
+antlion_host_test(
+    "wlan_phy_compliance_ac_5g_40mhz_wpa2_max_a_mpdu_len_exp7_test") {
+  main_source = "WlanPhyCompliance11ACTest.py"
+  environments = display_ap_envs
+  test_cases = [ "re:test_11ac_40mhz_wpa2_(?:.*MAX-A-MPDU-LEN-EXP7.*)" ]
+}
+
+antlion_host_test(
+    "wlan_phy_compliance_ac_5g_40mhz_wpa2_max_a_mpdu_len_disabled_test") {
+  main_source = "WlanPhyCompliance11ACTest.py"
+  environments = display_ap_envs
+  test_cases = [ "re:test_11ac_40mhz_wpa2((?!MAX-A-MPDU-LEN-EXP[\d]).)*\$" ]
+}
+
+# 80mhz open
+
+antlion_host_test(
+    "wlan_phy_compliance_ac_5g_80mhz_open_max_a_mpdu_len_exp0_test") {
+  main_source = "WlanPhyCompliance11ACTest.py"
+  environments = display_ap_envs
+  test_cases = [ "re:test_11ac_80mhz_open_(?:.*MAX-A-MPDU-LEN-EXP0.*)" ]
+}
+
+antlion_host_test(
+    "wlan_phy_compliance_ac_5g_80mhz_open_max_a_mpdu_len_exp1_test") {
+  main_source = "WlanPhyCompliance11ACTest.py"
+  environments = display_ap_envs
+  test_cases = [ "re:test_11ac_80mhz_open_(?:.*MAX-A-MPDU-LEN-EXP1.*)" ]
+}
+
+antlion_host_test(
+    "wlan_phy_compliance_ac_5g_80mhz_open_max_a_mpdu_len_exp2_test") {
+  main_source = "WlanPhyCompliance11ACTest.py"
+  environments = display_ap_envs
+  test_cases = [ "re:test_11ac_80mhz_open_(?:.*MAX-A-MPDU-LEN-EXP2.*)" ]
+}
+
+antlion_host_test(
+    "wlan_phy_compliance_ac_5g_80mhz_open_max_a_mpdu_len_exp3_test") {
+  main_source = "WlanPhyCompliance11ACTest.py"
+  environments = display_ap_envs
+  test_cases = [ "re:test_11ac_80mhz_open_(?:.*MAX-A-MPDU-LEN-EXP3.*)" ]
+}
+
+antlion_host_test(
+    "wlan_phy_compliance_ac_5g_80mhz_open_max_a_mpdu_len_exp4_test") {
+  main_source = "WlanPhyCompliance11ACTest.py"
+  environments = display_ap_envs
+  test_cases = [ "re:test_11ac_80mhz_open_(?:.*MAX-A-MPDU-LEN-EXP4.*)" ]
+}
+
+antlion_host_test(
+    "wlan_phy_compliance_ac_5g_80mhz_open_max_a_mpdu_len_exp5_test") {
+  main_source = "WlanPhyCompliance11ACTest.py"
+  environments = display_ap_envs
+  test_cases = [ "re:test_11ac_80mhz_open_(?:.*MAX-A-MPDU-LEN-EXP5.*)" ]
+}
+
+antlion_host_test(
+    "wlan_phy_compliance_ac_5g_80mhz_open_max_a_mpdu_len_exp6_test") {
+  main_source = "WlanPhyCompliance11ACTest.py"
+  environments = display_ap_envs
+  test_cases = [ "re:test_11ac_80mhz_open_(?:.*MAX-A-MPDU-LEN-EXP6.*)" ]
+}
+
+antlion_host_test(
+    "wlan_phy_compliance_ac_5g_80mhz_open_max_a_mpdu_len_exp7_test") {
+  main_source = "WlanPhyCompliance11ACTest.py"
+  environments = display_ap_envs
+  test_cases = [ "re:test_11ac_80mhz_open_(?:.*MAX-A-MPDU-LEN-EXP7.*)" ]
+}
+
+antlion_host_test(
+    "wlan_phy_compliance_ac_5g_80mhz_open_max_a_mpdu_len_disabled_test") {
+  main_source = "WlanPhyCompliance11ACTest.py"
+  environments = display_ap_envs
+  test_cases = [ "re:test_11ac_80mhz_open((?!MAX-A-MPDU-LEN-EXP[\d]).)*\$" ]
+}
+
+# 80mhz wpa2
+
+antlion_host_test(
+    "wlan_phy_compliance_ac_5g_80mhz_wpa2_max_a_mpdu_len_exp0_test") {
+  main_source = "WlanPhyCompliance11ACTest.py"
+  environments = display_ap_envs
+  test_cases = [ "re:test_11ac_80mhz_wpa2_(?:.*MAX-A-MPDU-LEN-EXP0.*)" ]
+}
+
+antlion_host_test(
+    "wlan_phy_compliance_ac_5g_80mhz_wpa2_max_a_mpdu_len_exp1_test") {
+  main_source = "WlanPhyCompliance11ACTest.py"
+  environments = display_ap_envs
+  test_cases = [ "re:test_11ac_80mhz_wpa2_(?:.*MAX-A-MPDU-LEN-EXP1.*)" ]
+}
+
+antlion_host_test(
+    "wlan_phy_compliance_ac_5g_80mhz_wpa2_max_a_mpdu_len_exp2_test") {
+  main_source = "WlanPhyCompliance11ACTest.py"
+  environments = display_ap_envs
+  test_cases = [ "re:test_11ac_80mhz_wpa2_(?:.*MAX-A-MPDU-LEN-EXP2.*)" ]
+}
+
+antlion_host_test(
+    "wlan_phy_compliance_ac_5g_80mhz_wpa2_max_a_mpdu_len_exp3_test") {
+  main_source = "WlanPhyCompliance11ACTest.py"
+  environments = display_ap_envs
+  test_cases = [ "re:test_11ac_80mhz_wpa2_(?:.*MAX-A-MPDU-LEN-EXP3.*)" ]
+}
+
+antlion_host_test(
+    "wlan_phy_compliance_ac_5g_80mhz_wpa2_max_a_mpdu_len_exp4_test") {
+  main_source = "WlanPhyCompliance11ACTest.py"
+  environments = display_ap_envs
+  test_cases = [ "re:test_11ac_80mhz_wpa2_(?:.*MAX-A-MPDU-LEN-EXP4.*)" ]
+}
+
+antlion_host_test(
+    "wlan_phy_compliance_ac_5g_80mhz_wpa2_max_a_mpdu_len_exp5_test") {
+  main_source = "WlanPhyCompliance11ACTest.py"
+  environments = display_ap_envs
+  test_cases = [ "re:test_11ac_80mhz_wpa2_(?:.*MAX-A-MPDU-LEN-EXP5.*)" ]
+}
+
+antlion_host_test(
+    "wlan_phy_compliance_ac_5g_80mhz_wpa2_max_a_mpdu_len_exp6_test") {
+  main_source = "WlanPhyCompliance11ACTest.py"
+  environments = display_ap_envs
+  test_cases = [ "re:test_11ac_80mhz_wpa2_(?:.*MAX-A-MPDU-LEN-EXP6.*)" ]
+}
+
+antlion_host_test(
+    "wlan_phy_compliance_ac_5g_80mhz_wpa2_max_a_mpdu_len_exp7_test") {
+  main_source = "WlanPhyCompliance11ACTest.py"
+  environments = display_ap_envs
+  test_cases = [ "re:test_11ac_80mhz_wpa2_(?:.*MAX-A-MPDU-LEN-EXP7.*)" ]
+}
+
+antlion_host_test(
+    "wlan_phy_compliance_ac_5g_80mhz_wpa2_max_a_mpdu_len_disabled_test") {
+  main_source = "WlanPhyCompliance11ACTest.py"
+  environments = display_ap_envs
+  test_cases = [ "re:test_11ac_80mhz_wpa2((?!MAX-A-MPDU-LEN-EXP[\d]).)*\$" ]
+}
+
+antlion_host_test("wlan_security_compliance_11a_test") {
+  main_source = "WlanSecurityComplianceABGTest.py"
+  environments = display_ap_envs
+  test_cases = [ "re:test.*11a.*" ]
+  timeout_secs = 1800
+}
+
+antlion_host_test("wlan_security_compliance_11bg_test") {
+  main_source = "WlanSecurityComplianceABGTest.py"
+  environments = display_ap_envs
+  test_cases = [ "re:test.*11bg.*" ]
+  timeout_secs = 1800
+}
+
+antlion_host_test("wlan_phy_compliance_abg_test") {
+  main_source = "WlanPhyComplianceABGTest.py"
+  environments = display_ap_envs
+  timeout_secs = 1200
+}
+
+antlion_host_test("regulatory_compliance_test") {
+  main_source = "RegulatoryComplianceTest.py"
+  environments = display_ap_envs
+}
+
+antlion_host_test("vape_interop_test") {
+  main_source = "VapeInteropTest.py"
+  environments = display_ap_envs
+  timeout_secs = 900
+}
+
+antlion_host_test("wlan_security_compliance_abg_test_quick") {
+  main_source = "WlanSecurityComplianceABGTest.py"
+  environments = display_ap_envs
+  test_cases = [
+    "test_associate_11bg_sec_open_wep_26_hex_ptk_none",
+    "test_associate_11bg_sec_wpa_psk_ptk_tkip",
+    "test_associate_11bg_sec_wpa_psk_ptk_ccmp",
+    "test_associate_11bg_sec_wpa2_psk_ptk_tkip",
+    "test_associate_11bg_sec_wpa2_psk_ptk_ccmp",
+    "test_associate_11bg_pmf_sec_wpa2_psk_ptk_ccmp",
+    "test_associate_11bg_sec_wpa_wpa2_psk_ptk_tkip",
+    "test_associate_11bg_sec_wpa_wpa2_psk_ptk_ccmp",
+    "test_associate_11bg_sec_wpa3_sae_ptk_ccmp",
+    "test_associate_11bg_sec_wpa2_wpa3_psk_sae_ptk_ccmp",
+  ]
+}
+
+group("e2e_tests") {
+  testonly = true
+  public_deps = [
+    ":vape_interop_test",
+    ":wlan_phy_compliance_11n_2g_ht20_open_test",
+    ":wlan_phy_compliance_11n_2g_ht20_wpa2_test",
+    ":wlan_phy_compliance_11n_2g_ht40lower_open_test",
+    ":wlan_phy_compliance_11n_2g_ht40lower_wpa2_test",
+    ":wlan_phy_compliance_11n_2g_ht40upper_open_test",
+    ":wlan_phy_compliance_11n_2g_ht40upper_wpa2_test",
+    ":wlan_phy_compliance_11n_5g_ht20_open_test",
+    ":wlan_phy_compliance_11n_5g_ht20_wpa2_test",
+    ":wlan_phy_compliance_11n_5g_ht40lower_open_test",
+    ":wlan_phy_compliance_11n_5g_ht40lower_wpa2_test",
+    ":wlan_phy_compliance_11n_5g_ht40upper_open_test",
+    ":wlan_phy_compliance_11n_5g_ht40upper_wpa2_test",
+    ":wlan_phy_compliance_abg_test",
+    ":wlan_phy_compliance_ac_5g_20mhz_open_max_a_mpdu_len_disabled_test",
+    ":wlan_phy_compliance_ac_5g_20mhz_open_max_a_mpdu_len_exp0_test",
+    ":wlan_phy_compliance_ac_5g_20mhz_open_max_a_mpdu_len_exp1_test",
+    ":wlan_phy_compliance_ac_5g_20mhz_open_max_a_mpdu_len_exp2_test",
+    ":wlan_phy_compliance_ac_5g_20mhz_open_max_a_mpdu_len_exp3_test",
+    ":wlan_phy_compliance_ac_5g_20mhz_open_max_a_mpdu_len_exp4_test",
+    ":wlan_phy_compliance_ac_5g_20mhz_open_max_a_mpdu_len_exp5_test",
+    ":wlan_phy_compliance_ac_5g_20mhz_open_max_a_mpdu_len_exp6_test",
+    ":wlan_phy_compliance_ac_5g_20mhz_open_max_a_mpdu_len_exp7_test",
+    ":wlan_phy_compliance_ac_5g_20mhz_wpa2_max_a_mpdu_len_disabled_test",
+    ":wlan_phy_compliance_ac_5g_20mhz_wpa2_max_a_mpdu_len_exp0_test",
+    ":wlan_phy_compliance_ac_5g_20mhz_wpa2_max_a_mpdu_len_exp1_test",
+    ":wlan_phy_compliance_ac_5g_20mhz_wpa2_max_a_mpdu_len_exp2_test",
+    ":wlan_phy_compliance_ac_5g_20mhz_wpa2_max_a_mpdu_len_exp3_test",
+    ":wlan_phy_compliance_ac_5g_20mhz_wpa2_max_a_mpdu_len_exp4_test",
+    ":wlan_phy_compliance_ac_5g_20mhz_wpa2_max_a_mpdu_len_exp5_test",
+    ":wlan_phy_compliance_ac_5g_20mhz_wpa2_max_a_mpdu_len_exp6_test",
+    ":wlan_phy_compliance_ac_5g_20mhz_wpa2_max_a_mpdu_len_exp7_test",
+    ":wlan_phy_compliance_ac_5g_40mhz_open_max_a_mpdu_len_disabled_test",
+    ":wlan_phy_compliance_ac_5g_40mhz_open_max_a_mpdu_len_exp0_test",
+    ":wlan_phy_compliance_ac_5g_40mhz_open_max_a_mpdu_len_exp1_test",
+    ":wlan_phy_compliance_ac_5g_40mhz_open_max_a_mpdu_len_exp2_test",
+    ":wlan_phy_compliance_ac_5g_40mhz_open_max_a_mpdu_len_exp3_test",
+    ":wlan_phy_compliance_ac_5g_40mhz_open_max_a_mpdu_len_exp4_test",
+    ":wlan_phy_compliance_ac_5g_40mhz_open_max_a_mpdu_len_exp5_test",
+    ":wlan_phy_compliance_ac_5g_40mhz_open_max_a_mpdu_len_exp6_test",
+    ":wlan_phy_compliance_ac_5g_40mhz_open_max_a_mpdu_len_exp7_test",
+    ":wlan_phy_compliance_ac_5g_40mhz_wpa2_max_a_mpdu_len_disabled_test",
+    ":wlan_phy_compliance_ac_5g_40mhz_wpa2_max_a_mpdu_len_exp0_test",
+    ":wlan_phy_compliance_ac_5g_40mhz_wpa2_max_a_mpdu_len_exp1_test",
+    ":wlan_phy_compliance_ac_5g_40mhz_wpa2_max_a_mpdu_len_exp2_test",
+    ":wlan_phy_compliance_ac_5g_40mhz_wpa2_max_a_mpdu_len_exp3_test",
+    ":wlan_phy_compliance_ac_5g_40mhz_wpa2_max_a_mpdu_len_exp4_test",
+    ":wlan_phy_compliance_ac_5g_40mhz_wpa2_max_a_mpdu_len_exp5_test",
+    ":wlan_phy_compliance_ac_5g_40mhz_wpa2_max_a_mpdu_len_exp6_test",
+    ":wlan_phy_compliance_ac_5g_40mhz_wpa2_max_a_mpdu_len_exp7_test",
+    ":wlan_phy_compliance_ac_5g_80mhz_open_max_a_mpdu_len_disabled_test",
+    ":wlan_phy_compliance_ac_5g_80mhz_open_max_a_mpdu_len_exp0_test",
+    ":wlan_phy_compliance_ac_5g_80mhz_open_max_a_mpdu_len_exp1_test",
+    ":wlan_phy_compliance_ac_5g_80mhz_open_max_a_mpdu_len_exp2_test",
+    ":wlan_phy_compliance_ac_5g_80mhz_open_max_a_mpdu_len_exp3_test",
+    ":wlan_phy_compliance_ac_5g_80mhz_open_max_a_mpdu_len_exp4_test",
+    ":wlan_phy_compliance_ac_5g_80mhz_open_max_a_mpdu_len_exp5_test",
+    ":wlan_phy_compliance_ac_5g_80mhz_open_max_a_mpdu_len_exp6_test",
+    ":wlan_phy_compliance_ac_5g_80mhz_open_max_a_mpdu_len_exp7_test",
+    ":wlan_phy_compliance_ac_5g_80mhz_wpa2_max_a_mpdu_len_disabled_test",
+    ":wlan_phy_compliance_ac_5g_80mhz_wpa2_max_a_mpdu_len_exp0_test",
+    ":wlan_phy_compliance_ac_5g_80mhz_wpa2_max_a_mpdu_len_exp1_test",
+    ":wlan_phy_compliance_ac_5g_80mhz_wpa2_max_a_mpdu_len_exp2_test",
+    ":wlan_phy_compliance_ac_5g_80mhz_wpa2_max_a_mpdu_len_exp3_test",
+    ":wlan_phy_compliance_ac_5g_80mhz_wpa2_max_a_mpdu_len_exp4_test",
+    ":wlan_phy_compliance_ac_5g_80mhz_wpa2_max_a_mpdu_len_exp5_test",
+    ":wlan_phy_compliance_ac_5g_80mhz_wpa2_max_a_mpdu_len_exp6_test",
+    ":wlan_phy_compliance_ac_5g_80mhz_wpa2_max_a_mpdu_len_exp7_test",
+    ":wlan_security_compliance_11a_test",
+    ":wlan_security_compliance_11bg_test",
+  ]
+}
+
+group("e2e_tests_quick") {
+  testonly = true
+  public_deps = [ ":wlan_security_compliance_abg_test_quick" ]
+}
+
+group("e2e_tests_manual") {
+  testonly = true
+  public_deps = [
+    # Running RegulatoryComplianceTest is usually only necessary when verifying
+    # new WLAN firmware patches. It is kept out of automation because it takes
+    # too long to run there.
+    ":regulatory_compliance_test",
+  ]
+}
diff --git a/tests/wlan/compliance/RegulatoryComplianceTest.py b/tests/wlan/compliance/RegulatoryComplianceTest.py
new file mode 100644
index 0000000..284c915
--- /dev/null
+++ b/tests/wlan/compliance/RegulatoryComplianceTest.py
@@ -0,0 +1,205 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 The Fuchsia Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+from typing import NamedTuple
+
+from honeydew.affordances.connectivity.wlan.utils.types import CountryCode
+from mobly import asserts, test_runner
+from mobly.config_parser import TestRunConfig
+
+from antlion import utils
+from antlion.controllers.access_point import setup_ap
+from antlion.controllers.ap_lib import hostapd_constants
+from antlion.controllers.ap_lib.hostapd_security import SecurityMode
+from antlion.controllers.ap_lib.regulatory_channels import (
+    COUNTRY_CHANNELS,
+    TEST_CHANNELS,
+)
+from antlion.controllers.fuchsia_device import FuchsiaDevice
+from antlion.test_utils.abstract_devices.wlan_device import AssociationMode
+from antlion.test_utils.wifi import base_test
+
+N_CAPABILITIES_DEFAULT = [
+    hostapd_constants.N_CAPABILITY_LDPC,
+    hostapd_constants.N_CAPABILITY_SGI20,
+    hostapd_constants.N_CAPABILITY_SGI40,
+    hostapd_constants.N_CAPABILITY_TX_STBC,
+    hostapd_constants.N_CAPABILITY_RX_STBC1,
+]
+
+MAX_2_4_CHANNEL = 14
+
+
+class RegulatoryTest(NamedTuple):
+    country_code: str
+    channel: int
+    channel_bandwidth: int
+    expect_association: bool
+
+
+class RegulatoryComplianceTest(base_test.WifiBaseTest):
+    """Tests regulatory compliance.
+
+    Testbed Requirement:
+    * 1 x Fuchsia device (dut)
+    * 1 x access point
+    """
+
+    def __init__(self, configs: TestRunConfig) -> None:
+        super().__init__(configs)
+        self.log = logging.getLogger()
+        self.fuchsia_device, self.dut = self.get_dut_type(
+            FuchsiaDevice, AssociationMode.POLICY
+        )
+
+        self.access_point = self.access_points[0]
+        self.access_point.stop_all_aps()
+
+        self.regulatory_results = [
+            "====CountryCode,Channel,Frequency,ChannelBandwith,Connected/Not-Connected===="
+        ]
+
+    def pre_run(self) -> None:
+        tests: list[RegulatoryTest] = []
+        for country in COUNTRY_CHANNELS.values():
+            for channel, bandwidths in TEST_CHANNELS.items():
+                for bandwidth in bandwidths:
+                    tests.append(
+                        RegulatoryTest(
+                            country_code=country.country_code,
+                            channel=channel,
+                            channel_bandwidth=bandwidth,
+                            expect_association=(
+                                channel in country.allowed_channels
+                                and bandwidth in country.allowed_channels[channel]
+                            ),
+                        )
+                    )
+
+        def generate_test_name(
+            country_code: str,
+            channel: int,
+            channel_bandwidth: int,
+            _expect_association: bool,
+        ) -> str:
+            return f"test_{country_code}_channel_{channel}_{channel_bandwidth}mhz"
+
+        self.generate_tests(self.verify_channel_compliance, generate_test_name, tests)
+
+    def teardown_class(self) -> None:
+        super().teardown_class()
+
+        regulatory_save_path = f"{self.log_path}/regulatory_results.txt"
+        with open(regulatory_save_path, "w", encoding="utf-8") as file:
+            file.write("\n".join(self.regulatory_results))
+
+    def setup_test(self) -> None:
+        super().setup_test()
+        self.access_point.stop_all_aps()
+        for ad in self.android_devices:
+            ad.droid.wakeLockAcquireBright()
+            ad.droid.wakeUpNow()
+        self.dut.wifi_toggle_state(True)
+        self.dut.disconnect()
+
+    def teardown_test(self) -> None:
+        for ad in self.android_devices:
+            ad.droid.wakeLockRelease()
+            ad.droid.goToSleepNow()
+        self.dut.turn_location_off_and_scan_toggle_off()
+        self.dut.disconnect()
+        self.download_logs()
+        self.access_point.stop_all_aps()
+        super().teardown_test()
+
+    def setup_ap(
+        self,
+        channel: int,
+        channel_bandwidth: int,
+    ) -> str:
+        """Start network on AP with basic configuration.
+
+        Args:
+            channel: channel to use for network
+            channel_bandwidth: channel bandwidth in MHz to use for network.
+
+        Returns:
+            SSID of the newly created and running network
+
+        Raises:
+            ConnectionError if network is not started successfully.
+        """
+        ssid = utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G)
+        try:
+            setup_ap(
+                access_point=self.access_point,
+                profile_name="whirlwind",
+                channel=channel,
+                force_wmm=True,
+                ssid=ssid,
+                vht_bandwidth=channel_bandwidth,
+                setup_bridge=True,
+            )
+            self.log.info(
+                f"Network (ssid: {ssid}) up on channel {channel} "
+                f"w/ channel bandwidth {channel_bandwidth} MHz"
+            )
+            return ssid
+        except Exception as err:
+            raise ConnectionError(
+                f"Failed to setup ap on channel: {channel}, "
+                f"channel bandwidth: {channel_bandwidth} MHz. "
+            ) from err
+
+    def verify_channel_compliance(
+        self,
+        country_code: str,
+        channel: int,
+        channel_bandwidth: int,
+        expect_association: bool,
+    ) -> None:
+        """Verify device complies with provided regulatory requirements for a
+        specific channel and channel bandwidth. Run with generated test cases
+        in the verify_regulatory_compliance parent test.
+        """
+        self.fuchsia_device.wlan_controller.set_country_code(CountryCode(country_code))
+
+        ssid = self.setup_ap(channel, channel_bandwidth)
+
+        self.log.info(
+            f'Attempting to associate to network "{ssid}" on channel '
+            f"{channel} @ {channel_bandwidth}mhz"
+        )
+
+        associated = self.dut.associate(ssid, SecurityMode.OPEN)
+
+        channel_ghz = "2.4" if channel < 36 else "5"
+        association_code = "c" if associated else "nc"
+        regulatory_result = f"REGTRACKER: {country_code},{channel},{channel_ghz},{channel_bandwidth},{association_code}"
+        self.regulatory_results.append(regulatory_result)
+        self.log.info(regulatory_result)
+
+        asserts.assert_true(
+            associated == expect_association,
+            f"Expected device to{'' if expect_association else ' NOT'} "
+            f"associate using country code {country_code} for channel "
+            f"{channel} with channel bandwidth {channel_bandwidth} MHz.",
+        )
+
+
+if __name__ == "__main__":
+    test_runner.main()
diff --git a/src/antlion/tests/wlan/compliance/VapeInteropTest.py b/tests/wlan/compliance/VapeInteropTest.py
similarity index 69%
rename from src/antlion/tests/wlan/compliance/VapeInteropTest.py
rename to tests/wlan/compliance/VapeInteropTest.py
index 32b39c6..f12bc7d 100644
--- a/src/antlion/tests/wlan/compliance/VapeInteropTest.py
+++ b/tests/wlan/compliance/VapeInteropTest.py
@@ -14,15 +14,16 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+from mobly import asserts, signals, test_runner
+from mobly.records import TestResultRecord
+
 from antlion import utils
 from antlion.controllers.access_point import setup_ap
 from antlion.controllers.ap_lib import hostapd_constants
-from antlion.controllers.ap_lib.hostapd_security import Security
-from antlion.test_utils.abstract_devices.wlan_device import create_wlan_device
+from antlion.controllers.ap_lib.hostapd_security import Security, SecurityMode
+from antlion.test_utils.abstract_devices.wlan_device import AssociationMode
 from antlion.test_utils.wifi import base_test
 
-from mobly import asserts, test_runner
-
 
 class VapeInteropTest(base_test.WifiBaseTest):
     """Tests interoperability with mock third party AP profiles.
@@ -32,41 +33,34 @@
     * One Whirlwind Access Point
     """
 
-    def setup_class(self):
+    def setup_class(self) -> None:
         super().setup_class()
 
-        device_type = self.user_params.get("dut", "fuchsia_devices")
-        if device_type == "fuchsia_devices":
-            self.dut = create_wlan_device(self.fuchsia_devices[0])
-        elif device_type == "android_devices":
-            self.dut = create_wlan_device(self.android_devices[0])
-        else:
-            raise ValueError(
-                f'Invalid "dut" type specified in config: "{device_type}".'
-                'Expected "fuchsia_devices" or "android_devices".'
-            )
+        self.dut = self.get_dut(AssociationMode.POLICY)
 
+        if len(self.access_points) == 0:
+            raise signals.TestAbortClass("Requires at least one access point")
         self.access_point = self.access_points[0]
 
         # Same for both 2g and 5g
         self.ssid = utils.rand_ascii_str(hostapd_constants.AP_SSID_LENGTH_2G)
         self.password = utils.rand_ascii_str(hostapd_constants.AP_PASSPHRASE_LENGTH_2G)
         self.security_profile_wpa2 = Security(
-            security_mode=hostapd_constants.WPA2_STRING,
+            security_mode=SecurityMode.WPA2,
             password=self.password,
             wpa2_cipher=hostapd_constants.WPA2_DEFAULT_CIPER,
         )
 
         self.access_point.stop_all_aps()
 
-    def setup_test(self):
+    def setup_test(self) -> None:
         if hasattr(self, "android_devices"):
             for ad in self.android_devices:
                 ad.droid.wakeLockAcquireBright()
                 ad.droid.wakeUpNow()
         self.dut.wifi_toggle_state(True)
 
-    def teardown_test(self):
+    def teardown_test(self) -> None:
         if hasattr(self, "android_devices"):
             for ad in self.android_devices:
                 ad.droid.wakeLockRelease()
@@ -74,846 +68,877 @@
         self.dut.turn_location_off_and_scan_toggle_off()
         self.dut.disconnect()
         self.dut.reset_wifi()
-        self.download_ap_logs()
+        self.download_logs()
         self.access_point.stop_all_aps()
 
-    def on_fail(self, test_name, begin_time):
-        super().on_fail(test_name, begin_time)
+    def on_fail(self, record: TestResultRecord) -> None:
+        super().on_fail(record)
         self.access_point.stop_all_aps()
 
-    def test_associate_actiontec_pk5000_24ghz_open(self):
+    def test_associate_actiontec_pk5000_24ghz_open(self) -> None:
         setup_ap(
             access_point=self.access_point,
             profile_name="actiontec_pk5000",
             channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
             ssid=self.ssid,
         )
-        asserts.assert_true(self.dut.associate(self.ssid), "Failed to connect.")
+        asserts.assert_true(
+            self.dut.associate(self.ssid, SecurityMode.OPEN), "Failed to connect."
+        )
 
-    def test_associate_actiontec_pk5000_24ghz_wpa2(self):
+    def test_associate_actiontec_pk5000_24ghz_wpa2(self) -> None:
         setup_ap(
             access_point=self.access_point,
             profile_name="actiontec_pk5000",
             channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
             ssid=self.ssid,
             security=self.security_profile_wpa2,
-            password=self.password,
         )
         asserts.assert_true(
             self.dut.associate(
                 self.ssid,
+                SecurityMode.WPA2,
                 target_pwd=self.password,
-                target_security=hostapd_constants.WPA2_STRING,
             ),
             "Failed to connect.",
         )
 
-    def test_associate_actiontec_mi424wr_24ghz_open(self):
+    def test_associate_actiontec_mi424wr_24ghz_open(self) -> None:
         setup_ap(
             access_point=self.access_point,
             profile_name="actiontec_mi424wr",
             channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
             ssid=self.ssid,
         )
-        asserts.assert_true(self.dut.associate(self.ssid), "Failed to connect.")
+        asserts.assert_true(
+            self.dut.associate(self.ssid, SecurityMode.OPEN), "Failed to connect."
+        )
 
-    def test_associate_actiontec_mi424wr_24ghz_wpa2(self):
+    def test_associate_actiontec_mi424wr_24ghz_wpa2(self) -> None:
         setup_ap(
             access_point=self.access_point,
             profile_name="actiontec_mi424wr",
             channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
             ssid=self.ssid,
             security=self.security_profile_wpa2,
-            password=self.password,
         )
         asserts.assert_true(
             self.dut.associate(
                 self.ssid,
+                SecurityMode.WPA2,
                 target_pwd=self.password,
-                target_security=hostapd_constants.WPA2_STRING,
             ),
             "Failed to connect.",
         )
 
-    def test_associate_asus_rtac66u_24ghz_open(self):
+    def test_associate_asus_rtac66u_24ghz_open(self) -> None:
         setup_ap(
             access_point=self.access_point,
             profile_name="asus_rtac66u",
             channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
             ssid=self.ssid,
         )
-        asserts.assert_true(self.dut.associate(self.ssid), "Failed to connect.")
+        asserts.assert_true(
+            self.dut.associate(self.ssid, SecurityMode.OPEN), "Failed to connect."
+        )
 
-    def test_associate_asus_rtac66u_24ghz_wpa2(self):
+    def test_associate_asus_rtac66u_24ghz_wpa2(self) -> None:
         setup_ap(
             access_point=self.access_point,
             profile_name="asus_rtac66u",
             channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
             ssid=self.ssid,
             security=self.security_profile_wpa2,
-            password=self.password,
         )
         asserts.assert_true(
             self.dut.associate(
                 self.ssid,
+                SecurityMode.WPA2,
                 target_pwd=self.password,
-                target_security=hostapd_constants.WPA2_STRING,
             ),
             "Failed to connect.",
         )
 
-    def test_associate_asus_rtac66u_5ghz_open(self):
+    def test_associate_asus_rtac66u_5ghz_open(self) -> None:
         setup_ap(
             access_point=self.access_point,
             profile_name="asus_rtac66u",
             channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
             ssid=self.ssid,
         )
-        asserts.assert_true(self.dut.associate(self.ssid), "Failed to connect.")
+        asserts.assert_true(
+            self.dut.associate(self.ssid, SecurityMode.OPEN), "Failed to connect."
+        )
 
-    def test_associate_asus_rtac66u_5ghz_wpa2(self):
+    def test_associate_asus_rtac66u_5ghz_wpa2(self) -> None:
         setup_ap(
             access_point=self.access_point,
             profile_name="asus_rtac66u",
             channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
             ssid=self.ssid,
             security=self.security_profile_wpa2,
-            password=self.password,
         )
         asserts.assert_true(
             self.dut.associate(
                 self.ssid,
+                SecurityMode.WPA2,
                 target_pwd=self.password,
-                target_security=hostapd_constants.WPA2_STRING,
             ),
             "Failed to connect.",
         )
 
-    def test_associate_asus_rtac86u_24ghz_open(self):
+    def test_associate_asus_rtac86u_24ghz_open(self) -> None:
         setup_ap(
             access_point=self.access_point,
             profile_name="asus_rtac86u",
             channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
             ssid=self.ssid,
         )
-        asserts.assert_true(self.dut.associate(self.ssid), "Failed to connect.")
+        asserts.assert_true(
+            self.dut.associate(self.ssid, SecurityMode.OPEN), "Failed to connect."
+        )
 
-    def test_associate_asus_rtac86u_24ghz_wpa2(self):
+    def test_associate_asus_rtac86u_24ghz_wpa2(self) -> None:
         setup_ap(
             access_point=self.access_point,
             profile_name="asus_rtac86u",
             channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
             ssid=self.ssid,
             security=self.security_profile_wpa2,
-            password=self.password,
         )
         asserts.assert_true(
             self.dut.associate(
                 self.ssid,
+                SecurityMode.WPA2,
                 target_pwd=self.password,
-                target_security=hostapd_constants.WPA2_STRING,
             ),
             "Failed to connect.",
         )
 
-    def test_associate_asus_rtac86u_5ghz_open(self):
+    def test_associate_asus_rtac86u_5ghz_open(self) -> None:
         setup_ap(
             access_point=self.access_point,
             profile_name="asus_rtac86u",
             channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
             ssid=self.ssid,
         )
-        asserts.assert_true(self.dut.associate(self.ssid), "Failed to connect.")
+        asserts.assert_true(
+            self.dut.associate(self.ssid, SecurityMode.OPEN), "Failed to connect."
+        )
 
-    def test_associate_asus_rtac86u_5ghz_wpa2(self):
+    def test_associate_asus_rtac86u_5ghz_wpa2(self) -> None:
         setup_ap(
             access_point=self.access_point,
             profile_name="asus_rtac86u",
             channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
             ssid=self.ssid,
             security=self.security_profile_wpa2,
-            password=self.password,
         )
         asserts.assert_true(
             self.dut.associate(
                 self.ssid,
+                SecurityMode.WPA2,
                 target_pwd=self.password,
-                target_security=hostapd_constants.WPA2_STRING,
             ),
             "Failed to connect.",
         )
 
-    def test_associate_asus_rtac5300_24ghz_open(self):
+    def test_associate_asus_rtac5300_24ghz_open(self) -> None:
         setup_ap(
             access_point=self.access_point,
             profile_name="asus_rtac5300",
             channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
             ssid=self.ssid,
         )
-        asserts.assert_true(self.dut.associate(self.ssid), "Failed to connect.")
+        asserts.assert_true(
+            self.dut.associate(self.ssid, SecurityMode.OPEN), "Failed to connect."
+        )
 
-    def test_associate_asus_rtac5300_24ghz_wpa2(self):
+    def test_associate_asus_rtac5300_24ghz_wpa2(self) -> None:
         setup_ap(
             access_point=self.access_point,
             profile_name="asus_rtac5300",
             channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
             ssid=self.ssid,
             security=self.security_profile_wpa2,
-            password=self.password,
         )
         asserts.assert_true(
             self.dut.associate(
                 self.ssid,
+                SecurityMode.WPA2,
                 target_pwd=self.password,
-                target_security=hostapd_constants.WPA2_STRING,
             ),
             "Failed to connect.",
         )
 
-    def test_associate_asus_rtac5300_5ghz_open(self):
+    def test_associate_asus_rtac5300_5ghz_open(self) -> None:
         setup_ap(
             access_point=self.access_point,
             profile_name="asus_rtac5300",
             channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
             ssid=self.ssid,
         )
-        asserts.assert_true(self.dut.associate(self.ssid), "Failed to connect.")
+        asserts.assert_true(
+            self.dut.associate(self.ssid, SecurityMode.OPEN), "Failed to connect."
+        )
 
-    def test_associate_asus_rtac5300_5ghz_wpa2(self):
+    def test_associate_asus_rtac5300_5ghz_wpa2(self) -> None:
         setup_ap(
             access_point=self.access_point,
             profile_name="asus_rtac5300",
             channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
             ssid=self.ssid,
             security=self.security_profile_wpa2,
-            password=self.password,
         )
         asserts.assert_true(
             self.dut.associate(
                 self.ssid,
+                SecurityMode.WPA2,
                 target_pwd=self.password,
-                target_security=hostapd_constants.WPA2_STRING,
             ),
             "Failed to connect.",
         )
 
-    def test_associate_asus_rtn56u_24ghz_open(self):
+    def test_associate_asus_rtn56u_24ghz_open(self) -> None:
         setup_ap(
             access_point=self.access_point,
             profile_name="asus_rtn56u",
             channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
             ssid=self.ssid,
         )
-        asserts.assert_true(self.dut.associate(self.ssid), "Failed to connect.")
+        asserts.assert_true(
+            self.dut.associate(self.ssid, SecurityMode.OPEN), "Failed to connect."
+        )
 
-    def test_associate_asus_rtn56u_24ghz_wpa2(self):
+    def test_associate_asus_rtn56u_24ghz_wpa2(self) -> None:
         setup_ap(
             access_point=self.access_point,
             profile_name="asus_rtn56u",
             channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
             ssid=self.ssid,
             security=self.security_profile_wpa2,
-            password=self.password,
         )
         asserts.assert_true(
             self.dut.associate(
                 self.ssid,
+                SecurityMode.WPA2,
                 target_pwd=self.password,
-                target_security=hostapd_constants.WPA2_STRING,
             ),
             "Failed to connect.",
         )
 
-    def test_associate_asus_rtn56u_5ghz_open(self):
+    def test_associate_asus_rtn56u_5ghz_open(self) -> None:
         setup_ap(
             access_point=self.access_point,
             profile_name="asus_rtn56u",
             channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
             ssid=self.ssid,
         )
-        asserts.assert_true(self.dut.associate(self.ssid), "Failed to connect.")
+        asserts.assert_true(
+            self.dut.associate(self.ssid, SecurityMode.OPEN), "Failed to connect."
+        )
 
-    def test_associate_asus_rtn56u_5ghz_wpa2(self):
+    def test_associate_asus_rtn56u_5ghz_wpa2(self) -> None:
         setup_ap(
             access_point=self.access_point,
             profile_name="asus_rtn56u",
             channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
             ssid=self.ssid,
             security=self.security_profile_wpa2,
-            password=self.password,
         )
         asserts.assert_true(
             self.dut.associate(
                 self.ssid,
+                SecurityMode.WPA2,
                 target_pwd=self.password,
-                target_security=hostapd_constants.WPA2_STRING,
             ),
             "Failed to connect.",
         )
 
-    def test_associate_asus_rtn66u_24ghz_open(self):
+    def test_associate_asus_rtn66u_24ghz_open(self) -> None:
         setup_ap(
             access_point=self.access_point,
             profile_name="asus_rtn66u",
             channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
             ssid=self.ssid,
         )
-        asserts.assert_true(self.dut.associate(self.ssid), "Failed to connect.")
+        asserts.assert_true(
+            self.dut.associate(self.ssid, SecurityMode.OPEN), "Failed to connect."
+        )
 
-    def test_associate_asus_rtn66u_24ghz_wpa2(self):
+    def test_associate_asus_rtn66u_24ghz_wpa2(self) -> None:
         setup_ap(
             access_point=self.access_point,
             profile_name="asus_rtn66u",
             channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
             ssid=self.ssid,
             security=self.security_profile_wpa2,
-            password=self.password,
         )
         asserts.assert_true(
             self.dut.associate(
                 self.ssid,
+                SecurityMode.WPA2,
                 target_pwd=self.password,
-                target_security=hostapd_constants.WPA2_STRING,
             ),
             "Failed to connect.",
         )
 
-    def test_associate_asus_rtn66u_5ghz_open(self):
+    def test_associate_asus_rtn66u_5ghz_open(self) -> None:
         setup_ap(
             access_point=self.access_point,
             profile_name="asus_rtn66u",
             channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
             ssid=self.ssid,
         )
-        asserts.assert_true(self.dut.associate(self.ssid), "Failed to connect.")
+        asserts.assert_true(
+            self.dut.associate(self.ssid, SecurityMode.OPEN), "Failed to connect."
+        )
 
-    def test_associate_asus_rtn66u_5ghz_wpa2(self):
+    def test_associate_asus_rtn66u_5ghz_wpa2(self) -> None:
         setup_ap(
             access_point=self.access_point,
             profile_name="asus_rtn66u",
             channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
             ssid=self.ssid,
             security=self.security_profile_wpa2,
-            password=self.password,
         )
         asserts.assert_true(
             self.dut.associate(
                 self.ssid,
+                SecurityMode.WPA2,
                 target_pwd=self.password,
-                target_security=hostapd_constants.WPA2_STRING,
             ),
             "Failed to connect.",
         )
 
-    def test_associate_belkin_f9k1001v5_24ghz_open(self):
+    def test_associate_belkin_f9k1001v5_24ghz_open(self) -> None:
         setup_ap(
             access_point=self.access_point,
             profile_name="belkin_f9k1001v5",
             channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
             ssid=self.ssid,
         )
-        asserts.assert_true(self.dut.associate(self.ssid), "Failed to connect.")
+        asserts.assert_true(
+            self.dut.associate(self.ssid, SecurityMode.OPEN), "Failed to connect."
+        )
 
-    def test_associate_belkin_f9k1001v5_24ghz_wpa2(self):
+    def test_associate_belkin_f9k1001v5_24ghz_wpa2(self) -> None:
         setup_ap(
             access_point=self.access_point,
             profile_name="belkin_f9k1001v5",
             channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
             ssid=self.ssid,
             security=self.security_profile_wpa2,
-            password=self.password,
         )
         asserts.assert_true(
             self.dut.associate(
                 self.ssid,
+                SecurityMode.WPA2,
                 target_pwd=self.password,
-                target_security=hostapd_constants.WPA2_STRING,
             ),
             "Failed to connect.",
         )
 
-    def test_associate_linksys_ea4500_24ghz_open(self):
+    def test_associate_linksys_ea4500_24ghz_open(self) -> None:
         setup_ap(
             access_point=self.access_point,
             profile_name="linksys_ea4500",
             channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
             ssid=self.ssid,
         )
-        asserts.assert_true(self.dut.associate(self.ssid), "Failed to connect.")
+        asserts.assert_true(
+            self.dut.associate(self.ssid, SecurityMode.OPEN), "Failed to connect."
+        )
 
-    def test_associate_linksys_ea4500_24ghz_wpa2(self):
+    def test_associate_linksys_ea4500_24ghz_wpa2(self) -> None:
         setup_ap(
             access_point=self.access_point,
             profile_name="linksys_ea4500",
             channel=hostapd_constants.AP_DEFAULT_CHANNEL_2G,
             ssid=self.ssid,
             security=self.security_profile_wpa2,
-            password=self.password,
         )
         asserts.assert_true(
             self.dut.associate(
                 self.ssid,
+                SecurityMode.WPA2,
                 target_pwd=self.password,
-                target_security=hostapd_constants.WPA2_STRING,
             ),
             "Failed to connect.",
         )
 
-    def test_associate_linksys_ea4500_5ghz_open(self):
+    def test_associate_linksys_ea4500_5ghz_open(self) -> None:
         setup_ap(
             access_point=self.access_point,
             profile_name="linksys_ea4500",
             channel=hostapd_constants.AP_DEFAULT_CHANNEL_5G,
             ssid=self.ssid,
         )
-        asserts.assert_true(self.dut.associate(self.ssid), "Failed to connect.")
+        asserts.assert_true(
+            self.dut.associate(self.ssid, SecurityMode.OPEN), "Failed to connect."
+        )
 
-    def test_associate_linksys_ea4500_5ghz_wpa2(self):
+    def test_associate_linksys_ea4500_5ghz_wpa2(self) -> None:
         setup_ap(